code: string (lengths 5-1M) | repo_name: string (lengths 5-109) | path: string (lengths 6-208) | language: string (1 class) | license: string (15 classes) | size: int64 (5-1M)
---|---|---|---|---|---
package com.seanshubin.learn.datomic.domain
case class SeqComparison(same: Boolean, messageLines: Seq[String])
object SeqComparison {
def compare(left: Seq[String], right: Seq[String]): SeqComparison = {
compareRecursive(left.toList, right.toList, 0, Nil)
}
private def compareRecursive(actualSeq: List[String], expectedSeq: List[String], index: Int, messageLines: List[String]): SeqComparison = {
(actualSeq, expectedSeq) match {
case (Nil, Nil) => SeqComparison(same = true, messageLines.reverse) // reverse accumulated lines, matching the other branches
case (actualHead :: actualTail, Nil) =>
val newMessageLines = messageLines.reverse ++ Seq(
s"extra at $index",
actualHead)
SeqComparison(same = false, newMessageLines)
case (Nil, expectedHead :: expectedTail) =>
val newMessageLines = messageLines.reverse ++ Seq(
s"missing at $index",
expectedHead)
SeqComparison(same = false, newMessageLines)
case (actualHead :: actualTail, expectedHead :: expectedTail) =>
if (actualHead == expectedHead) {
compareRecursive(actualTail, expectedTail, index + 1, s"same[$index]: $actualHead" :: messageLines)
} else {
val newMessageLines = messageLines.reverse ++ Seq(
s"different at $index",
s"expected: $expectedHead",
s"actual : $actualHead")
SeqComparison(same = false, newMessageLines)
}
}
}
}
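// A minimal usage sketch (hypothetical inputs, not part of the original file):
//   SeqComparison.compare(Seq("a", "b"), Seq("a", "c"))
// yields same = false with message lines roughly like:
//   same[0]: a
//   different at 1
//   expected: c
//   actual : b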
| SeanShubin/learn-datomic | domain/src/main/scala/com/seanshubin/learn/datomic/domain/SeqComparison.scala | Scala | unlicense | 1,434 |
package com.olegych.scastie.client.components.editor
import org.scalajs.dom
import org.scalajs.dom.raw.HTMLElement
import codemirror.{Editor => CMEditor}
import codemirror.CodeMirror.{Pos => CMPosition}
private[editor] class LoadingMessage() {
private val message = {
val ul = dom.document
.createElement("ul")
.asInstanceOf[HTMLElement]
ul.className = ul.className.concat(" CodeMirror-hints loading-message")
ul.style.opacity = "0"
val li = dom.document.createElement("li").asInstanceOf[HTMLElement]
li.className = li.className.concat("CodeMirror-hint")
val span = dom.document.createElement("span").asInstanceOf[HTMLElement]
span.className = span.className.concat("cm-def")
span.innerHTML = "Loading..."
li.appendChild(span)
ul.appendChild(li)
ul
}
def hide(): Unit = {
message.style.opacity = "0"
}
def show(editor: CMEditor, pos: CMPosition): Unit = {
editor.addWidget(pos, message, scrollIntoView = true)
message.style.opacity = "1"
}
def isVisible: Boolean = message.style.opacity == "1"
}
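// A minimal usage sketch (the editor instance and cursorPos are assumed to come
// from the surrounding CodeMirror facade; hypothetical, not part of the original file):
//   val loading = new LoadingMessage()
//   loading.show(editor, cursorPos) // anchors the "Loading..." widget at cursorPos
//   if (loading.isVisible) loading.hide() // once completions have arrived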
| OlegYch/scastie | client/src/main/scala/com.olegych.scastie.client/components/editor/LoadingMessage.scala | Scala | apache-2.0 | 1,088 |
/*
* Copyright 2011-2014 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.ebpi.yaidom.integrationtest
import java.io.ByteArrayInputStream
import scala.Vector
import scala.xml.NodeSeq.seqToNodeSeq
import org.junit.Test
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.Suite
import org.w3c.dom.DOMException
import org.xml.sax.SAXParseException
import nl.ebpi.yaidom.convert.ScalaXmlConversions
import nl.ebpi.yaidom.core.EName
import nl.ebpi.yaidom.core.QName
import nl.ebpi.yaidom.core.Scope
import nl.ebpi.yaidom.parse.DocumentParserUsingDom
import nl.ebpi.yaidom.parse.DocumentParserUsingDomLS
import nl.ebpi.yaidom.parse.DocumentParserUsingSax
import nl.ebpi.yaidom.parse.DocumentParserUsingStax
import nl.ebpi.yaidom.queryapi.HasENameApi.withEName
import nl.ebpi.yaidom.resolved
import nl.ebpi.yaidom.scalaxml.ScalaXmlElem
import nl.ebpi.yaidom.simple.Node
import nl.ebpi.yaidom.simple.NodeBuilder
import javax.xml.parsers.DocumentBuilderFactory
import javax.xml.parsers.SAXParserFactory
/**
* Test case showing why preferring yaidom over the standard Scala XML library makes sense.
*
* Yaidom does not like non-namespace-well-formed XML, unlike Scala XML. This is one of
* the reasons that yaidom is a better precision tool for XML processing than Scala XML.
* Yaidom does not try to be as concise as possible, but precise namespace support is
* considered more important. Yaidom is (now and always) meant to be a good basis for the
* kind of XML processing performed in XBRL processing and validation.
*
* This test case also shows how yaidom's namespace support is consistent with the existence
* of multiple (native) element implementations, such as simple elements, resolved elements
* (which represent James Clark's minimal element representation) and element builders
* (which are not XML representations themselves, but make creation of XML possible without
* passing in-scope namespaces around).
*
* In short, if you care about the results of these tests, which show some differences
* between yaidom and Scala XML with respect to namespaces, consider using yaidom.
* If not, by all means, use Scala XML.
*
* @author Chris de Vreeze
*/
@RunWith(classOf[JUnitRunner])
class TheCaseForYaidomTest extends Suite {
/**
* Our XML, which is not namespace-well-formed.
* See http://stackoverflow.com/questions/14871752/is-xml-document-with-undeclared-prefix-well-formed.
*/
private val wrongXml =
"""<root><prefix:element></prefix:element></root>"""
private val wrongXml2 =
"""<link:linkbaseRef xlink:arcrole="http://www.w3.org/1999/xlink/properties/linkbase" xlink:href="my-lab-en.xml" xlink:role="http://www.xbrl.org/2003/role/labelLinkbaseRef" xlink:type="simple"/>"""
// Some test methods for the first non-namespace-well-formed XML.
// They show that yaidom fails faster on non-namespace-well-formed XML than Scala XML.
/**
* Tries to create a DOM tree for the (wrong) XML. It fails, as expected.
*/
@Test def testTryCreatingDomForWrongXml(): Unit = {
val dbf = DocumentBuilderFactory.newInstance()
val db = dbf.newDocumentBuilder()
val d = db.newDocument()
val rootElm = d.createElementNS(null, "root")
d.appendChild(rootElm)
intercept[DOMException] {
val elmElm = d.createElementNS(null, "prefix:element")
rootElm.appendChild(elmElm)
}
}
/**
* Tries to parse the XML into a yaidom Elem (via SAX), but fails, as expected.
*/
@Test def testTryParsingWrongXmlViaSax(): Unit = {
val parser = DocumentParserUsingSax.newInstance
intercept[SAXParseException] {
parser.parse(new ByteArrayInputStream(wrongXml.getBytes("UTF-8")))
}
}
/**
* Tries to parse the XML into a yaidom Elem (via StAX), but fails, as expected.
*/
@Test def testTryParsingWrongXmlViaStax(): Unit = {
val parser = DocumentParserUsingStax.newInstance
intercept[Exception] {
parser.parse(new ByteArrayInputStream(wrongXml.getBytes("UTF-8")))
}
}
/**
* Tries to parse the XML into a yaidom Elem (via DOM), but fails, as expected.
*/
@Test def testTryParsingWrongXmlViaDom(): Unit = {
val parser = DocumentParserUsingDom.newInstance
intercept[SAXParseException] {
parser.parse(new ByteArrayInputStream(wrongXml.getBytes("UTF-8")))
}
}
/**
* Tries to parse the XML into a yaidom Elem (via DOM LS), but fails, as expected.
*/
@Test def testTryParsingWrongXmlViaDomLS(): Unit = {
val parser = DocumentParserUsingDomLS.newInstance
intercept[RuntimeException] {
parser.parse(new ByteArrayInputStream(wrongXml.getBytes("UTF-8")))
}
}
/**
* Tries to parse the XML into a yaidom Elem again, but fails, as expected.
* This time the underlying parser is not namespace-aware, but still yaidom Elem
* creation fails.
*/
@Test def testTryParsingWrongXmlAgain(): Unit = {
val spf = SAXParserFactory.newInstance
spf.setNamespaceAware(false)
val parser = DocumentParserUsingSax.newInstance(spf)
// SAX parsing succeeds, but yaidom Elem creation does not
intercept[RuntimeException] {
parser.parse(new ByteArrayInputStream(wrongXml.getBytes("UTF-8")))
}
}
/**
* Tries to parse the XML using scala.xml.XML, but succeeds, unexpectedly.
*
* A fatal error is reported, but the exception is "eaten", returning a scala.xml.Elem, which is
* not namespace-well-formed.
*/
@Test def testTryParsingWrongXmlUsingScalaXml(): Unit = {
val scalaElem = scala.xml.XML.load(new ByteArrayInputStream(wrongXml.getBytes("UTF-8")))
// Why null? Why not an Option?
assertResult(null) {
scalaElem.prefix
}
assertResult("root") {
scalaElem.label
}
assertResult("prefix") {
scalaElem.child.head.prefix
}
assertResult("element") {
scalaElem.child.head.label
}
}
/**
* Creates an ElemBuilder for the XML, which succeeds. After all, ElemBuilders are
* only builders of XML, and do not claim to be valid namespace-well-formed XML
* themselves. Indeed, ElemBuilders can only be queried for qualified names, but not
* for expanded names.
*/
@Test def testCreateElemBuilderForWrongXml(): Unit = {
import NodeBuilder._
val elemBuilder =
elem(QName("root"), Vector(emptyElem(QName("prefix:element"))))
assertResult(false) {
elemBuilder.canBuild(Scope.Empty)
}
val ns = "http://namespace"
assertResult(true) {
elemBuilder.canBuild(Scope.from("prefix" -> ns))
}
}
/**
* Tries to create an Elem for the XML, without passing the
* correct parent scope. This fails, as expected.
*/
@Test def testTryCreatingElemForWrongXml(): Unit = {
import Node._
intercept[RuntimeException] {
elem(
QName("root"),
Scope.Empty,
Vector(emptyElem(QName("prefix:element"), Scope.Empty)))
}
}
/**
* Tries to convert an ElemBuilder for the XML into an Elem, this time passing the
* correct parent scope. This succeeds, as expected, and so do the subsequent queries.
*/
@Test def testQueryElemForFixedWrongXml(): Unit = {
import NodeBuilder._
val elemBuilder =
elem(QName("root"), Vector(emptyElem(QName("prefix:element"))))
val ns = "http://namespace"
val elm = elemBuilder.build(Scope.from("prefix" -> ns))
val elemENames = elm.findAllElemsOrSelf.map(_.resolvedName)
assertResult(List(EName("root"), EName(ns, "element"))) {
elemENames
}
assertResult(elm.findAllElemsOrSelf) {
elm.filterElemsOrSelf(e => e.localName == "root" || e.localName == "element")
}
assertResult(List(elm)) {
elm.filterElemsOrSelf(withEName(None, "root"))
}
assertResult(Nil) {
elm.filterElemsOrSelf(withEName(ns, "root"))
}
assertResult(elm.findAllElems) {
elm.filterElemsOrSelf(withEName(ns, "element"))
}
assertResult(Nil) {
elm.filterElemsOrSelf(withEName(None, "element"))
}
assertResult(elm.findAllElemsOrSelf) {
elm.filterElemsOrSelf(e => e.resolvedName == EName("root") || e.resolvedName == EName(ns, "element"))
}
}
/**
* Tries to create a Scala XML Elem for the XML, which succeeds (although it should not succeed).
*/
@Test def testCreateScalaXmlElemForWrongXml(): Unit = {
val scalaElem =
<root><prefix:element></prefix:element></root>
assertResult(true) {
scalaElem.child.size == 1
}
}
/**
* Queries the Scala XML Elem. Here we see that Scala XML happily allows for querying
* a non-namespace-well-formed XML tree. It allows for querying the child element,
* returning the correct prefix, but returning null for the namespace. Indeed, the
* namespace is absent, due to the non-namespace-well-formedness, but this is not
* "XML querying", if we consider namespace-well-formedness essential for XML.
*/
@Test def testQueryScalaXmlElemForWrongXml(): Unit = {
val scalaElem =
<root><prefix:element></prefix:element></root>
assertResult("root") {
scalaElem.label
}
assertResult(null) {
scalaElem.prefix
}
assertResult(null) {
scalaElem.namespace
}
val child = scalaElem.child.head.asInstanceOf[scala.xml.Elem]
assertResult("element") {
child.label
}
assertResult("prefix") {
child.prefix
}
assertResult(null) {
child.namespace
}
}
/**
* Tries to query a wrapper around the Scala XML Elem, which only fails once an incorrect
* Scope is used under the hood.
*/
@Test def testQueryWrappedScalaXmlElemForWrongXml(): Unit = {
val scalaElem =
<root><prefix:element></prefix:element></root>
val elm = ScalaXmlElem(scalaElem)
assertResult(QName("root")) {
elm.qname
}
assertResult(EName("root")) {
elm.resolvedName
}
val childElem = elm.findAllChildElems.head
assertResult(QName("prefix", "element")) {
childElem.qname
}
intercept[RuntimeException] {
childElem.resolvedName
}
}
/**
* Tries to convert the Scala XML Elem to a yaidom Elem, which fails, as expected.
*/
@Test def testTryToConvertScalaXmlElemForWrongXml(): Unit = {
val scalaElem =
<root><prefix:element></prefix:element></root>
intercept[RuntimeException] {
ScalaXmlConversions.convertToElem(scalaElem)
}
}
// The same test methods for the second non-namespace-well-formed XML.
/**
* Tries to create a DOM tree for the (wrong) 2nd XML. It fails, as expected.
*/
@Test def testTryCreatingDomForWrongXml2(): Unit = {
val dbf = DocumentBuilderFactory.newInstance()
val db = dbf.newDocumentBuilder()
val d = db.newDocument()
intercept[DOMException] {
val linkbaseRefElm = d.createElementNS(null, "link:linkbaseRef")
d.appendChild(linkbaseRefElm)
}
}
/**
* Tries to parse the 2nd XML into a yaidom Elem (via SAX), but fails, as expected.
*/
@Test def testTryParsingWrongXml2ViaSax(): Unit = {
val parser = DocumentParserUsingSax.newInstance
intercept[SAXParseException] {
parser.parse(new ByteArrayInputStream(wrongXml2.getBytes("UTF-8")))
}
}
/**
* Tries to parse the 2nd XML into a yaidom Elem (via StAX), but fails, as expected.
*/
@Test def testTryParsingWrongXml2ViaStax(): Unit = {
val parser = DocumentParserUsingStax.newInstance
intercept[Exception] {
parser.parse(new ByteArrayInputStream(wrongXml2.getBytes("UTF-8")))
}
}
/**
* Tries to parse the 2nd XML into a yaidom Elem (via DOM), but fails, as expected.
*/
@Test def testTryParsingWrongXml2ViaDom(): Unit = {
val parser = DocumentParserUsingDom.newInstance
intercept[SAXParseException] {
parser.parse(new ByteArrayInputStream(wrongXml2.getBytes("UTF-8")))
}
}
/**
* Tries to parse the 2nd XML into a yaidom Elem (via DOM LS), but fails, as expected.
*/
@Test def testTryParsingWrongXml2ViaDomLS(): Unit = {
val parser = DocumentParserUsingDomLS.newInstance
intercept[RuntimeException] {
parser.parse(new ByteArrayInputStream(wrongXml2.getBytes("UTF-8")))
}
}
/**
* Tries to parse the 2nd XML into a yaidom Elem again, but fails, as expected.
* This time the underlying parser is not namespace-aware, but still yaidom Elem
* creation fails.
*/
@Test def testTryParsingWrongXml2Again(): Unit = {
val spf = SAXParserFactory.newInstance
spf.setNamespaceAware(false)
val parser = DocumentParserUsingSax.newInstance(spf)
// SAX parsing succeeds, but yaidom Elem creation does not
intercept[RuntimeException] {
parser.parse(new ByteArrayInputStream(wrongXml2.getBytes("UTF-8")))
}
}
/**
* Tries to parse the 2nd XML using scala.xml.XML, but succeeds, unexpectedly.
*
* A fatal error is reported, but the exception is "eaten", returning a scala.xml.Elem, which is
* not namespace-well-formed.
*/
@Test def testTryParsingWrongXml2UsingScalaXml(): Unit = {
val scalaElem = scala.xml.XML.load(new ByteArrayInputStream(wrongXml2.getBytes("UTF-8")))
assertResult("link") {
scalaElem.prefix
}
assertResult("linkbaseRef") {
scalaElem.label
}
assertResult(4) {
scalaElem.attributes.size
}
// Now try to query for the non-namespace-well-formed XLink attributes ...
// Also: The MetaData API is quite cumbersome, just like the Node and NodeSeq API
}
/**
* Creates an ElemBuilder for the 2nd XML, which succeeds. After all, ElemBuilders are
* only builders of XML, and do not claim to be valid namespace-well-formed XML
* themselves. Indeed, ElemBuilders can only be queried for qualified names, but not
* for expanded names.
*/
@Test def testCreateElemBuilderForWrongXml2(): Unit = {
import NodeBuilder._
val elemBuilder =
emptyElem(
QName("link:linkbaseRef"),
Vector(
QName("xlink:arcrole") -> "http://www.w3.org/1999/xlink/properties/linkbase",
QName("xlink:href") -> "my-lab-en.xml",
QName("xlink:role") -> "http://www.xbrl.org/2003/role/labelLinkbaseRef",
QName("xlink:type") -> "simple"))
assertResult(false) {
elemBuilder.canBuild(Scope.Empty)
}
val nsXLink = "http://www.w3.org/1999/xlink"
val nsLink = "http://www.xbrl.org/2003/linkbase"
assertResult(false) {
elemBuilder.canBuild(Scope.from("xlink" -> nsXLink))
}
assertResult(true) {
elemBuilder.canBuild(Scope.from("xlink" -> nsXLink, "link" -> nsLink))
}
}
/**
* Tries to create an Elem for the 2nd XML, without passing the
* correct parent scope. This fails, as expected.
*/
@Test def testTryCreatingElemForWrongXml2(): Unit = {
import Node._
intercept[RuntimeException] {
emptyElem(
QName("link:linkbaseRef"),
Vector(
QName("xlink:arcrole") -> "http://www.w3.org/1999/xlink/properties/linkbase",
QName("xlink:href") -> "my-lab-en.xml",
QName("xlink:role") -> "http://www.xbrl.org/2003/role/labelLinkbaseRef",
QName("xlink:type") -> "simple"),
Scope.Empty)
}
}
/**
* Tries to convert an ElemBuilder for the 2nd XML into an Elem, this time passing the
* correct parent scope. This succeeds, as expected, and so do the subsequent queries.
*/
@Test def testQueryElemForFixedWrongXml2(): Unit = {
import NodeBuilder._
val elemBuilder =
emptyElem(
QName("link:linkbaseRef"),
Vector(
QName("xlink:arcrole") -> "http://www.w3.org/1999/xlink/properties/linkbase",
QName("xlink:href") -> "my-lab-en.xml",
QName("xlink:role") -> "http://www.xbrl.org/2003/role/labelLinkbaseRef",
QName("xlink:type") -> "simple"))
val nsXLink = "http://www.w3.org/1999/xlink"
val nsLink = "http://www.xbrl.org/2003/linkbase"
val elm = elemBuilder.build(Scope.from("xlink" -> nsXLink, "link" -> nsLink))
val elemENames = elm.findAllElemsOrSelf.map(_.resolvedName)
val rootAttrENames = elm.resolvedAttributes.map(_._1)
assertResult(List(EName(nsLink, "linkbaseRef"))) {
elemENames
}
assertResult(List(EName(nsXLink, "arcrole"), EName(nsXLink, "href"), EName(nsXLink, "role"), EName(nsXLink, "type"))) {
rootAttrENames
}
assertResult(true) {
elm.attributeOption(EName(nsXLink, "type")).isDefined
}
assertResult(false) {
elm.attributeOption(EName(None, "type")).isDefined
}
}
/**
* Tries to create a Scala XML Elem for the 2nd XML, which succeeds (although it should not succeed).
*/
@Test def testCreateScalaXmlElemForWrongXml2(): Unit = {
val scalaElem =
<link:linkbaseRef xlink:arcrole="http://www.w3.org/1999/xlink/properties/linkbase" xlink:href="my-lab-en.xml" xlink:role="http://www.xbrl.org/2003/role/labelLinkbaseRef" xlink:type="simple"/>
assertResult(true) {
scalaElem.child.size == 0
}
}
/**
* Queries the 2nd Scala XML Elem. Here we see that Scala XML happily allows for querying
* a non-namespace-well-formed XML tree. It allows for querying the child element,
* returning the correct prefix, but returning null for the namespace. Indeed, the
* namespace is absent, due to the non-namespace-well-formedness, but this is not
* "XML querying", if we consider namespace-well-formedness essential for XML.
*/
@Test def testQueryScalaXmlElemForWrongXml2(): Unit = {
val scalaElem =
<link:linkbaseRef xlink:arcrole="http://www.w3.org/1999/xlink/properties/linkbase" xlink:href="my-lab-en.xml" xlink:role="http://www.xbrl.org/2003/role/labelLinkbaseRef" xlink:type="simple"/>
assertResult("linkbaseRef") {
scalaElem.label
}
assertResult("link") {
scalaElem.prefix
}
assertResult(null) {
scalaElem.namespace
}
val nsXLink = "http://www.w3.org/1999/xlink"
val nsLink = "http://www.xbrl.org/2003/linkbase"
assertResult(None) {
scalaElem.attribute(nsXLink, "arcrole")
}
assertResult(Some("http://www.w3.org/1999/xlink/properties/linkbase")) {
scalaElem.attribute(null, "arcrole").map(_.text)
}
}
/**
* Tries to query a wrapper around the 2nd Scala XML Elem, which only fails once an incorrect
* Scope is used under the hood.
*/
@Test def testQueryWrappedScalaXmlElemForWrongXml2(): Unit = {
val scalaElem =
<link:linkbaseRef xlink:arcrole="http://www.w3.org/1999/xlink/properties/linkbase" xlink:href="my-lab-en.xml" xlink:role="http://www.xbrl.org/2003/role/labelLinkbaseRef" xlink:type="simple"/>
val elm = ScalaXmlElem(scalaElem)
val nsXLink = "http://www.w3.org/1999/xlink"
assertResult(QName("link:linkbaseRef")) {
elm.qname
}
intercept[RuntimeException] {
elm.resolvedName
}
intercept[RuntimeException] {
elm.attributeOption(EName(nsXLink, "type"))
}
}
/**
* Tries to convert the 2nd Scala XML Elem to a yaidom Elem, which fails, as expected.
*/
@Test def testTryToConvertScalaXmlElemForWrongXml2(): Unit = {
val scalaElem =
<link:linkbaseRef xlink:arcrole="http://www.w3.org/1999/xlink/properties/linkbase" xlink:href="my-lab-en.xml" xlink:role="http://www.xbrl.org/2003/role/labelLinkbaseRef" xlink:type="simple"/>
intercept[RuntimeException] {
ScalaXmlConversions.convertToElem(scalaElem)
}
}
// For comparing XML, yaidom's resolved elements are a good basis. For example, attribute
// order is non-existing in resolved elements. Scala XML does not consider two elements
// the same if only the attribute order is different.
// See http://stackoverflow.com/questions/4401702/scala-xml-loadstring-vs-literal-expression.
/**
* For comparing XML, yaidom offers resolved elements as a basis. In resolved elements,
* attribute order does not play any role.
*/
@Test def testEqualityIfAttributeOrderDiffers(): Unit = {
import Node._
val ns = "http://www.w3.org/1999/xhtml"
val elm1 =
emptyElem(
QName("link"),
Vector(QName("href") -> "/css/main.css", QName("rel") -> "stylesheet", QName("type") -> "text/css"),
Scope.from("" -> ns))
val elm2 =
emptyElem(
QName("link"),
Vector(QName("rel") -> "stylesheet", QName("href") -> "/css/main.css", QName("type") -> "text/css"),
Scope.from("" -> ns))
assertResult(false) {
elm1 == elm2
}
assertResult(true) {
resolved.Elem(elm1) == resolved.Elem(elm2)
}
}
/**
* In Scala XML, prefixes are relevant for XML equality comparisons.
*/
@Test def testScalaXmlEqualityIfPrefixDiffers(): Unit = {
val ns = "http://www.w3.org/1999/xhtml"
val xml1 =
<link xmlns={ ns } href="/css/main.css" rel="stylesheet" type="text/css"/>
val xml2 =
<link xmlns={ ns } href="/css/main.css" rel="stylesheet" type="text/css"/>
val xml3 =
<h:link xmlns:h={ ns } href="/css/main.css" rel="stylesheet" type="text/css"/>
assertResult(true) {
xml1 == xml2
}
assertResult(false) {
xml1 == xml3
}
}
/**
* For comparing XML, yaidom offers resolved elements as a basis. In resolved elements,
* prefixes are irrelevant, in contrast to namespace URIs.
*/
@Test def testEqualityIfPrefixDiffers(): Unit = {
import Node._
val nsXLink = "http://www.w3.org/1999/xlink"
val nsLink = "http://www.xbrl.org/2003/linkbase"
val scope1 = Scope.from("xlink" -> nsXLink, "link" -> nsLink)
val elm1 =
emptyElem(
QName("link:linkbaseRef"),
Vector(
QName("xlink:arcrole") -> "http://www.w3.org/1999/xlink/properties/linkbase",
QName("xlink:href") -> "my-lab-en.xml",
QName("xlink:role") -> "http://www.xbrl.org/2003/role/labelLinkbaseRef",
QName("xlink:type") -> "simple"),
scope1)
val scope2 = Scope.from("xl" -> nsXLink, "lb" -> nsLink)
val elm2 =
emptyElem(
QName("lb:linkbaseRef"),
Vector(
QName("xl:arcrole") -> "http://www.w3.org/1999/xlink/properties/linkbase",
QName("xl:href") -> "my-lab-en.xml",
QName("xl:role") -> "http://www.xbrl.org/2003/role/labelLinkbaseRef",
QName("xl:type") -> "simple"),
scope2)
val scope3 = Scope.from("xl" -> nsXLink, "" -> nsLink)
val elm3 =
emptyElem(
QName("linkbaseRef"),
Vector(
QName("xl:arcrole") -> "http://www.w3.org/1999/xlink/properties/linkbase",
QName("xl:href") -> "my-lab-en.xml",
QName("xl:role") -> "http://www.xbrl.org/2003/role/labelLinkbaseRef",
QName("xl:type") -> "simple"),
scope3)
assertResult(false) {
elm1 == elm2
}
assertResult(true) {
resolved.Elem(elm1) == resolved.Elem(elm2)
}
assertResult(false) {
elm1 == elm3
}
assertResult(true) {
resolved.Elem(elm1) == resolved.Elem(elm3)
}
}
// TODO Pattern matching. See http://www.codecommit.com/blog/scala/working-with-scalas-xml-support.
// Also see discussion http://scala-language.1934581.n4.nabble.com/Namespace-support-in-XML-patterns-td2006894.html.
// Or see http://alvinalexander.com/scala/using-match-expressions-with-xml-in-scala.
}
| EBPI/yaidom | src/test/scala/nl/ebpi/yaidom/integrationtest/TheCaseForYaidomTest.scala | Scala | apache-2.0 | 23,921 |
/*
* Nodes that affect control flow.
*/
package see.nodes
import see.EvalError
import see.ResultException
import see.Scope
import see.SeeException
import see.values.Bool
import see.values.Val
// Node for the conditional operator. This takes three parameters (p1 ? p2 : p3)
private[see] object Condition extends Factory2 {
override def apply(tc: Node, fc: Node) = new ConditionP(tc, fc)
}
private[see] class ConditionP(val ifTrue: Node, val ifFalse: Node)
extends Proto {
override def precedence = PREC.Condition
override def finish(n: Node) = Some(new Condition(n, ifTrue, ifFalse))
}
private[see] class Condition(c: Node, val ifTrue: Node, val ifFalse: Node)
extends Atom(c) {
override def evalIn(s: Scope): Val = {
val condVal = s.exec(opd)
if (condVal.toBool)
ifTrue evalIn s
else
ifFalse evalIn s
}
override def simplifyIn(s: Scope) = {
val c = opd.simplifyIn(s)
c match {
case Constant(v) => if (v.toBool) ifTrue.simplifyIn(s)
else ifFalse.simplifyIn(s)
case _ => new Condition(
c, ifTrue.simplifyIn(s), ifFalse.simplifyIn(s) )
}
}
override def isDefinedIn(s: Scope) = {
if (opd isDefinedIn s) {
if ((opd evalIn s).toBool) ifTrue isDefinedIn s
else ifFalse isDefinedIn s
}
else false
}
override def toString() = "[" + opd + " ? " + ifTrue + " : " + ifFalse + "]"
}
// Node for the loop operator.
// The loop body is executed repeatedly while the condition evaluates to true,
// and the result of the last body evaluation is returned.
// If the condition never evaluates to true, the default node is evaluated
// exactly once and its result is returned instead.
// This takes three parameters (p1 ?? p2 : p3)
private[see] object Loop extends Factory2 {
override def apply(body: Node, dflt: Node) = new LoopP(body, dflt)
}
private[see] class LoopP(val body: Node, val dflt: Node) extends Proto {
override def precedence = PREC.Condition
override def finish(n: Node) = Some(new Loop(n, body, dflt))
}
private[see] class Loop(c: Node, val body: Node, val dflt: Node)
extends Atom(c) {
override def evalIn(s: Scope): Val = {
while (s.exec(opd).toBool) {
val result = s.exec(body) // required here to ensure execution
if(!s.exec(opd).toBool)
return result
}
dflt evalIn s
}
override def simplifyIn(s: Scope) = {
val c = opd.simplifyIn(s)
c match {
case Constant(Bool(false)) => dflt.simplifyIn(s)
case Constant(Bool(true)) =>
throw new SeeException("Infinite loop detected.")
case _ => new Loop(
c, body.simplifyIn(s), dflt.simplifyIn(s))
}
}
override def isDefinedIn(s: Scope) = {
if (opd isDefinedIn s) {
if ((opd evalIn s).toBool) body isDefinedIn s
else dflt isDefinedIn s
}
else false
}
override def toString() = "[" + opd + " ?? " + body + " : " + dflt + "]"
}
// Asserts a certain condition. Throws eval error if condition fails.
private[see] object Assertion extends Factory {
override def apply(res: Node) = new AssertionP(res)
}
private[see] class AssertionP(val res: Node) extends Proto {
override def precedence = PREC.Assertion
override def finish(n: Node) = Some(new Assertion(n, res))
}
private[see] class Assertion(c: Node, fail: Node)
extends Atom(c) {
override def isDefinedIn(s: Scope) =
opd.isDefinedIn(s) && fail.isDefinedIn(s)
override def toString = "assert " + opd + " : " + fail
override def evalIn(s: Scope) = {
if (s.exec(opd).toBool) s.getResult
else {
val msg = fail.evalIn(s).toString
throw new EvalError(msg)
}
}
override def simplifyIn(s: Scope) =
new Assertion(opd.simplifyIn(s), fail.simplifyIn(s))
}
// Terminates current block with result if condition is true.
// Roughly equivalent to if (cond) return result;
private[see] object Return extends Factory {
override def apply(res: Node) = new ReturnP(res)
}
private[see] class ReturnP(val res: Node) extends Proto {
override def precedence = PREC.Return
override def finish(n: Node) = Some(new Return(n, res))
}
private[see] class Return(c: Node, result: Node)
extends Atom(c) {
override def isDefinedIn(s: Scope) =
opd.isDefinedIn(s) && result.isDefinedIn(s)
override def toString = "if (" + opd + ") return " + result
override def evalIn(s: Scope) = {
if (s.exec(opd).toBool) throw ResultException(result evalIn s)
else s.getResult
}
override def simplifyIn(s: Scope) =
new Return(opd.simplifyIn(s), result.simplifyIn(s))
}
| RayRacine/scee | src/main/scala/see/nodes/Flow.scala | Scala | bsd-3-clause | 4,453 |
package services
import com.avaje.ebean.Expr
import edu.cmu.lti.ws4j.impl.{Resnik, Lin}
import edu.cmu.lti.ws4j.util.StopWordRemover
import scala.collection.JavaConversions
import JavaConversions._
import edu.cmu.lti.lexical_db.NictWordNet
import models.{User, Match}
/**
* Created by Adam on 11/15/2014.
*/
object MatcherService {
val db = new NictWordNet
val rc = new Resnik(db) // new Resnik Lesk WuPalmer JiangConrath
def matchesJ(in:User):java.util.Map[User, java.lang.Float] = matches(in).map(f => f._1 -> new java.lang.Float(f._2))
def matches(in:User) = {
play.Logger.info("Generating matches for " + in.email)
val alreadyMatched = Match.find.where().eq("targetUser.id", in.id).findList.map(_.getMatchedUser).toList :::
Match.find.where().eq("matchedUser.id", in.id).findList.map(_.getTargetUser).toList
val x = User.find.all().filter(_.id != in.id).map { u => // could add user weightings easily here
u -> List((u.bio, in.bio), (u.interests, in.interests), (u.educationalField, in.educationalField), (u.employmentField, in.employmentField)).foldLeft(0f) { (score, text) =>
score + docSim(text._1, text._2)
}
}.toMap.filter(f => alreadyMatched.find(u => u.id == f._1.id).isEmpty)
play.Logger.info("Finished generating matches for " + in.email)
x
}
val sw = StopWordRemover.getInstance
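// docSim sums WS4J relatedness scores over every pair of stop-word-filtered tokens
// from the two texts, skipping pairs of identical tokens, so its cost grows with
// the product of the two document lengths.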
def docSim(d1:String,d2:String) = {
val x= sw.removeStopWords(d1.split(" ")).toList.foldLeft(0f) { (score, d1w) =>
score + sw.removeStopWords(d2.split(" ")).toList.foldLeft(0f) { (score2, d2w) =>
if (!d1w.equals(d2w)) {
val y = rc.calcRelatednessOfWords(d1w, d2w)
score2 + y.toFloat
} else score2
}
}
//play.Logger.info(d1 + "\\n" + d2 + "\\n\\t----------### " + x + " ###----------")
x
}
}
| kylewetherald/blindlove | app/services/MatcherService.scala | Scala | mit | 1,835 |
//     Project: smath
// Module: API / ode
// Description: Interface for first-order ODE systems
//
// Copyright (c) 2015 Johannes Kastner <[email protected]>
// Distributed under the MIT License (see included file LICENSE)
package biz.enef.smath.ode
import biz.enef.smath.ArrayX
import biz.enef.smath.linear.MatD
/**
* Describes a first-order ODE system to be solved numerically.
*/
trait FirstOrderSystem[T] {
/**
* Number of equations
*/
def dimension: Int
/**
* Compute the current time derivative of the state vector.
*
* @param t current value of the independent time variable
* @param y vector containing the current value of the state vector
* @param ydot output vector that must contain the computed derivative on return
*/
def computeDerivative(t: T, y: ArrayX[T], ydot: ArrayX[T]) : Unit
}
trait FirstOrderSystemD extends FirstOrderSystem[Double]
/**
* A linear first-order ODE system:
* <br/>
* `ydot = M*y`
* <br/>
* where `M` is a constant `n*n` matrix.
*
* @param M matrix with constant ODE coefficients
*/
case class LinearFirstOrderSystemD(M: MatD) extends FirstOrderSystemD {
assert(M.rows==M.cols)
override val dimension: Int = M.rows
@inline
override def computeDerivative(t: Double, y: ArrayX[Double], ydot: ArrayX[Double]): Unit = M.operate(y,ydot)
}
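// Worked example (plain math, no additional API assumed): the damped oscillator
// y'' = -k*y - c*y' becomes a first-order system in the state vector (y, v):
//   ydot = M * (y, v)  with  M = ( (0, 1), (-k, -c) )
// so a LinearFirstOrderSystemD would wrap that constant 2x2 matrix M.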
| jokade/smath | shared/src/main/scala/biz/enef/smath/ode/FirstOrderSystem.scala | Scala | mit | 1,352 |
package svstm.transactions
import scala.concurrent.stm.InTxn
import scala.concurrent.stm.stubs.StubInTxn
import scala.concurrent.stm.svstm.SVSTMTxnExecutor
import svstm.vbox.VBox
object Transaction {
def apply(readOnly: Boolean = false, parent: Transaction = null): Transaction = {
(parent, readOnly) match {
case (null, true) => new TopLevelReadTransaction(SVSTMTxnExecutor.mostRecentNumber.get())
case (null, false) => new TopLevelReadWriteTransaction(SVSTMTxnExecutor.mostRecentNumber.get())
case (txn, _) => txn.makeNestedTransaction()
}
}
}
abstract class Transaction(val number: Int, val parent: Transaction = null) extends InTxn with StubInTxn {
def this(parent: Transaction) = this(parent.number, parent)
def getBoxValue[T](vbox: VBox[T]): T
def setBoxValue[T](vbox: VBox[T], value: T)
def doCommit()
def makeNestedTransaction(): Transaction
def hasParent() = parent != null
def isTopLevel() = !hasParent()
def atomic[A](block: InTxn => A): A = {
val result = block(this)
this.doCommit()
result
}
}
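// A minimal usage sketch (hypothetical body, not part of the original file):
//   val txn = Transaction(readOnly = false) // top-level read-write transaction
//   txn.atomic { inTxn =>
//     // read and write VBox values via getBoxValue / setBoxValue
//   }
// atomic runs the block against this transaction and then calls doCommit().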
| fcristovao/SVSTM | src/main/scala/svstm/transactions/Transaction.scala | Scala | apache-2.0 | 1,050 |
//===========================================================================
// Copyright 2014 Delving B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//===========================================================================
package mapping
import com.rockymadden.stringmetric.similarity.RatcliffObershelpMetric
import mapping.SkosVocabulary._
import org.apache.jena.rdf.model.{Model, Resource}
import play.api.Logger
import play.api.libs.json.{JsObject, Json, Writes}
import triplestore.GraphProperties._
import triplestore.{GraphProperties, TripleStore}
import scala.collection.JavaConversions._
import scala.collection.mutable
import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
object SkosVocabulary {
val IGNORE_BRACKET = """ *[(].*[)]$""".r
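// e.g. IGNORE_BRACKET.replaceFirstIn("Amsterdam (city)", "") == "Amsterdam",
// so a trailing parenthesized qualifier is stripped before labels are compared.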
def getLanguageLabel(labels: List[Label], languageOpt: Option[String]): Option[Label] = {
val find: Option[Label] = languageOpt.flatMap(lang => labels.find(_.language == lang))
if (find.isDefined) find else labels.headOption
}
def getLabels(resource: Resource, propertyName: String, preferred: Boolean, model: Model): List[Label] = {
val property = model.getProperty(GraphProperties.SKOS, propertyName)
model.listStatements(resource, property, null).map(_.getObject.asLiteral()).map(
literal => Label(preferred = preferred, literal.getLanguage, literal.getString)
).toList
}
def getPrefLabels(resource: Resource, model: Model) = getLabels(resource, "prefLabel", preferred = true, model)
def getAltLabels(resource: Resource, model: Model) = getLabels(resource, "altLabel", preferred = false, model)
implicit val writesLabelSearch = new Writes[LabelSearch] {
def writeQuery(query: LabelQuery) = {
val language: String = query.languageOpt.getOrElse("")
Json.obj(
"language" -> language,
"sought" -> query.sought,
"count" -> query.count
)
}
def writeProximityResult(result: ProximityResult, languageOpt: Option[String]): JsObject = {
val narrowerLabels: Seq[Label] = result.concept.narrower.flatMap(_.getPrefLabel(Some(result.label.language)))
val broaderLabels: Seq[Label] = result.concept.broader.flatMap(_.getPrefLabel(Some(result.label.language)))
Json.obj(
"proximity" -> result.proximity,
"preferred" -> result.label.preferred,
"label" -> result.label.text,
"prefLabel" -> result.prefLabel.text,
"uri" -> result.concept.resource.toString,
// "conceptScheme" -> result.concept.vocabulary.name,
"attributionName" -> result.concept.vocabulary.spec,
"narrower" -> narrowerLabels.map(_.text),
"broader" -> broaderLabels.map(_.text)
)
}
def writes(search: LabelSearch) = Json.obj(
"query" -> writeQuery(search.query),
"results" -> search.results.map(writeProximityResult(_, search.query.languageOpt))
)
}
case class LabelSearch(query: LabelQuery, results: List[ProximityResult])
case class LabelQuery(sought: String, languageOpt: Option[String], count: Int)
case class Label(preferred: Boolean, language: String, var text: String = "") {
override def toString: String = s"""${if (preferred) "Pref" else "Alt"}Label[$language]("$text")"""
}
case class ProximityResult(label: Label, prefLabel: Label, proximity: Double, concept: Concept)
case class Concept(vocabulary: SkosVocabulary, resource: Resource, conceptMap: mutable.HashMap[String, Concept], model: Model) {
def getRelated(resource: Resource, propertyName: String, model: Model): Seq[Resource] = {
val property = model.getProperty(GraphProperties.SKOS, propertyName)
model.listStatements(resource, property, null).map(_.getObject.asResource()).toSeq
}
conceptMap.put(resource.getURI, this)
lazy val prefLabels = getPrefLabels(resource, model)
lazy val altLabels = getAltLabels(resource, model)
lazy val labels = prefLabels ++ altLabels
lazy val narrower: Seq[Concept] = getRelated(resource, "narrower", model).flatMap(resource => conceptMap.get(resource.getURI))
lazy val broader: Seq[Concept] = getRelated(resource, "broader", model).flatMap(resource => conceptMap.get(resource.getURI))
lazy val frequency: Option[Int] = {
val frequencyValue = model.listObjectsOfProperty(resource, model.getProperty(skosFrequency)).toList.headOption
frequencyValue.map(_.asLiteral().getInt)
}
lazy val fieldProperty: Option[String] = {
val fieldPropertyValue = model.listObjectsOfProperty(resource, model.getProperty(skosField.uri)).toList.headOption
fieldPropertyValue.map(_.asResource().toString)
}
lazy val fieldPropertyTag: Option[String] = {
val fieldPropertyTagValue = model.listObjectsOfProperty(resource, model.getProperty(skosFieldTag.uri)).toList.headOption
fieldPropertyTagValue.map(_.asLiteral().toString)
}
def getPrefLabel(languageOpt: Option[String]) = getLanguageLabel(prefLabels, languageOpt)
def getAltLabel(languageOpt: Option[String]) = getLanguageLabel(altLabels, languageOpt)
def search(sought: String, languageOpt: Option[String]): Option[ProximityResult] = {
val toSearch: List[Label] = languageOpt.map(lang => labels.filter(_.language == lang)).getOrElse(labels)
val judged = toSearch.map { label =>
val text = IGNORE_BRACKET.replaceFirstIn(label.text.toLowerCase, "")
(RatcliffObershelpMetric.compare(sought, text), label)
}
val prefLabel = getPrefLabel(languageOpt).getOrElse(Label(preferred = true, "??", "No prefLabel!"))
val searchResults = judged.filter(_._1.isDefined).map(p => ProximityResult(p._2, prefLabel, p._1.get, this))
searchResults.sortBy(-1 * _.proximity).headOption
}
override def toString: String = s"""
|Concept($resource)
| Labels: ${labels.mkString(",")}
| Narrower: ${narrower.map(_.prefLabels.head).mkString(",")}
| Broader: ${broader.map(_.prefLabels.head).mkString(",")}
""".stripMargin.trim
}
}
case class SkosVocabulary(spec: String, graphName: String)(implicit ec: ExecutionContext, ts: TripleStore) {
lazy val futureModel = ts.dataGet(graphName)
futureModel.onFailure {
case e: Throwable => Logger.warn(s"No data found for skos vocabulary $graphName", e)
}
futureModel.onSuccess {
case x => Logger.debug(s"Loaded $graphName")
}
lazy val m: Model = Await.result(futureModel, 60.seconds)
private val conceptMap = new mutable.HashMap[String, Concept]()
lazy val concepts: List[Concept] = {
val typeProperty = m.getProperty(rdfType)
val conceptResource = m.getResource(s"${SKOS}Concept")
val subjects = m.listSubjectsWithProperty(typeProperty, conceptResource).toSeq
subjects.map(statement => Concept(this, statement, conceptMap, m))
}.toList
def search(sought: String, count: Int, languageOpt: Option[String]): LabelSearch = {
val cleanSought = IGNORE_BRACKET.replaceFirstIn(sought, "")
val judged = concepts.flatMap(_.search(sought.toLowerCase, languageOpt))
val results = judged.sortBy(-1 * _.proximity).take(count).toList
LabelSearch(LabelQuery(cleanSought, languageOpt, count), results)
}
lazy val uriLabelMap: Map[String, String] = concepts.map(c =>
c.resource.toString -> c.getPrefLabel(None).map(_.text).getOrElse("No pref label!")
).toMap
lazy val languages: List[String] = concepts.map(c => c.labels.map(_.language)).flatten.sorted.distinct.toList
override def toString: String = graphName
}
| delving/narthex | app/mapping/SkosVocabulary.scala | Scala | apache-2.0 | 8,098 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import collection.GenTraversable
import SharedHelpers._
import Matchers._
import org.scalactic.Prettifier
import org.scalactic.ArrayHelper.deep
class NoneOfContainMatcherSpec extends FunSpec {
private val prettifier = Prettifier.default
describe("noneOf ") {
def checkStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int): Unit = {
e.message should be (Some(FailureMessages.containedAtLeastOneOf(prettifier, left, UnquotedString(right.mkString(", ")))))
e.failedCodeFileName should be (Some("NoneOfContainMatcherSpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
it("should succeed when left List contains elements available in right List") {
List(1, 2, 3, 4, 5) should contain noneOf (6, 7, 8)
Array(1, 2, 3, 4, 5) should contain noneOf (6, 7, 8)
Set(1, 2, 3, 4, 5) should contain noneOf (6, 7, 8)
Map(1 -> "one", 2 -> "two", 3 -> "three", 4 -> "four", 5 -> "five") should contain noneOf (6 -> "six", 7 -> "seven", 8 -> "eight")
// SKIP-SCALATESTJS,NATIVE-START
javaList(1, 2, 3, 4, 5) should contain noneOf (6, 7, 8)
javaSet(1, 2, 3, 4, 5) should contain noneOf (6, 7, 8)
javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three"), Entry(4, "four"), Entry(5, "five")) should contain noneOf (Entry(6, "six"), Entry(7, "seven"), Entry(8, "eight"))
// SKIP-SCALATESTJS,NATIVE-END
}
it("should succeed when left list contains none of right list") {
List(1, 2, 3) should contain noneOf (7, 8)
Array(1, 2, 3) should contain noneOf (7, 8)
Set(1, 2, 3) should contain noneOf (7, 8)
Map(1 -> "one", 2 -> "two", 3 -> "three") should contain noneOf (7 -> "seven", 8 -> "eight")
// SKIP-SCALATESTJS,NATIVE-START
javaList(1, 2, 3) should contain noneOf (7, 8)
javaSet(1, 2, 3) should contain noneOf (7, 8)
javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three")) should contain noneOf (Entry(7, "seven"), Entry(8, "eight"))
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw NotAllowedException when noneOf contains duplicate element") {
val e1 = intercept[exceptions.NotAllowedException] {
List(1, 2, 3) should contain noneOf (6, 8, 6)
}
e1.getMessage() should be (FailureMessages.noneOfDuplicate)
val e2 = intercept[exceptions.NotAllowedException] {
Set(1, 2, 3) should contain noneOf (6, 8, 6)
}
e2.getMessage() should be (FailureMessages.noneOfDuplicate)
val e3 = intercept[exceptions.NotAllowedException] {
Array(1, 2, 3) should contain noneOf (6, 8, 6)
}
e3.getMessage() should be (FailureMessages.noneOfDuplicate)
}
it("should throw TestFailedException with correct stack depth and message when left List contains element in right List") {
val left1 = List(1, 2, 3)
val e1 = intercept[exceptions.TestFailedException] {
left1 should contain noneOf (0, 3, 8)
}
checkStackDepth(e1, left1, deep(Array(0, 3, 8)), thisLineNumber - 2)
val left2 = Array(1, 2, 3)
val e2 = intercept[exceptions.TestFailedException] {
left2 should contain noneOf (0, 3, 8)
}
checkStackDepth(e2, left2, deep(Array(0, 3, 8)), thisLineNumber - 2)
val left3 = Map(1 -> "one", 2 -> "two", 3 -> "three")
val e3 = intercept[exceptions.TestFailedException] {
left3 should contain noneOf (0 -> "zero", 3 -> "three", 8 -> "eight")
}
checkStackDepth(e3, left3, Array(0 -> "zero", 3 -> "three", 8 -> "eight"), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left4 = javaList(1, 2, 3)
val e4 = intercept[exceptions.TestFailedException] {
left4 should contain noneOf (0, 3, 8)
}
checkStackDepth(e4, left4, deep(Array(0, 3, 8)), thisLineNumber - 2)
val left5 = javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))
val e5 = intercept[exceptions.TestFailedException] {
left5 should contain noneOf (Entry(0, "zero"), Entry(3, "three"), Entry(8, "eight"))
}
checkStackDepth(e5, left5, Array(Entry(0, "zero"), Entry(3, "three"), Entry(8, "eight")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
}
describe("not noneOf ") {
def checkStackDepth(e: exceptions.StackDepthException, left: Any, right: GenTraversable[Any], lineNumber: Int): Unit = {
val leftText = FailureMessages.decorateToStringValue(prettifier, left)
e.message should be (Some(FailureMessages.didNotContainAtLeastOneOf(prettifier, left, UnquotedString(right.mkString(", ")))))
e.failedCodeFileName should be (Some("NoneOfContainMatcherSpec.scala"))
e.failedCodeLineNumber should be (Some(lineNumber))
}
it("should succeed when left List contains element in right List") {
List(1, 2, 3) should not contain noneOf (0, 2, 8)
Array(1, 2, 3) should not contain noneOf (0, 2, 8)
Set(1, 2, 3) should not contain noneOf (0, 2, 8)
Map(1 -> "one", 2 -> "two", 3 -> "three") should not contain noneOf (0 -> "zero", 2 -> "two", 8 -> "eight")
// SKIP-SCALATESTJS,NATIVE-START
javaList(1, 2, 3) should not contain noneOf (0, 2, 8)
javaSet(1, 2, 3) should not contain noneOf (0, 2, 8)
javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three")) should not contain noneOf (Entry(0, "zero"), Entry(2, "two"), Entry(8, "eight"))
// SKIP-SCALATESTJS,NATIVE-END
}
it("should throw TestFailedException with correct stack depth and message when left List contains only element in right List in same order") {
val left1 = List(1, 2, 3)
val e1 = intercept[exceptions.TestFailedException] {
left1 should not contain noneOf (7, 8, 9)
}
checkStackDepth(e1, left1, deep(Array(7, 8, 9)), thisLineNumber - 2)
val left2 = Array(1, 2, 3)
val e2 = intercept[exceptions.TestFailedException] {
left2 should not contain noneOf (7, 8, 9)
}
checkStackDepth(e2, left2, deep(Array(7, 8, 9)), thisLineNumber - 2)
val left3 = Map(1 -> "one", 2 -> "two", 3 -> "three")
val e3 = intercept[exceptions.TestFailedException] {
left3 should not contain noneOf (7 -> "seven", 8 -> "eight", 9 -> "nine")
}
checkStackDepth(e3, left3, Array(7 -> "seven", 8 -> "eight", 9 -> "nine"), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-START
val left4 = javaList(1, 2, 3)
val e4 = intercept[exceptions.TestFailedException] {
left4 should not contain noneOf (7, 8, 9)
}
checkStackDepth(e4, left4, deep(Array(7, 8, 9)), thisLineNumber - 2)
val left5 = javaMap(Entry(1, "one"), Entry(2, "two"), Entry(3, "three"))
val e5 = intercept[exceptions.TestFailedException] {
left5 should not contain noneOf (Entry(7, "seven"), Entry(8, "eight"), Entry(9, "nine"))
}
checkStackDepth(e5, left5, Array(Entry(7, "seven"), Entry(8, "eight"), Entry(9, "nine")), thisLineNumber - 2)
// SKIP-SCALATESTJS,NATIVE-END
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/NoneOfContainMatcherSpec.scala | Scala | apache-2.0 | 7,752 |
package dao.generic
/**
* Identifiable base for all Strong Entity Model types, i.e. every Strong entity is expected to have a
* primary key column with name "id"
* @tparam PK Primary key type
* @tparam E Actual case class EntityRow type
*/
trait EntityAutoInc[PK, E <: EntityAutoInc[PK, E]] extends Entity[PK] { self: E =>
import shapeless._
import tag.@@
//------------------------------------------------------------------------
// public
//------------------------------------------------------------------------
/**
* Returns the entity with updated id as generated by the database
* @param id The auto increment generated entity id
* @return the entity with updated id as generated by the database
*/
def copyWithNewId(id : PK)(implicit mkLens: MkFieldLens.Aux[E, Symbol @@ Witness.`"id"`.T, PK]) : E = {
(lens[E] >> 'id).set(self)(id)
}
}
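// A minimal usage sketch (UserRow is hypothetical; it assumes Entity[Long] exposes
// the "id" member required by the shapeless field lens):
//   case class UserRow(id: Long, name: String) extends EntityAutoInc[Long, UserRow]
//   val persisted = UserRow(0L, "alice").copyWithNewId(42L) // persisted.id == 42L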
| bravegag/play-authenticate-usage-scala | app/dao/generic/EntityAutoInc.scala | Scala | apache-2.0 | 890 |
package org.concurrency.ch2
import scala.collection._
object SynchronizedPool extends App {
private val tasks = mutable.Queue[() => Unit]()
object Worker extends java.lang.Thread {
setDaemon(true)
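// Guarded block: wait() is called inside a while loop so the worker re-checks the
// queue after every wake-up, which protects against spurious wake-ups.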
def poll() = tasks.synchronized {
while(tasks.isEmpty) tasks.wait()
tasks.dequeue()
}
override def run() = while(true) {
val task = poll()
task()
}
}
Worker.start()
def asynchronous(body: => Unit) = tasks.synchronized {
tasks.enqueue(() => body)
tasks.notify()
}
asynchronous {println("Hello ")}
asynchronous {println("World!")}
java.lang.Thread.sleep(500)
}
| marcos-sb/concurrent-programming-scala | src/main/scala-2.11/org/concurrency/ch2/SynchronizedPool.scala | Scala | apache-2.0 | 628 |
package org.f100ded.play.fakews
import org.specs2.mutable._
class FakeRequestExtractorsSpec extends Specification {
"extractors" should {
"extract GET method" in {
val request = FakeRequest("GET", "http://localhost")
GET.unapply(request) must beSome(request)
POST.unapply(request) must beNone
}
"extract POST method" in {
val request = FakeRequest("POST", "http://localhost")
POST.unapply(request) must beSome(request)
GET.unapply(request) must beNone
}
"extract PUT" in {
val request = FakeRequest("PUT", "http://localhost")
PUT.unapply(request) must beSome(request)
POST.unapply(request) must beNone
}
"extract DELETE" in {
val request = FakeRequest("DELETE", "http://localhost")
DELETE.unapply(request) must beSome(request)
POST.unapply(request) must beNone
}
"extract OPTIONS" in {
val request = FakeRequest("OPTIONS", "http://localhost")
OPTIONS.unapply(request) must beSome(request)
POST.unapply(request) must beNone
}
"extract HEAD" in {
val request = FakeRequest("HEAD", "http://localhost")
HEAD.unapply(request) must beSome(request)
POST.unapply(request) must beNone
}
}
"string interpolation" should {
"extract a variable from FakeRequest url" in {
FakeRequest("GET", "http://localhost/stores/123") match {
case url"http://localhost/stores/$id" => id must beEqualTo("123")
}
}
"extract multiple variables from FakeRequest url" in {
FakeRequest("GET", "http://localhost/stores/123/orders/234/invoices") match {
case url"http://localhost/stores/$storeId/orders/$orderId/invoices" =>
storeId must beEqualTo("123")
orderId must beEqualTo("234")
}
}
"extract a variable with a curly brace from url" in {
FakeRequest("GET", "http://localhost/stores/123/") match {
case url"http://localhost/stores/${id}/" => id must beEqualTo("123")
}
}
}
}
| f100ded/play-fake-ws-standalone | src/test/scala/org/f100ded/play/fakews/FakeRequestExtractorsSpec.scala | Scala | apache-2.0 | 2,023 |
package nak.liblinear
/**
* Configure the options for Liblinear training.
*/
case class LiblinearConfig(
solverType: SolverType = SolverType.L2R_LR,
cost: Double = 1.0,
eps: Double = 0.01,
showDebug: Boolean = false)
/**
* Set up a problem to be solved.
*/
object LiblinearProblem {
def apply(responses: Array[Double], observations: Array[Array[Feature]], numFeats: Int) = {
val problem = new Problem
problem.y = responses
problem.x = observations
problem.l = responses.length
problem.n = numFeats
problem
}
}
/**
* An object to help with solver descriptions.
*/
object Solver {
/**
* The set of all valid solver types.
*/
lazy val solverTypes = Set(
"L2R_LR", "L2R_L2LOSS_SVC_DUAL", "L2R_L2LOSS_SVC", "L2R_L1LOSS_SVC_DUAL",
"MCSVM_CS","L1R_L2LOSS_SVC", "L1R_LR",
"L2R_LR_DUAL", "L2R_L2LOSS_SVR", "L2R_L2LOSS_SVR_DUAL", "L2R_L1LOSS_SVR_DUAL")
/**
* Select the right solver given the textual description.
*/
def apply(solverDescription: String) = solverDescription match {
case "L2R_LR" => SolverType.L2R_LR
case "L2R_L2LOSS_SVC_DUAL" => SolverType.L2R_L2LOSS_SVC_DUAL
case "L2R_L2LOSS_SVC" => SolverType.L2R_L2LOSS_SVC
case "L2R_L1LOSS_SVC_DUAL" => SolverType.L2R_L1LOSS_SVC_DUAL
case "MCSVM_CS" => SolverType.MCSVM_CS
case "L1R_L2LOSS_SVC" => SolverType.L1R_L2LOSS_SVC
case "L1R_LR" => SolverType.L1R_LR
case "L2R_LR_DUAL" => SolverType.L2R_LR_DUAL
case "L2R_L2LOSS_SVR" => SolverType.L2R_L2LOSS_SVR
case "L2R_L2LOSS_SVR_DUAL" => SolverType.L2R_L2LOSS_SVR_DUAL
case "L2R_L1LOSS_SVR_DUAL" => SolverType.L2R_L1LOSS_SVR_DUAL
case invalid => throw new MatchError("No solver with the name " + invalid)
}
}
/**
* Helper functions for working with Liblinear.
*/
object LiblinearUtil {
/**
* Convert tuples into Liblinear Features, basically.
*/
def createLiblinearMatrix(observations: Seq[Seq[(Int,Double)]]): Array[Array[Feature]] =
observations.map { features =>
features.map{ case(a,v) => new FeatureNode(a,v).asInstanceOf[Feature] }.toArray
}.toArray
/**
* Convert tuples into Liblinear Features, basically.
*
* TODO: Condense so there is just one createLiblinearMatrix.
*/
def createLiblinearMatrix(observations: Array[Array[(Int,Float)]]): Array[Array[Feature]] =
observations.map { features => {
features
.sortBy(_._1)
.map{ case(a,v) => new FeatureNode(a,v).asInstanceOf[Feature] }
}}
}
/**
* Train a Liblinear classifier from data.
*
* @author jasonbaldridge
*/
class LiblinearTrainer(config: LiblinearConfig) {
import LiblinearUtil._
if (!config.showDebug) Linear.disableDebugOutput
val param = new Parameter(config.solverType, config.cost, config.eps)
/**
* Train a liblinear model given the responses (the y's), the observations (the x's),
* and the number of features.
*/
def apply(
responses: Array[Double],
observations: Array[Array[Feature]],
numFeatures: Int
): Model = {
val problem = LiblinearProblem(responses, observations, numFeatures)
Linear.train(problem, param)
}
}
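// A minimal usage sketch (toy data, not part of the original file), using only the
// definitions above:
//   val config = LiblinearConfig(solverType = Solver("L2R_LR"), cost = 2.0)
//   val observations = LiblinearUtil.createLiblinearMatrix(
//     Seq(Seq(1 -> 1.0, 2 -> 0.5), Seq(2 -> 1.0)))
//   val trainer = new LiblinearTrainer(config)
//   val model = trainer(Array(0.0, 1.0), observations, numFeatures = 2)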
| scalanlp/nak | src/main/scala/nak/liblinear/LiblinearClassifier.scala | Scala | apache-2.0 | 3,161 |
/*
* BSD License
*
* Copyright (c) 2015, University of Barcelona
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the copyright holder nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.ub.guillermoblascojimenez.gmx.impl
import edu.ub.guillermoblascojimenez.gmx.ClusterGraph
import edu.ub.guillermoblascojimenez.gmx.model.{Factor, Variable}
import org.apache.spark.{SparkContext, SparkConf}
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
/**
* Created by guillermoblascojimenez on 17/12/14.
*/
@RunWith(classOf[JUnitRunner])
class BeliefPropagationTest extends FlatSpec {
val error = 0.000000001
val conf = new SparkConf()
.setAppName("TestGmx")
.setMaster("local[1]")
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.set("spark.ui.enabled", "false")
val a = Variable("a", 2)
val b = Variable("b", 2)
val c = Variable("c", 2)
val d = Variable("d", 2)
val e = Variable("e", 2)
"Belief Propagation" should "compute marginals properly for chains" in {
val sc: SparkContext = new SparkContext(conf)
// FACTORS
val phi1 = Factor(a)
phi1(Map(a -> 0)) = 0.2
phi1(Map(a -> 1)) = 0.8
val phi2 = Factor(a, b)
phi2(Map(a -> 0, b -> 0)) = 0.1
phi2(Map(a -> 1, b -> 0)) = 0.9
phi2(Map(a -> 0, b -> 1)) = 0.7
phi2(Map(a -> 1, b -> 1)) = 0.3
val phi3 = Factor(b, c)
phi3(Map(c -> 0, b -> 0)) = 0.9
phi3(Map(c -> 1, b -> 0)) = 0.1
phi3(Map(c -> 0, b -> 1)) = 0.75
phi3(Map(c -> 1, b -> 1)) = 0.25
    val clusters = Map[Set[Variable], Set[Factor]](
      Set(a) -> Set(phi1),
      Set(a, b) -> Set(phi2),
      Set(b, c) -> Set(phi3)
    )
    val edges = Set[(Set[Variable], Set[Variable])](
      (Set(a), Set(a, b)), // intersection: a
      (Set(a, b), Set(b, c)) // intersection: b
    )
val clusterGraph = ClusterGraph(clusters, edges, sc)
val calibrated = clusterGraph.calibrated(100, 0.00000001)
assert (Set(a, b, c) equals calibrated.variables())
var psi1 = calibrated.factors().find(factor => phi1.scope.equals(factor.scope)).get
var psi2 = calibrated.factors().find(factor => phi2.scope.equals(factor.scope)).get
var psi3 = calibrated.factors().find(factor => phi3.scope.equals(factor.scope)).get
assert (psi1 != null)
assert (psi2 != null)
assert (psi3 != null)
psi1 = psi1.normalized()
psi2 = psi2.normalized()
psi3 = psi3.normalized()
assert(Math.abs(psi1(Map(a -> 0)) - 0.14285714285714285) < error)
assert(Math.abs(psi1(Map(a -> 1)) - 0.8571428571428572) < error)
assert(Math.abs(psi2(Map(a -> 0, b -> 0)) - 0.017857142857142856) < error)
assert(Math.abs(psi2(Map(a -> 1, b -> 0)) - 0.6428571428571429) < error)
assert(Math.abs(psi2(Map(a -> 0, b -> 1)) - 0.125) < error)
assert(Math.abs(psi2(Map(a -> 1, b -> 1)) - 0.21428571428571425) < error)
assert(Math.abs(psi3(Map(b -> 0, c -> 0)) - 0.5946428571428573) < error)
assert(Math.abs(psi3(Map(b -> 1, c -> 0)) - 0.2544642857142857) < error)
assert(Math.abs(psi3(Map(b -> 0, c -> 1)) - 0.06607142857142857) < error)
assert(Math.abs(psi3(Map(b -> 1, c -> 1)) - 0.08482142857142856) < error)
sc.stop()
}
"Belief Propagation" should "compute marginals properly for trees" in {
val sc: SparkContext = new SparkContext(conf)
// FACTORS
val phi1 = Factor(a)
phi1(Map(a -> 0)) = 0.6
phi1(Map(a -> 1)) = 0.4
val phi2 = Factor(b)
phi2(Map(b -> 0)) = 0.2
phi2(Map(b -> 1)) = 0.8
val phi3 = Factor(a, b)
phi3(Map(a -> 0, b -> 0)) = 0.1
phi3(Map(a -> 1, b -> 0)) = 0.9
phi3(Map(a -> 0, b -> 1)) = 0.7
phi3(Map(a -> 1, b -> 1)) = 0.3
val phi4 = Factor(b, c)
phi4(Map(c -> 0, b -> 0)) = 0.9
phi4(Map(c -> 1, b -> 0)) = 0.1
phi4(Map(c -> 0, b -> 1)) = 0.75
phi4(Map(c -> 1, b -> 1)) = 0.25
val phi5 = Factor(e, d)
phi5(Map(e -> 0, d -> 0)) = 0.5
phi5(Map(e -> 1, d -> 0)) = 0.5
phi5(Map(e -> 0, d -> 1)) = 0.9
phi5(Map(e -> 1, d -> 1)) = 0.1
val phi6 = Factor(b, e)
phi6(Map(e -> 0, b -> 0)) = 0.2
phi6(Map(e -> 1, b -> 0)) = 0.8
phi6(Map(e -> 0, b -> 1)) = 0.05
phi6(Map(e -> 1, b -> 1)) = 0.95
val clusters = Map[Set[Variable], Set[Factor]](
Set(a) -> Set(phi1),
Set(b) -> Set(phi2),
Set(a, b) -> Set(phi3),
Set(b, c) -> Set(phi4),
Set(e, d) -> Set(phi5),
Set(b, e) -> Set(phi6)
)
val edges = Set[(Set[Variable], Set[Variable])](
(Set(a) , Set(a, b)), // intersection: a
(Set(b) , Set(a, b)), // intersection: b
(Set(a, b) , Set(b, c)), // intersection: b
(Set(b, c) , Set(b, e)), // intersection: b
(Set(b, e) , Set(e, d)) // intersection: e
)
val clusterGraph = ClusterGraph(clusters, edges, sc)
val calibrated = clusterGraph.calibrated(100, 0.0000001)
assert (Set(a, b, c, d, e) equals calibrated.variables())
var psi1 = calibrated.factors().find(factor => phi1.scope.equals(factor.scope)).get
var psi2 = calibrated.factors().find(factor => phi2.scope.equals(factor.scope)).get
var psi3 = calibrated.factors().find(factor => phi3.scope.equals(factor.scope)).get
var psi4 = calibrated.factors().find(factor => phi4.scope.equals(factor.scope)).get
var psi5 = calibrated.factors().find(factor => phi5.scope.equals(factor.scope)).get
var psi6 = calibrated.factors().find(factor => phi6.scope.equals(factor.scope)).get
assert (psi1 != null)
assert (psi2 != null)
assert (psi3 != null)
assert (psi4 != null)
assert (psi5 != null)
assert (psi6 != null)
psi1 = psi1.normalized()
psi2 = psi2.normalized()
psi3 = psi3.normalized()
psi4 = psi4.normalized()
psi5 = psi5.normalized()
psi6 = psi6.normalized()
assert(Math.abs(psi1(Map(a -> 0)) - 0.6586741889985895) < error)
assert(Math.abs(psi1(Map(a -> 1)) - 0.3413258110014105) < error)
assert(Math.abs(psi2(Map(b -> 0)) - 0.18758815232722154) < error)
assert(Math.abs(psi2(Map(b -> 1)) - 0.8124118476727785) < error)
assert(Math.abs(psi3(Map(a -> 0, b -> 0)) - 0.026798307475317355) < error)
assert(Math.abs(psi3(Map(a -> 1, b -> 0)) - 0.16078984485190417) < error)
assert(Math.abs(psi3(Map(a -> 0, b -> 1)) - 0.6318758815232721) < error)
assert(Math.abs(psi3(Map(a -> 1, b -> 1)) - 0.18053596614950637) < error)
assert(Math.abs(psi4(Map(b -> 0, c -> 0)) - 0.1688293370944994) < error)
assert(Math.abs(psi4(Map(b -> 1, c -> 0)) - 0.6093088857545839) < error)
assert(Math.abs(psi4(Map(b -> 0, c -> 1)) - 0.018758815232722156) < error)
assert(Math.abs(psi4(Map(b -> 1, c -> 1)) - 0.20310296191819463) < error)
assert(Math.abs(psi5(Map(d -> 0, e -> 0)) - 0.056417489421720736) < error)
assert(Math.abs(psi5(Map(d -> 1, e -> 0)) - 0.10155148095909734) < error)
assert(Math.abs(psi5(Map(d -> 0, e -> 1)) - 0.7016925246826515) < error)
assert(Math.abs(psi5(Map(d -> 1, e -> 1)) - 0.14033850493653033) < error)
assert(Math.abs(psi6(Map(b -> 0, e -> 0)) - 0.06911142454160793) < error)
assert(Math.abs(psi6(Map(b -> 1, e -> 0)) - 0.08885754583921013) < error)
assert(Math.abs(psi6(Map(b -> 0, e -> 1)) - 0.11847672778561362) < error)
assert(Math.abs(psi6(Map(b -> 1, e -> 1)) - 0.7235543018335683) < error)
sc.stop()
}
}
|
GuillermoBlasco/gmX
|
src/test/scala/edu/ub/guillermoblascojimenez/gmx/impl/BeliefPropagationTest.scala
|
Scala
|
bsd-3-clause
| 8,810 |
package services
import akka.actor.ActorSystem
import javax.inject.{Inject, Singleton}
import models.{ConversionStatus, StatusString, Task, Work}
import play.api.inject.ApplicationLifecycle
import play.api.{Configuration, Logger}
import org.slf4j.MDC
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future, Promise}
import scala.util.{Failure, Success, Try}
/**
* Service that runs workers in an infinite loop.
*/
@Singleton
class SimpleWorkerService @Inject()(implicit context: ExecutionContext,
configuration: Configuration,
worker: Worker,
poller: Poller,
system: ActorSystem,
stateService: StateService,
appLifecycle: ApplicationLifecycle)
extends WorkerService {
private val logger = Logger(this.getClass)
private val restart = configuration.get[Boolean]("worker.restart")
/** Promise of application shutdown. */
var shutdownPromise: Option[Promise[Unit]] = None
appLifecycle.addStopHook { () =>
logger.info("Shutdown requested")
shutdownPromise
.orElse(Some(Promise[Unit]()))
.map { promise =>
if (promise.isCompleted) {
logger.info("Shutdown promise is already completed, return immediately")
Future.successful(())
} else {
logger.info("Run shutdown process")
Future.sequence(
List(
promise.future.map { v =>
logger.info(s"shutdown promise.future result $v"); v
},
// Future { worker.shutdown() },
poller.unregister().map { v =>
logger.info(s"unregister result $v"); v
}
)
)
}
}
.getOrElse(Future.successful(()))
}
worker.addStopHook { () =>
val promise = Promise[Unit]()
promise.success(())
shutdownPromise = Some(promise)
}
logger.info("Start processing loop")
Await.ready(infiniteLoop(), Duration.Inf)
/** Infinite loop that processes DITA-OT. */
private def infiniteLoop(): Future[Unit] = {
val nf = loopTask()
nf.onComplete {
case Failure(_: NoSuchElementException) =>
logger.info("nested infiniteLoop: NoSuchElementException");
cleanup()
case _ => ()
}
nf.failed.foreach {
case e: java.lang.Error =>
logger.error("infinite loop failure: " + e.getMessage, e)
case e: Throwable =>
logger.error("infinite loop throwable: " + e.getMessage, e)
}
val f = Future(nf).map(_ => ())
// f.onComplete {
// case Failure(_: NoSuchElementException) =>
// logger.info("infiniteLoop: NoSuchElementException");
// cleanup()
// case _ => ()
// }
f
}
private def loopTask(): Future[Unit] = {
// logger.info("loopTask")
val f = run()
.filter((u: Unit) => {
logger.debug(s"Shutdown = ${shutdownPromise.isDefined}")
shutdownPromise.isEmpty
})
.flatMap(_ => loopTask())
// f.onComplete {
// case Failure(t: NoSuchElementException) => logger.info("loopTask: NoSuchElementException")
// case Failure(t) => t.printStackTrace(); logger.error(s"Failure in loopTask $t")
// case _ => ()
// }
f.failed.foreach {
case e: java.lang.Error =>
logger.error("loopTask failure: " + e.getMessage, e)
case e: Throwable =>
logger.error("loopTask throwable: " + e.getMessage, e)
}
f
}
private def getWork(): Future[Try[Task]] = {
stateService.get()
.flatMap { oldTask: Option[Task] =>
oldTask
.map { t: Task =>
logger.info("Using old job")
if (restart) {
Future(Success(t))
} else {
Future(Failure(new ProcessorException(new Exception("Unexpected worker shutdown"), t.copy(status = StatusString.Error))))
}
}
.getOrElse(poller.getWork())
.map { w: Try[Task] =>
w.foreach { task =>
MDC.put("id", task.id)
}
w
}
}
}
private def run(): Future[Unit] = {
// logger.info("run")
if (shutdownPromise.isDefined) {
logger.info("Shutdown requested, return immediately")
Future(())
} else {
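      // Single unit of work as a chain of futures: fetch a task from the queue,
      // persist it so it can be recovered after a crash, run the conversion,
      // submit the results, then drop the persisted state. A failure at any stage
      // short-circuits the remaining steps and is handled below.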
val f: Future[Try[Work]] = for {
// _ <- lock()
response <- getWork()
ser <- stateService.persist(response)
res <- worker.process(ser)
submitRes <- poller.submitResults(res)
clean <- stateService.cleanJob(submitRes)
} yield clean
MDC.remove("id")
f.foreach {
case Failure(UnavailableException(msg, cause)) => {
logger.debug("Queue unavailable, wait and retry: " + msg);
Thread.sleep(5000)
()
}
case Failure(UnauthorizedException(msg)) => {
logger.info("Unauthorized, wait and retry: " + msg);
Thread.sleep(5000)
()
}
case Failure(NoWorkException()) => {
logger.debug("No work");
()
}
case Failure(e) => {
logger.error("Failure: " + e.getMessage, e);
()
}
case _ => ()
}
f.failed.foreach {
case e: java.lang.Error => {
// logger.error("Got error and will re-throw: " + e.getMessage)
// e.printStackTrace()
logger.error("Error in run: " + e.getMessage, e)
throw e;
}
case t: Throwable => {
// t.printStackTrace()
logger.error(s"Failure in run: ${t.getMessage}", t)
}
case _ => ()
}
// FIXME pass results out
f.map(_ => ())
}
}
private def cleanup(): Unit = {
logger.info("Worker cleanup")
shutdownPromise.foreach(p => p.complete(Success(())))
}
override def status: ConversionStatus = poller.status
override def log(offset: Int): Seq[String] = worker.log(offset)
}
|
kuhnuri/kuhnuri-worker
|
common/app/services/SimpleWorkerService.scala
|
Scala
|
apache-2.0
| 6,200 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.deprecated
import org.scalatest._
// SKIP-SCALATESTJS,NATIVE-START
import org.scalatestplus.junit.JUnit3Suite
import org.scalatestplus.junit.JUnitSuite
import org.junit.Test
import org.testng.annotations.{Test => TestNG }
import org.scalatestplus.testng.TestNGSuite
import org.scalatest.refspec.RefSpec
// SKIP-SCALATESTJS,NATIVE-END
class DeprecatedTestNameProp extends AllSuiteProp {
type FixtureServices = TestNameFixtureServices
// SKIP-SCALATESTJS,NATIVE-START
def spec = new ExampleTestNameSpec
def junit3Suite = new ExampleTestNameJUnit3Suite
def junitSuite = new ExampleTestNameJUnitSuite
def testngSuite = new ExampleTestNameTestNGSuite
// SKIP-SCALATESTJS,NATIVE-END
def funSuite = new ExampleTestNameFunSuite
def fixtureFunSuite = new ExampleTestNameFixtureFunSuite
def funSpec = new ExampleTestNameFunSpec
def fixtureFunSpec = new ExampleTestNameFixtureFunSpec
def featureSpec = new ExampleTestNameFeatureSpec
def fixtureFeatureSpec = new ExampleTestNameFixtureFeatureSpec
def flatSpec = new ExampleTestNameFlatSpec
def fixtureFlatSpec = new ExampleTestNameFixtureFlatSpec
def freeSpec = new ExampleTestNameFreeSpec
def fixtureFreeSpec = new ExampleTestNameFixtureFreeSpec
def propSpec = new ExampleTestNamePropSpec
def fixturePropSpec = new ExampleTestNameFixturePropSpec
def wordSpec = new ExampleTestNameWordSpec
def fixtureWordSpec = new ExampleTestNameFixtureWordSpec
def pathFreeSpec = new ExampleTestNamePathFreeSpec
def pathFunSpec = new ExampleTestNamePathFunSpec
test("test name will be constructed by concatennating scopes, outer to inner, followed by the test text, separated by a space after each component is trimmed.") {
forAll(examples) { s => s.assertTestNames() }
}
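  // Worked example of the rule above, taken from ExampleTestNameFunSpec below:
  // describe("Testing 2 ") / describe("Scala code") / it("should be fun") yields the
  // test name "Testing 2 Scala code should be fun" - each component is trimmed
  // before the pieces are joined with single spaces.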
}
trait TestNameFixtureServices { suite: Suite =>
val expectedTestNames: Set[String]
def assertTestNames(): Unit = {
val expectedSet = expectedTestNames
val testNameSet = testNames
assert(expectedSet.size === testNameSet.size)
expectedSet.foreach { tn =>
assert(testNameSet contains tn, "Unable to find test name: '" + tn + "', testNames is: \n" + testNameSet.map("'" + _ + "'").mkString("\n"))
}
}
}
// SKIP-SCALATESTJS,NATIVE-START
@DoNotDiscover
class ExampleTestNameSpec extends RefSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
object `Testing 1` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 2 ` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 3` {
object ` Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 4` {
object `Scala code ` {
def `should be fun`: Unit = {}
}
}
object `Testing 5` {
object `Scala code` {
def ` should be fun`: Unit = {}
}
}
object ` Testing 6` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 7` {
object `Scala code` {
def `should be fun `: Unit = {}
}
}
object `Testing 8 ` {
object ` Scala code` {
def `should be fun`: Unit = {}
}
}
object `Testing 9 ` {
object `Scala code` {
def `should be fun`: Unit = {}
}
}
}
@DoNotDiscover
class ExampleTestNameJUnit3Suite extends JUnit3Suite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
def testingShouldBeFun(): Unit = { }
}
@DoNotDiscover
class ExampleTestNameJUnitSuite extends JUnitSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
@Test
def testingShouldBeFun(): Unit = {}
}
@DoNotDiscover
class ExampleTestNameTestNGSuite extends TestNGSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"testingShouldBeFun"
)
@TestNG
def testingShouldBeFun(): Unit = {}
}
// SKIP-SCALATESTJS,NATIVE-END
@DoNotDiscover
protected[scalatest] class ExampleTestNameFunSuite extends FunSuite with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should be fun",
"Testing 2 should be fun",
"Testing 3 should be fun",
"Testing 4 should be fun",
"Testing 5 should be fun"
)
test("Testing 1 should be fun") {}
test(" Testing 2 should be fun") {}
test("Testing 3 should be fun ") {}
test(" Testing 4 should be fun") {}
test("Testing 5 should be fun ") {}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFunSuite extends fixture.FunSuite with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should be fun",
"Testing 2 should be fun",
"Testing 3 should be fun",
"Testing 4 should be fun",
"Testing 5 should be fun"
)
test("Testing 1 should be fun") { s => }
test(" Testing 2 should be fun") { s => }
test("Testing 3 should be fun ") { s => }
test(" Testing 4 should be fun") { s => }
test("Testing 5 should be fun ") { s => }
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFunSpec extends FunSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") {}
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") {}
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") {}
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFunSpec extends fixture.FunSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") { s => }
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") { s => }
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") { s => }
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") { s => }
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFeatureSpec extends FeatureSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Feature: Testing 1 Scenario: Scala code should be fun",
"Feature: Testing 2 Scenario: Scala code should be fun",
"Feature: Testing 3 Scenario: Scala code should be fun",
"Feature: Testing 4 Scenario: Scala code should be fun",
"Feature: Testing 5 Scenario: Scala code should be fun",
"Feature: Testing 6 Scenario: Scala code should be fun",
"Feature: Testing 7 Scenario: Scala code should be fun"
)
feature("Testing 1") {
scenario("Scala code should be fun") {}
}
feature("Testing 2 ") {
scenario("Scala code should be fun") {}
}
feature("Testing 3") {
scenario(" Scala code should be fun") {}
}
feature("Testing 4") {
scenario("Scala code should be fun ") {}
}
feature(" Testing 5") {
scenario("Scala code should be fun") {}
}
feature("Testing 6 ") {
scenario(" Scala code should be fun") {}
}
feature("Testing 7 ") {
scenario("Scala code should be fun") {}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFeatureSpec extends fixture.FeatureSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Feature: Testing 1 Scenario: Scala code should be fun",
"Feature: Testing 2 Scenario: Scala code should be fun",
"Feature: Testing 3 Scenario: Scala code should be fun",
"Feature: Testing 4 Scenario: Scala code should be fun",
"Feature: Testing 5 Scenario: Scala code should be fun",
"Feature: Testing 6 Scenario: Scala code should be fun",
"Feature: Testing 7 Scenario: Scala code should be fun"
)
feature("Testing 1") {
scenario("Scala code should be fun") { s => }
}
feature("Testing 2 ") {
scenario("Scala code should be fun") { s => }
}
feature("Testing 3") {
scenario(" Scala code should be fun") { s => }
}
feature("Testing 4") {
scenario("Scala code should be fun ") { s => }
}
feature(" Testing 5") {
scenario("Scala code should be fun") { s => }
}
feature("Testing 6 ") {
scenario(" Scala code should be fun") { s => }
}
feature("Testing 7 ") {
scenario("Scala code should be fun") { s => }
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFlatSpec extends FlatSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should be fun to code in Scala",
"Testing 2 should be fun to code in Scala",
"Testing 3 should be fun to code in Scala",
"Testing 4 should be fun to code in Scala",
"Testing 5 should be fun to code in Scala",
"Testing 6 should be fun to code in Scala",
"Testing 7 should be fun to code in Scala"
)
"Testing 1" should "be fun to code in Scala" in {
}
"Testing 2 " should "be fun to code in Scala" in {
}
"Testing 3" should " be fun to code in Scala" in {
}
"Testing 4" should "be fun to code in Scala " in {
}
" Testing 5" should "be fun to code in Scala" in {
}
"Testing 6 " should " be fun to code in Scala" in {
}
"Testing 7 " should "be fun to code in Scala" in {
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFlatSpec extends fixture.FlatSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should be fun to code in Scala",
"Testing 2 should be fun to code in Scala",
"Testing 3 should be fun to code in Scala",
"Testing 4 should be fun to code in Scala",
"Testing 5 should be fun to code in Scala",
"Testing 6 should be fun to code in Scala",
"Testing 7 should be fun to code in Scala"
)
"Testing 1" should "be fun to code in Scala" in { s =>
}
"Testing 2 " should "be fun to code in Scala" in { s =>
}
"Testing 3" should " be fun to code in Scala" in { s =>
}
"Testing 4" should "be fun to code in Scala " in { s =>
}
" Testing 5" should "be fun to code in Scala" in { s =>
}
"Testing 6 " should " be fun to code in Scala" in { s =>
}
"Testing 7 " should "be fun to code in Scala" in { s =>
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFreeSpec extends FreeSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in {}
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in {}
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in {}
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureFreeSpec extends fixture.FreeSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in { s => }
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in { s => }
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in { s => }
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in { s => }
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in { s => }
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in { s => }
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePropSpec extends PropSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun"
)
property("Testing 1 Scala code should be fun") {}
property(" Testing 2 Scala code should be fun") {}
property("Testing 3 Scala code should be fun ") {}
property(" Testing 4 Scala code should be fun") {}
property("Testing 5 Scala code should be fun ") {}
property(" Testing 6 Scala code should be fun ") {}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixturePropSpec extends fixture.PropSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun"
)
property("Testing 1 Scala code should be fun") { s => }
property(" Testing 2 Scala code should be fun") { s => }
property("Testing 3 Scala code should be fun ") { s => }
property(" Testing 4 Scala code should be fun") { s => }
property("Testing 5 Scala code should be fun ") { s => }
property(" Testing 6 Scala code should be fun ") { s => }
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameWordSpec extends WordSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 should test Scala code should be fun",
"Testing 2 should test Scala code should be fun",
"Testing 3 should test Scala code should be fun",
"Testing 4 should test Scala code should be fun",
"Testing 5 should test Scala code should be fun",
"Testing 6 should test Scala code should be fun",
"Testing 7 should test Scala code should be fun",
"Testing 8 should test Scala code should be fun",
"Testing 9 should test Scala code should be fun"
)
"Testing 1" should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 2 " should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 3" should {
" test Scala code" should {
"be fun" in {}
}
}
"Testing 4" should {
"test Scala code " should {
"be fun" in {}
}
}
"Testing 5" should {
"test Scala code" should {
" be fun" in {}
}
}
" Testing 6" should {
"test Scala code" should {
"be fun" in {}
}
}
"Testing 7" should {
"test Scala code" should {
"be fun " in {}
}
}
"Testing 8 " should {
" test Scala code" should {
"be fun" in {}
}
}
"Testing 9 " should {
"test Scala code" should {
"be fun" in {}
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNameFixtureWordSpec extends fixture.WordSpec with TestNameFixtureServices with StringFixture {
val expectedTestNames =
Set(
"Testing 1 should test Scala code should be fun",
"Testing 2 should test Scala code should be fun",
"Testing 3 should test Scala code should be fun",
"Testing 4 should test Scala code should be fun",
"Testing 5 should test Scala code should be fun",
"Testing 6 should test Scala code should be fun",
"Testing 7 should test Scala code should be fun",
"Testing 8 should test Scala code should be fun",
"Testing 9 should test Scala code should be fun"
)
"Testing 1" should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 2 " should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 3" should {
" test Scala code" should {
"be fun" in { s => }
}
}
"Testing 4" should {
"test Scala code " should {
"be fun" in { s => }
}
}
"Testing 5" should {
"test Scala code" should {
" be fun" in { s => }
}
}
" Testing 6" should {
"test Scala code" should {
"be fun" in { s => }
}
}
"Testing 7" should {
"test Scala code" should {
"be fun " in { s => }
}
}
"Testing 8 " should {
" test Scala code" should {
"be fun" in { s => }
}
}
"Testing 9 " should {
"test Scala code" should {
"be fun" in { s => }
}
}
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePathFreeSpec extends path.FreeSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
"Testing 1" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 2 " - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 3" - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 4" - {
"Scala code " - {
"should be fun" in {}
}
}
"Testing 5" - {
"Scala code" - {
" should be fun" in {}
}
}
" Testing 6" - {
"Scala code" - {
"should be fun" in {}
}
}
"Testing 7" - {
"Scala code" - {
"should be fun " in {}
}
}
"Testing 8 " - {
" Scala code" - {
"should be fun" in {}
}
}
"Testing 9 " - {
"Scala code" - {
"should be fun" in {}
}
}
override def newInstance: path.FreeSpecLike = new ExampleTestNamePathFreeSpec
}
@DoNotDiscover
protected[scalatest] class ExampleTestNamePathFunSpec extends path.FunSpec with TestNameFixtureServices {
val expectedTestNames =
Set(
"Testing 1 Scala code should be fun",
"Testing 2 Scala code should be fun",
"Testing 3 Scala code should be fun",
"Testing 4 Scala code should be fun",
"Testing 5 Scala code should be fun",
"Testing 6 Scala code should be fun",
"Testing 7 Scala code should be fun",
"Testing 8 Scala code should be fun",
"Testing 9 Scala code should be fun"
)
describe("Testing 1") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 2 ") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 3") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 4") {
describe("Scala code ") {
it("should be fun") {}
}
}
describe("Testing 5") {
describe("Scala code") {
it(" should be fun") {}
}
}
describe(" Testing 6") {
describe("Scala code") {
it("should be fun") {}
}
}
describe("Testing 7") {
describe("Scala code") {
it("should be fun ") {}
}
}
describe("Testing 8 ") {
describe(" Scala code") {
it("should be fun") {}
}
}
describe("Testing 9 ") {
describe("Scala code") {
it("should be fun") {}
}
}
override def newInstance: path.FunSpecLike = new ExampleTestNamePathFunSpec
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/DeprecatedTestNameProp.scala
|
Scala
|
apache-2.0
| 23,385 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.dataload
import java.io.File
import java.math.BigDecimal
import org.apache.spark.sql.test.util.QueryTest
import org.apache.spark.sql.types._
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.TableIdentifier
import org.scalatest.BeforeAndAfterAll
class TestLoadDataFrame extends QueryTest with BeforeAndAfterAll {
var df: DataFrame = _
var dataFrame: DataFrame = _
var df2: DataFrame = _
var df3: DataFrame = _
var booldf:DataFrame = _
def buildTestData() = {
import sqlContext.implicits._
df = sqlContext.sparkContext.parallelize(1 to 32000)
.map(x => ("a", "b", x))
.toDF("c1", "c2", "c3")
val rdd = sqlContext.sparkContext.parallelize(
Row(52.23, BigDecimal.valueOf(1234.4440), "Warsaw") ::
Row(42.30, BigDecimal.valueOf(9999.9990), "Corte") :: Nil)
val schema = StructType(
StructField("double", DoubleType, nullable = false) ::
StructField("decimal", DecimalType(9, 2), nullable = false) ::
StructField("string", StringType, nullable = false) :: Nil)
dataFrame = sqlContext.createDataFrame(rdd, schema)
df2 = sqlContext.sparkContext.parallelize(1 to 1000)
.map(x => ("key_" + x, "str_" + x, x, x * 2, x * 3))
.toDF("c1", "c2", "c3", "c4", "c5")
df3 = sqlContext.sparkContext.parallelize(1 to 3)
      .map(x => (x.toString + "te,s\nt", x))
.toDF("c1", "c2")
val boolrdd = sqlContext.sparkContext.parallelize(
Row("anubhav",true) ::
Row("prince",false) :: Nil)
val boolSchema = StructType(
StructField("name", StringType, nullable = false) ::
StructField("isCarbonEmployee",BooleanType,nullable = false)::Nil)
booldf = sqlContext.createDataFrame(boolrdd,boolSchema)
}
def dropTable() = {
sql("DROP TABLE IF EXISTS carbon1")
sql("DROP TABLE IF EXISTS carbon2")
sql("DROP TABLE IF EXISTS carbon3")
sql("DROP TABLE IF EXISTS carbon4")
sql("DROP TABLE IF EXISTS carbon5")
sql("DROP TABLE IF EXISTS carbon6")
sql("DROP TABLE IF EXISTS carbon7")
sql("DROP TABLE IF EXISTS carbon8")
sql("DROP TABLE IF EXISTS carbon9")
sql("DROP TABLE IF EXISTS carbon10")
sql("DROP TABLE IF EXISTS carbon11")
sql("DROP TABLE IF EXISTS carbon12")
sql("DROP TABLE IF EXISTS df_write_sort_column_not_specified")
sql("DROP TABLE IF EXISTS df_write_specify_sort_column")
sql("DROP TABLE IF EXISTS df_write_empty_sort_column")
sql("DROP TABLE IF EXISTS carbon_table_df")
sql("DROP TABLE IF EXISTS carbon_table_df1")
}
override def beforeAll {
dropTable
buildTestData
}
test("test the boolean data type"){
booldf.write
.format("carbondata")
.option("tableName", "carbon0")
.option("tempCSV", "true")
.option("compress", "true")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("SELECT * FROM CARBON0"),
Seq(Row("anubhav", true), Row("prince", false)))
}
test("test load dataframe with saving compressed csv files") {
// save dataframe to carbon file
df.write
.format("carbondata")
.option("tableName", "carbon1")
.option("tempCSV", "true")
.option("compress", "true")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon1 where c3 > 500"), Row(31500)
)
sql(s"describe formatted carbon1").show(true)
}
test("test load dataframe with saving csv uncompressed files") {
// save dataframe to carbon file
df.write
.format("carbondata")
.option("tableName", "carbon2")
.option("tempCSV", "true")
.option("compress", "false")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon2 where c3 > 500"), Row(31500)
)
}
test("test load dataframe without saving csv files") {
// save dataframe to carbon file
df.write
.format("carbondata")
.option("tableName", "carbon3")
.option("tempCSV", "false")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon3 where c3 > 500"), Row(31500)
)
}
test("test decimal values for dataframe load"){
dataFrame.write
.format("carbondata")
.option("tableName", "carbon4")
.option("compress", "true")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("SELECT decimal FROM carbon4"),Seq(Row(BigDecimal.valueOf(10000.00)),Row(BigDecimal.valueOf(1234.44))))
}
test("test loading data if the data count is multiple of page size"){
checkAnswer(
sql("SELECT count(*) FROM carbon2"),Seq(Row(32000)))
}
test("test load dataframe with integer columns included in the dictionary"){
df2.write
.format("carbondata")
.option("tableName", "carbon5")
.option("compress", "true")
.option("dictionary_include","c3,c4")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon5 where c3 > 300"), Row(700)
)
}
test("test load dataframe with string column excluded from the dictionary"){
df2.write
.format("carbondata")
.option("tableName", "carbon6")
.option("compress", "true")
.option("dictionary_exclude","c2")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon6 where c3 > 300"), Row(700)
)
}
test("test load dataframe with both dictionary include and exclude specified"){
df2.write
.format("carbondata")
.option("tableName", "carbon7")
.option("compress", "true")
.option("dictionary_include","c3,c4")
.option("dictionary_exclude","c2")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon7 where c3 > 300"), Row(700)
)
}
test("test load dataframe with single pass enabled") {
// save dataframe to carbon file
df2.write
.format("carbondata")
.option("tableName", "carbon8")
.option("tempCSV", "false")
.option("single_pass", "true")
.option("compress", "false")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon8 where c3 > 500"), Row(500)
)
}
test("test load dataframe with single pass disabled") {
// save dataframe to carbon file
df2.write
.format("carbondata")
.option("tableName", "carbon9")
.option("tempCSV", "true")
.option("single_pass", "false")
.option("compress", "false")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon9 where c3 > 500"), Row(500)
)
}
test("test datasource table with specified table path") {
val path = "./source"
df2.write
.format("carbondata")
.option("tableName", "carbon10")
.option("tablePath", path)
.mode(SaveMode.Overwrite)
.save()
assert(new File(path).exists())
checkAnswer(
sql("select count(*) from carbon10 where c3 > 500"), Row(500)
)
sql("drop table carbon10")
assert(!new File(path).exists())
assert(intercept[AnalysisException](
sql("select count(*) from carbon10 where c3 > 500"))
.message
.contains("not found"))
}
test("test streaming Table") {
dataFrame.write
.format("carbondata")
.option("tableName", "carbon11")
.option("tempCSV", "true")
.option("single_pass", "false")
.option("compress", "false")
.option("streaming", "true")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("SELECT decimal FROM carbon11"),Seq(Row(BigDecimal.valueOf(10000.00)),Row(BigDecimal.valueOf(1234.44))))
val descResult =sql("desc formatted carbon11")
val isStreaming: String = descResult.collect().find(row=>row(0).asInstanceOf[String].trim.equalsIgnoreCase("streaming")).get.get(1).asInstanceOf[String]
assert(isStreaming.contains("true"))
}
test("test datasource table with specified char") {
df3.write
.format("carbondata")
.option("tableName", "carbon12")
.option("tempCSV", "true")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon12"), Row(3)
)
}
private def getSortColumnValue(tableName: String): Array[String] = {
val desc = sql(s"desc formatted $tableName")
val sortColumnRow = desc.collect.find(r =>
r(0).asInstanceOf[String].trim.equalsIgnoreCase("SORT_COLUMNS")
)
assert(sortColumnRow.isDefined)
sortColumnRow.get.get(1).asInstanceOf[String].split(",")
.map(_.trim.toLowerCase).filter(_.length > 0)
}
private def getDefaultWriter(tableName: String): DataFrameWriter[Row] = {
df2.write
.format("carbondata")
.option("tableName", tableName)
.option("tempCSV", "false")
.option("single_pass", "false")
.option("table_blocksize", "256")
.option("compress", "false")
.mode(SaveMode.Overwrite)
}
test("test load dataframe with sort_columns not specified," +
" by default all string columns will be sort_columns") {
// all string column will be sort_columns by default
getDefaultWriter("df_write_sort_column_not_specified").save()
checkAnswer(
sql("select count(*) from df_write_sort_column_not_specified where c3 > 500"), Row(500)
)
val sortColumnValue = getSortColumnValue("df_write_sort_column_not_specified")
assert(sortColumnValue.sameElements(Array("c1", "c2")))
}
test("test load dataframe with sort_columns specified") {
// only specify c1 as sort_columns
getDefaultWriter("df_write_specify_sort_column").option("sort_columns", "c1").save()
checkAnswer(
sql("select count(*) from df_write_specify_sort_column where c3 > 500"), Row(500)
)
val sortColumnValue = getSortColumnValue("df_write_specify_sort_column")
assert(sortColumnValue.sameElements(Array("c1")))
}
test("test load dataframe with sort_columns specified empty") {
// specify empty sort_column
getDefaultWriter("df_write_empty_sort_column").option("sort_columns", "").save()
checkAnswer(
sql("select count(*) from df_write_empty_sort_column where c3 > 500"), Row(500)
)
val sortColumnValue = getSortColumnValue("df_write_empty_sort_column")
assert(sortColumnValue.isEmpty)
}
test("test load dataframe while giving already created table") {
sql(s"create table carbon_table_df(c1 string, c2 string, c3 int) stored by 'carbondata'")
// save dataframe to carbon file
df.write
.format("carbondata")
.option("tableName", "carbon_table_df")
.option("tempCSV", "false")
.mode(SaveMode.Overwrite)
.save()
df.write
.format("carbondata")
.option("tableName", "carbon_table_df")
.option("tempCSV", "false")
.mode(SaveMode.Overwrite)
.save()
checkAnswer(
sql("select count(*) from carbon_table_df where c3 > 500"), Row(31500)
)
}
test("test load dataframe while giving already created table with delete segment") {
sql(s"create table carbon_table_df1(c1 string, c2 string, c3 int) stored by 'carbondata'")
val table = CarbonEnv.getCarbonTable(TableIdentifier("carbon_table_df1"))(sqlContext.sparkSession)
// save dataframe to carbon file
df.write
.format("carbondata")
.option("tableName", "carbon_table_df1")
.option("tempCSV", "false")
.mode(SaveMode.Overwrite)
.save()
assert(CarbonEnv.getCarbonTable(TableIdentifier("carbon_table_df1"))(sqlContext.sparkSession)
.getTableInfo.getFactTable.equals(table.getTableInfo.getFactTable))
sql("delete from table carbon_table_df1 where segment.id in (0)")
df.write
.format("carbondata")
.option("tableName", "carbon_table_df1")
.option("tempCSV", "false")
.mode(SaveMode.Overwrite)
.save()
assert(CarbonEnv.getCarbonTable(TableIdentifier("carbon_table_df1"))(sqlContext.sparkSession)
.getTableInfo.getFactTable.equals(table.getTableInfo.getFactTable))
checkAnswer(
sql("select count(*) from carbon_table_df1 where c3 > 500"), Row(31500)
)
}
override def afterAll {
dropTable
}
}
|
sgururajshetty/carbondata
|
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataFrame.scala
|
Scala
|
apache-2.0
| 13,049 |
package com.seanshubin.schulze.server.data_transfer
case class AlternativesAndPaths(alternatives: Seq[Seq[Any]], paths: Seq[Seq[Any]])
|
SeanShubin/schulze
|
server/src/main/scala/com/seanshubin/schulze/server/data_transfer/AlternativesAndPaths.scala
|
Scala
|
unlicense
| 136 |
package com.aergonaut.lifeaquatic.tileentity
import com.aergonaut.lib.core.TInitializer
import com.aergonaut.lifeaquatic.constants.Names
import cpw.mods.fml.common.registry.GameRegistry
object ModTileEntities extends TInitializer {
override def preInit(): Boolean = {
GameRegistry.registerTileEntity(classOf[TileEntityVat], Names.TileEntities.Vat)
GameRegistry.registerTileEntity(classOf[TileEntityCalcinator], Names.TileEntities.Calcinator)
true
}
}
|
aergonaut/LifeAquatic
|
src/main/scala/com/aergonaut/lifeaquatic/tileentity/ModTileEntities.scala
|
Scala
|
mit
| 470 |
package slogger.services.processing.aggregation
import scala.util.Try
import scala.util.Failure
import play.api.libs.json.JsObject
import scala.util.Success
import scala.util.control.NonFatal
trait AggregatorResolver {
def resolve(aggregatorClass: String, config: JsObject): Try[Aggregator]
}
class AggregatorResolverImpl(
aggregatorsClassLoader: ClassLoader
) extends AggregatorResolver{
override def resolve(aggregatorClass: String, config: JsObject): Try[Aggregator] = {
Try {
val clazz = aggregatorsClassLoader.loadClass(aggregatorClass)
val constructors = clazz.getConstructors()
if (constructors.length != 1) {
throw new IllegalArgumentException("Aggregator class should have exactly one constructor")
} else if (constructors(0).getParameterTypes().length != 1 || constructors(0).getParameterTypes()(0) != classOf[JsObject]) {
throw new IllegalArgumentException("Aggregator constructor should accept config:JsObject")
} else {
constructors(0).newInstance(config).asInstanceOf[Aggregator]
}
}
}
}
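// Illustrative sketch (class and package names are hypothetical): an aggregator that
// satisfies the contract enforced above - exactly one public constructor taking the
// JsObject configuration - and how it would be resolved by name (Json.obj() requires
// an additional import of play.api.libs.json.Json).
//
//   class CountAggregator(config: JsObject) extends Aggregator { /* ... */ }
//
//   val resolver = new AggregatorResolverImpl(getClass.getClassLoader)
//   val aggregator: Try[Aggregator] =
//     resolver.resolve("com.example.CountAggregator", Json.obj())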
|
IvanMykhailov/stats-logger
|
core/src/main/scala/slogger/services/processing/aggregation/AggregatorResolver.scala
|
Scala
|
mit
| 1,096 |
package iso8601
|
softprops/iso8601
|
src/main/scala/show.scala
|
Scala
|
mit
| 16 |
package org.scalameter
import java.util.Date
import org.scalameter.picklers.Pickler
import org.scalameter.utils.Tree
import scala.language.existentials
import scala.util.DynamicVariable
/** Base abstraction required for the [[org.scalameter.ScalaMeterFramework]] to find all performance tests.
 */
sealed trait AbstractPerformanceTest {
def executeTests(): Boolean
}
abstract class BasePerformanceTest[U] extends AbstractPerformanceTest {
import BasePerformanceTest._
setupzipper.value = Tree.Zipper.root[Setup[_]](measurer.prepareContext(currentContext ++ defaultConfig))
protected case class Scope(name: String, context: Context) {
def config(kvs: KeyValue*): Scope = config(context ++ Context(kvs: _*))
def config(ctx: Context): Scope = Scope(name, context ++ ctx)
def in(block: =>Unit): Unit = {
val oldscope = context(Key.dsl.scope)
descendInScope(name, context + (Key.dsl.scope -> (name :: oldscope))) {
block
}
}
}
protected case class Using[T](benchmark: Setup[T]) {
def beforeTests(block: =>Any) = Using(benchmark.copy(setupbeforeall = Some(() => block)))
def setUp(block: T => Any) = Using(benchmark.copy(setup = Some(block)))
def tearDown(block: T => Any) = Using(benchmark.copy(teardown = Some(block)))
def afterTests(block: =>Any) = Using(benchmark.copy(teardownafterall = Some(() => block)))
def warmUp(block: =>Any) = Using(benchmark.copy(customwarmup = Some(() => block)))
def curve(name: String) = Using(benchmark.copy(context = benchmark.context + (Key.dsl.curve -> name)))
def config(kvs: KeyValue*): Using[T] = config(Context(kvs: _*))
def config(ctx: Context): Using[T] = Using(benchmark.copy(context = benchmark.context ++ ctx))
def in(block: T => Any) {
setupzipper.value = setupzipper.value.addItem(benchmark.copy(snippet = block))
}
}
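  // Orientation sketch (hedged illustration only - the surface syntax below is defined
  // elsewhere in the library, not in this file): a concrete test builds nested Scope
  // blocks and registers measured snippets through Using, e.g.
  //
  //   performance of "Range" in {
  //     measure method "map" in {
  //       using(sizes) in { r => r.map(_ + 1) }
  //     }
  //   }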
private def setupFilter(setup: Setup[_]): Boolean = {
val sf = currentContext(Key.scopeFilter)
val fullname = setup.context.scope + "." + setup.context.curve
val regex = sf.r
regex.findFirstIn(fullname) != None
}
type SameType
def isModule = this.getClass.getSimpleName.endsWith("$")
def defaultConfig: Context = Context.empty
/** Allows rebuilding of setup zipper after test initialization.
*
* Default implementation is empty.
*/
def rebuildSetupZipper(): Unit = {}
/** Runs all the tests in this test class or singleton object.
*/
final def executeTests(): Boolean = {
rebuildSetupZipper()
val datestart: Option[Date] = Some(new Date)
val rawsetuptree = BasePerformanceTest.setupzipper.value.result
val setuptree = rawsetuptree.filter(setupFilter)
measurer.beforeExecution(setuptree.context)
val resulttree = executor.run(setuptree.asInstanceOf[Tree[Setup[SameType]]], reporter, persistor)
measurer.afterExecution(setuptree.context)
val dateend: Option[Date] = Some(new Date)
val datedtree = resulttree.copy(context = resulttree.context + (Key.reports.startDate -> datestart) + (Key.reports.endDate -> dateend))
reporter.report(datedtree, persistor)
}
/** The optional executor assigned to a particular body of DSL code.
*/
def executor: Executor[U]
def measurer: Measurer[U]
def reporter: Reporter[U]
def persistor: Persistor
}
object BasePerformanceTest {
private[scalameter] val setupzipper = new DynamicVariable(Tree.Zipper.root[Setup[_]](currentContext))
private[scalameter] def descendInScope(name: String, context: Context)(body: =>Unit) {
setupzipper.value = setupzipper.value.descend.setContext(context)
body
setupzipper.value = setupzipper.value.ascend
}
private[scalameter] val curveNameCount = new java.util.concurrent.atomic.AtomicInteger(0)
private[scalameter] def freshCurveName(): String = "Test-" + curveNameCount.getAndIncrement()
}
|
kjanosz/scalameter
|
src/main/scala/org/scalameter/performanceTest.scala
|
Scala
|
bsd-3-clause
| 3,863 |
package lila.security
object Spam {
def detect(texts: String*) = {
val text = texts mkString " "
blacklist exists text.contains
}
private val blacklist = List(
/* While links to other chess websites are welcome,
   * referral links grant the referrer money,
* effectively inducing spam */
"velocitychess.com/ref/",
"chess24.com?ref=",
"chess.com/register?refId="
)
def replace(text: String) = replacements.foldLeft(text) {
case (t, (regex, rep)) => regex.replaceAllIn(t, rep)
}
private val replacements = List(
"""velocitychess.com/ref/\\w+""".r -> "velocitychess.com",
"""chess24.com?ref=\\w+""".r -> "chess24.com",
"""chess.com/register?refId=\\w+""".r -> "chess.com/register"
)
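  // Worked example: replace("see velocitychess.com/ref/abc123") returns
  // "see velocitychess.com" - the referral token is stripped while the plain host,
  // which detect does not flag, is kept.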
}
|
JimmyMow/lila
|
modules/security/src/main/Spam.scala
|
Scala
|
mit
| 745 |
package mesosphere.marathon.core.task.tracker.impl.steps
import mesosphere.marathon.MarathonTestHelper
import mesosphere.marathon.core.launchqueue.LaunchQueue
import mesosphere.marathon.core.task.bus.MarathonTaskStatus
import mesosphere.marathon.core.task.bus.TaskStatusObservables.TaskStatusUpdate
import mesosphere.marathon.state.{ Timestamp, PathId }
import mesosphere.marathon.tasks.TaskIdUtil
import mesosphere.marathon.test.Mockito
import org.apache.mesos.Protos.{ TaskState, TaskStatus, SlaveID }
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ GivenWhenThen, Matchers, FunSuite }
import scala.concurrent.Future
class NotifyLaunchQueueStepImplTest extends FunSuite with Matchers with GivenWhenThen with Mockito with ScalaFutures {
test("name") {
new Fixture().step.name should equal("notifyLaunchQueue")
}
test("notifying launch queue") {
val f = new Fixture
val status = runningTaskStatus
val expectedUpdate = TaskStatusUpdate(updateTimestamp, taskId, MarathonTaskStatus(status))
Given("a status update")
f.launchQueue.notifyOfTaskUpdate(expectedUpdate) returns Future.successful(None)
When("calling processUpdate")
f.step.processUpdate(
updateTimestamp,
appId,
task = MarathonTestHelper.dummyTask(appId),
status = status
).futureValue
Then("the update is passed to the LaunchQueue")
verify(f.launchQueue).notifyOfTaskUpdate(expectedUpdate)
}
private[this] val slaveId = SlaveID.newBuilder().setValue("slave1")
private[this] val appId = PathId("/test")
private[this] val taskId = TaskIdUtil.newTaskId(appId)
private[this] val updateTimestamp = Timestamp(100)
private[this] val taskStatusMessage = "some update"
private[this] val runningTaskStatus =
TaskStatus
.newBuilder()
.setState(TaskState.TASK_RUNNING)
.setTaskId(taskId)
.setSlaveId(slaveId)
.setMessage(taskStatusMessage)
.build()
class Fixture {
val launchQueue = mock[LaunchQueue]
val step = new NotifyLaunchQueueStepImpl(launchQueue = launchQueue)
}
}
|
Kosta-Github/marathon
|
src/test/scala/mesosphere/marathon/core/task/tracker/impl/steps/NotifyLaunchQueueStepImplTest.scala
|
Scala
|
apache-2.0
| 2,090 |
package org.crashstars.spark.streaming
import org.apache.spark.streaming.StreamingContextState._
import org.apache.spark.streaming.{Duration, Seconds, StreamingContext}
import org.apache.spark.{SparkConf, SparkContext}
import org.crashstars.common.Logging
/**
* Created by anavidad on 13/10/15.
*/
class SparkStreamingUtils(conf: SparkConf, batchDuration: Duration = Seconds(1),
checkPointDir: String = "/tmp/D1F1CULT_R3P3T") extends Logging {
def withSparkStreamingContext(testCode: (SparkContext, StreamingContext) => Any): Unit = {
val sc = new SparkContext(conf)
val ssc = new StreamingContext(sc, batchDuration)
ssc.checkpoint(checkPointDir)
testCode(sc, ssc)
if (ssc != null) {
ssc.getState() match {
case INITIALIZED =>
logWarning("StreamingContext has not been started yet")
case STOPPED =>
logWarning("StreamingContext has already been stopped")
case ACTIVE =>
ssc.stop()
logInfo("StreamingContext stopped successfully")
}
}
}
}
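// Illustrative usage sketch (configuration values are hypothetical): the loan pattern
// above hands a fresh SparkContext/StreamingContext pair to the test body and stops
// the streaming context afterwards if the body left it running.
//
//   val utils = new SparkStreamingUtils(
//     new SparkConf().setMaster("local[2]").setAppName("streaming-test"))
//   utils.withSparkStreamingContext { (sc, ssc) =>
//     // build DStreams against ssc, start ssc, then assert on the results
//   }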
|
anavidad3/PoC-spark-scala-maven
|
src/main/scala/org/crashstars/spark/streaming/SparkStreamingUtils.scala
|
Scala
|
apache-2.0
| 1,075 |
package com.wincom.dcim.sharded
import akka.actor.{Actor, ActorRef, Props}
import akka.cluster.sharding.{ClusterSharding, ClusterShardingSettings}
import akka.event.Logging
import com.wincom.dcim.domain.Settings
import com.wincom.dcim.message.common.Command
object ShardedDevices {
def props = Props(new ShardedDevices)
def name = "sharded-devices"
}
class ShardedDevices extends Actor {
val settings = Settings(context.system)
ShardedDevice.numberOfShards = settings.actor.numberOfShards
val log = Logging(context.system.eventStream, ShardedDevices.name)
def shardedSignal(): ActorRef = {
ClusterSharding(context.system).shardRegion(ShardedSignal.shardName)
}
def shardedAlarm(): ActorRef = {
ClusterSharding(context.system).shardRegion(ShardedAlarm.shardName)
}
def shardedDevice(): ActorRef = {
ClusterSharding(context.system).shardRegion(ShardedDevice.shardName)
}
ClusterSharding(context.system).start(
ShardedDevice.shardName,
ShardedDevice.props(shardedSignal, shardedAlarm, shardedDevice),
ClusterShardingSettings(context.system),
ShardedDevice.extractEntityId,
ShardedDevice.extractShardId
)
override def receive: Receive = {
case cmd: Command =>
shardedDevice forward cmd
case x => log.info("COMMAND: {} {}", this, x)
}
}
|
xtwxy/mysc
|
dcim-cluster/cluster/src/main/scala/com/wincom/dcim/sharded/ShardedDevices.scala
|
Scala
|
apache-2.0
| 1,318 |
/*
* Copyright (c) 2016, Innoave.com
* All rights reserved.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL INNOAVE.COM OR ITS CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package io.scalatestfx.zzznolongerused
case class SfxRobotException(
val message: String
) extends Exception(message)
|
haraldmaida/ScalaTestFX
|
scalatestfx/src/main/scala/io/scalatestfx/zzznolongerused/SfxRobotException.scala
|
Scala
|
apache-2.0
| 977 |
package org.sisioh.aws4s.sts.model
import com.amazonaws.services.securitytoken.model.{AssumedRoleUser, Credentials, AssumeRoleResult}
import org.sisioh.aws4s.PimpedType
class RichAssumeRoleResult(val underlying: AssumeRoleResult)
extends AnyVal with PimpedType[AssumeRoleResult] {
def credentialsOpt: Option[Credentials] =
Option(underlying.getCredentials)
def withCredentials(credentials: Option[Credentials]): AssumeRoleResult =
underlying.withCredentials(credentials.orNull)
def assumedRoleUserOpt: Option[AssumedRoleUser] =
Option(underlying.getAssumedRoleUser)
def withAssumedRoleUser(assumedRoleUser: Option[AssumedRoleUser]): AssumeRoleResult =
underlying.withAssumedRoleUser(assumedRoleUser.orNull)
def packedPolicySizeOpt: Option[Int] =
Option(underlying.getPackedPolicySize)
def withPackedPolicySize(packedPolicySize: Option[Int]): AssumeRoleResult =
underlying.withPackedPolicySize(packedPolicySize.map(_.asInstanceOf[java.lang.Integer]).orNull)
}
|
everpeace/aws4s
|
aws4s-sts/src/main/scala/org/sisioh/aws4s/sts/model/RichAssumeRoleResult.scala
|
Scala
|
mit
| 1,008 |
package com.redislabs.provider.redis.streaming
import java.util.AbstractMap.SimpleEntry
import com.redislabs.provider.redis.util.PipelineUtils.foreachWithPipeline
import com.redislabs.provider.redis.util.{Logging, StreamUtils}
import com.redislabs.provider.redis.{ReadWriteConfig, RedisConfig}
import org.apache.curator.utils.ThreadUtils
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.receiver.Receiver
import org.sparkproject.guava.util.concurrent.RateLimiter
import redis.clients.jedis.{Jedis, StreamEntry, StreamEntryID}
import scala.collection.JavaConversions._
/**
* Receives messages from Redis Stream
*/
class RedisStreamReceiver(consumersConfig: Seq[ConsumerConfig],
redisConfig: RedisConfig,
readWriteConfig: ReadWriteConfig,
storageLevel: StorageLevel)
extends Receiver[StreamItem](storageLevel) with Logging {
override def onStart(): Unit = {
logInfo("Starting Redis Stream Receiver")
val executorPool = ThreadUtils.newFixedThreadPool(consumersConfig.size, "RedisStreamMessageHandler")
try {
// start consumers in separate threads
for (c <- consumersConfig) {
executorPool.submit(new MessageHandler(c, redisConfig, readWriteConfig))
}
} finally {
// terminate threads after the work is done
executorPool.shutdown()
}
}
override def onStop(): Unit = {
}
private class MessageHandler(conf: ConsumerConfig,
redisConfig: RedisConfig,
implicit val readWriteConfig: ReadWriteConfig) extends Runnable {
val jedis: Jedis = redisConfig.connectionForKey(conf.streamKey)
val rateLimiterOpt: Option[RateLimiter] = conf.rateLimitPerConsumer.map(r => RateLimiter.create(r))
override def run(): Unit = {
logInfo(s"Starting MessageHandler $conf")
try {
createConsumerGroupIfNotExist()
receiveUnacknowledged()
receiveNewMessages()
} catch {
case e: Exception =>
restart("Error handling message. Restarting.", e)
}
}
def createConsumerGroupIfNotExist(): Unit = {
val entryId = conf.offset match {
case Earliest => new StreamEntryID(0, 0)
case Latest => StreamEntryID.LAST_ENTRY
case IdOffset(v1, v2) => new StreamEntryID(v1, v2)
}
StreamUtils.createConsumerGroupIfNotExist(jedis, conf.streamKey, conf.groupName, entryId)
}
def receiveUnacknowledged(): Unit = {
logInfo(s"Starting receiving unacknowledged messages for key ${conf.streamKey}")
var continue = true
val unackId = new SimpleEntry(conf.streamKey, new StreamEntryID(0, 0))
while (!isStopped && continue) {
val response = jedis.xreadGroup(
conf.groupName,
conf.consumerName,
conf.batchSize,
conf.block,
false,
unackId)
val unackMessagesMap = response.map(e => (e.getKey, e.getValue)).toMap
val entries = unackMessagesMap(conf.streamKey)
if (entries.isEmpty) {
continue = false
}
storeAndAck(conf.streamKey, entries)
}
}
def receiveNewMessages(): Unit = {
logInfo(s"Starting receiving new messages for key ${conf.streamKey}")
val newMessId = new SimpleEntry(conf.streamKey, StreamEntryID.UNRECEIVED_ENTRY)
while (!isStopped) {
val response = jedis.xreadGroup(
conf.groupName,
conf.consumerName,
conf.batchSize,
conf.block,
false,
newMessId)
if (response != null) {
for (streamMessages <- response) {
val key = streamMessages.getKey
val entries = streamMessages.getValue
storeAndAck(key, entries)
}
}
}
}
def storeAndAck(streamKey: String, entries: Seq[StreamEntry]): Unit = {
if (entries.nonEmpty) {
// limit the rate if it's enabled
rateLimiterOpt.foreach(_.acquire(entries.size))
val streamItems = entriesToItems(streamKey, entries)
// call store(multiple-records) to reliably store in Spark memory
store(streamItems.iterator)
// ack redis
foreachWithPipeline(jedis, entries) { (pipeline, entry) =>
pipeline.xack(streamKey, conf.groupName, entry.getID)
}
}
}
def entriesToItems(key: String, entries: Seq[StreamEntry]): Seq[StreamItem] = {
entries.map { e =>
val itemId = ItemId(e.getID.getTime, e.getID.getSequence)
StreamItem(key, itemId, e.getFields.toMap)
}
}
}
}
/**
* @param streamKey redis stream key
* @param groupName consumer group name
* @param consumerName consumer name
* @param offset stream offset
* @param rateLimitPerConsumer maximum retrieved messages per second per single consumer
* @param batchSize maximum number of pulled items in a read API call
* @param block time in milliseconds to wait for data in a blocking read API call
*/
case class ConsumerConfig(streamKey: String,
groupName: String,
consumerName: String,
offset: Offset = Latest,
rateLimitPerConsumer: Option[Int] = None,
batchSize: Int = 100,
block: Long = 500)
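// Hedged usage sketch (not part of the original file): a consumer group member that reads
// stream "events" from the beginning, pulls at most 200 entries per read call and is
// throttled to 500 items per second. All literal values and the object name are
// illustrative assumptions.
object ConsumerConfigExample {
  val eventsConsumer: ConsumerConfig = ConsumerConfig(
    streamKey = "events",
    groupName = "spark-group",
    consumerName = "consumer-1",
    offset = Earliest,
    rateLimitPerConsumer = Some(500),
    batchSize = 200)
}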
/**
* Represents an offset in the stream
*/
sealed trait Offset
/**
* Latest offset, known as a '$' special id
*/
case object Latest extends Offset
/**
* Earliest offset, '0-0' id
*/
case object Earliest extends Offset
/**
* Specific id in the form of 'v1-v2'
*
* @param v1 first token of the id
* @param v2 second token of the id
*/
case class IdOffset(v1: Long, v2: Long) extends Offset
/**
* Item id in the form of 'v1-v2'
*
* @param v1 first token of the id
* @param v2 second token of the id
*/
case class ItemId(v1: Long, v2: Long)
/**
* Represent an item in the stream
*
* @param streamKey stream key
* @param id item(entry) id
* @param fields key/value map of item fields
*/
case class StreamItem(streamKey: String, id: ItemId, fields: Map[String, String])
|
RedisLabs/spark-redis
|
src/main/scala/com/redislabs/provider/redis/streaming/RedisStreamReceiver.scala
|
Scala
|
bsd-3-clause
| 6,325 |
package ar.edu.unq.tpi.qsim.parser
/**
* Copyright 2014 Tatiana Molinari.
* Copyright 2014 Susana Rosito
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
import ar.edu.unq.tpi.qsim.model.Programa
import scala.util.parsing.input.CharSequenceReader
import ar.edu.unq.tpi.qsim.exeptions.SyntaxErrorException
import org.uqbar.commons.utils.Observable
object Parser extends Ensamblador {
var arquitecturas = List(ArquitecturaQ("Q1", ensamblarQ1), ArquitecturaQ("Q2", ensamblarQ2), ArquitecturaQ("Q3", ensamblarQ3), ArquitecturaQ("Q4", ensamblarQ4), ArquitecturaQ("Q5", ensamblarQ5), ArquitecturaQ("Q6", ensamblarQ6))
def ensamblarQ1(code: String): Programa = result(parse(code, this.programQ1))
def ensamblarQ2(code: String): Programa = result(parse(code, this.programQ2))
def ensamblarQ3(code: String): Programa = result(parse(code, this.programQ3))
def ensamblarQ4(code: String): Programa = result(parse(code, this.programQ4))
def ensamblarQ5(code: String): Programa = result(parse(code, this.programQ5))
def ensamblarQ6(code: String): Programa = result(parse(code, this.programQ6))
def result(resultado: ParseResult[Programa]): Programa = resultado match {
case Success(result, _) ⇒ result
case Failure(msg, i) ⇒ {
var mensaje = createMessage(i)
throw new SyntaxErrorException(mensaje)
}
case Error(msg, i) ⇒ throw new SyntaxErrorException(msg)
}
def createMessage(output: Input): String = {
var characterCount = output.offset
var cadenaCaracteres = output.source
var lineas = output.source.toString().split("\n")
var numeroLinea = buscarLineaConError(lineas, characterCount)
return lineaResultado(numeroLinea, lineas)
}
def buscarLineaConError(lineas: Array[String], characterError: Int): Int = {
var lineaConErrorBordeIzq = 0
var cantCaracteres = 0
var encontreLinea = false
var i = 0
while (i < lineas.size && !(encontreLinea)) {
var linea = lineas(i)
cantCaracteres = linea.length() + cantCaracteres + 1
if (siLineasPrincipioSoloIntrSinOPoUnOpSonLineasConError(linea) || cantCaracteres > 0 && cantCaracteres > characterError ) {
lineaConErrorBordeIzq = i + 1
encontreLinea = true
}
i = i + 1
}
return lineaConErrorBordeIzq
}
def siLineasPrincipioSoloIntrSinOPoUnOpSonLineasConError(linea: String): Boolean = {
var estaNotoJMP = false
if (linea.length() <= 4) {
if (!(linea.trim().contains("RET")) || linea.trim().equals("JMP") || linea.trim().equals("NOT")) {
estaNotoJMP = true
}
}
return estaNotoJMP
}
def lineaResultado(numeroLinea: Int, lineas: Array[String]): String = {
var linea = lineas(numeroLinea - 1)
s"Ha ocurrido un error en la linea $numeroLinea : $linea"
}
}
@Observable
case class ArquitecturaQ(var name: String, parser: (String) ⇒ Programa) {
override def toString = name
}
object pruebaError extends App {
var string = """NOT
AND [R6], [0x0006]
OR [[0x0023]], 0x0300
AND R5, [0x0005]
OR R6, 0x0005"""
var parser = Parser
try {
parser.ensamblarQ6(string)
} catch {
case ex: SyntaxErrorException ⇒ {
print(ex.getMessage())
}
}
}
|
molinarirosito/QSim
|
src/main/scala/ar/edu/unq/tpi/qsim/parser/Parser.scala
|
Scala
|
gpl-3.0
| 3,803 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.graphx
import org.apache.spark._
import org.apache.spark.graphx.PartitionStrategy._
import org.apache.spark.graphx._
import org.apache.spark.graphx.lib._
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StorageLevel
import scala.collection.mutable
/**
* Driver program for running graph algorithms.
*/
object Analytics extends Logging {
def main(args: Array[String]): Unit = {
if (args.length < 2) {
System.err.println(
"Usage: Analytics <taskType> <file> --numEPart=<num_edge_partitions> [other options]")
System.err.println("Supported 'taskType' as follows:")
System.err.println(" pagerank Compute PageRank")
System.err.println(" cc Compute the connected components of vertices")
System.err.println(" triangles Count the number of triangles")
System.exit(1)
}
val taskType = args(0)
val fname = args(1)
val optionsList = args.drop(2).map { arg =>
arg.dropWhile(_ == '-').split('=') match {
case Array(opt, v) => (opt -> v)
case _ => throw new IllegalArgumentException("Invalid argument: " + arg)
}
}
val options = mutable.Map(optionsList: _*)
val conf = new SparkConf()
GraphXUtils.registerKryoClasses(conf)
val numEPart = options.remove("numEPart").map(_.toInt).getOrElse {
println("Set the number of edge partitions using --numEPart.")
sys.exit(1)
}
val partitionStrategy: Option[PartitionStrategy] = options.remove("partStrategy")
.map(PartitionStrategy.fromString(_))
val edgeStorageLevel = options.remove("edgeStorageLevel")
.map(StorageLevel.fromString(_)).getOrElse(StorageLevel.MEMORY_ONLY)
val vertexStorageLevel = options.remove("vertexStorageLevel")
.map(StorageLevel.fromString(_)).getOrElse(StorageLevel.MEMORY_ONLY)
taskType match {
case "pagerank" =>
val tol = options.remove("tol").map(_.toFloat).getOrElse(0.001F)
val outFname = options.remove("output").getOrElse("")
val numIterOpt = options.remove("numIter").map(_.toInt)
options.foreach {
case (opt, _) => throw new IllegalArgumentException("Invalid option: " + opt)
}
println("======================================")
println("| PageRank |")
println("======================================")
val sc = new SparkContext(conf.setAppName("PageRank(" + fname + ")"))
val unpartitionedGraph = GraphLoader.edgeListFile(sc, fname,
numEdgePartitions = numEPart,
edgeStorageLevel = edgeStorageLevel,
vertexStorageLevel = vertexStorageLevel).cache()
val graph = partitionStrategy.foldLeft(unpartitionedGraph)(_.partitionBy(_))
println("GRAPHX: Number of vertices " + graph.vertices.count)
println("GRAPHX: Number of edges " + graph.edges.count)
val pr = (numIterOpt match {
case Some(numIter) => PageRank.run(graph, numIter)
case None => PageRank.runUntilConvergence(graph, tol)
}).vertices.cache()
println("GRAPHX: Total rank: " + pr.map(_._2).reduce(_ + _))
if (!outFname.isEmpty) {
logWarning("Saving pageranks of pages to " + outFname)
pr.map { case (id, r) => id + "\\t" + r }.saveAsTextFile(outFname)
}
sc.stop()
case "cc" =>
options.foreach {
case (opt, _) => throw new IllegalArgumentException("Invalid option: " + opt)
}
println("======================================")
println("| Connected Components |")
println("======================================")
val sc = new SparkContext(conf.setAppName("ConnectedComponents(" + fname + ")"))
val unpartitionedGraph = GraphLoader.edgeListFile(sc, fname,
numEdgePartitions = numEPart,
edgeStorageLevel = edgeStorageLevel,
vertexStorageLevel = vertexStorageLevel).cache()
val graph = partitionStrategy.foldLeft(unpartitionedGraph)(_.partitionBy(_))
val cc = ConnectedComponents.run(graph)
println("Components: " + cc.vertices.map { case (vid, data) => data }.distinct())
sc.stop()
case "triangles" =>
options.foreach {
case (opt, _) => throw new IllegalArgumentException("Invalid option: " + opt)
}
println("======================================")
println("| Triangle Count |")
println("======================================")
val sc = new SparkContext(conf.setAppName("TriangleCount(" + fname + ")"))
val graph = GraphLoader.edgeListFile(sc, fname,
canonicalOrientation = true,
numEdgePartitions = numEPart,
edgeStorageLevel = edgeStorageLevel,
vertexStorageLevel = vertexStorageLevel)
// TriangleCount requires the graph to be partitioned
.partitionBy(partitionStrategy.getOrElse(RandomVertexCut)).cache()
val triangles = TriangleCount.run(graph)
println("Triangles: " + triangles.vertices.map {
case (vid, data) => data.toLong
}.reduce(_ + _) / 3)
sc.stop()
case _ =>
println("Invalid task type.")
}
}
}
// scalastyle:on println
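// Hedged example invocation (not part of the original file): the jar name, input path and
// output path are illustrative; the class name, task type and options come from the code above.
//
//   spark-submit --class org.apache.spark.examples.graphx.Analytics \
//     spark-examples.jar pagerank hdfs:///data/edges.txt \
//     --numEPart=8 --tol=0.001 --output=hdfs:///out/ranks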
|
chgm1006/spark-app
|
src/main/scala/org/apache/spark/examples/graphx/Analytics.scala
|
Scala
|
apache-2.0
| 6,900 |
package sectery.producers
import org.slf4j.LoggerFactory
import sectery.Producer
import sectery.Rx
import sectery.Tx
import zio.Clock
import zio.Queue
import zio.ZIO
class Help(producers: List[Producer]) extends Producer:
private val usage = """^@help\s+(.+)\s*$""".r
private val helpMessage: String =
s"""${producers
.flatMap(p => p.help().map(_.name))
.sorted
.mkString(", ")}"""
private val usageMap: Map[String, String] =
producers.flatMap(_.help().map(i => i.name -> i.usage)).toMap
override def help(): Iterable[Info] =
None
override def apply(m: Rx): ZIO[Any, Nothing, Iterable[Tx]] =
ZIO.succeed {
m match
case Rx(channel, _, "@help") =>
Some(Tx(m.channel, helpMessage))
case Rx(channel, _, usage(name)) =>
usageMap.get(name) match
case Some(usage) =>
Some(Tx(channel, s"Usage: ${usage}"))
case None =>
Some(Tx(channel, s"I don't know anything about ${name}"))
case _ =>
None
}
object Help {
def apply(producers: List[Producer]): List[Producer] =
new Help(producers) :: producers
}
|
earldouglas/sectery
|
modules/producers/src/main/scala/sectery/producers/Help.scala
|
Scala
|
mit
| 1,170 |
package euler.contfract
/**
* Iterator for convergents of a generalized continued fraction:
*
* @see http://en.wikipedia.org/wiki/Convergent_(continued_fraction)
*/
class ConvergentsIterator(asIt: Iterator[Int], bsIt: Iterator[Int]) extends Iterator[(BigInt, BigInt)] {
def this(as: Seq[Int], bs: Seq[Int]) = this(as.iterator, bs.iterator)
var _hasNext = asIt.hasNext && bsIt.hasNext
def hasNext: Boolean = _hasNext
var _Acurr = BigInt(bsIt.next)
var _Bcurr = BigInt(1)
var (_Anext, _Bnext) = {
val b1 = BigInt(bsIt.next)
(b1 * _Acurr + asIt.next, b1)
}
def next(): (BigInt, BigInt) = {
val (a_n, b_n) = (asIt.next, bsIt.next)
    // A_n = b_n * A_(n-1) + a_n * A_(n-2)
val A_n = b_n * _Anext + a_n * _Acurr
    // B_n = b_n * B_(n-1) + a_n * B_(n-2)
val B_n = b_n * _Bnext + a_n * _Bcurr
val result = (_Acurr, _Bcurr)
_Acurr = _Anext
_Bcurr = _Bnext
_Anext = A_n
_Bnext = B_n
_hasNext = bsIt.hasNext && asIt.hasNext
result
}
}
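// Hedged usage sketch (not part of the original file): with all a_n = b_n = 1 the
// generalized continued fraction is [1; 1, 1, ...], whose convergents are ratios of
// consecutive Fibonacci numbers converging to the golden ratio. The object name is an
// illustrative assumption.
object ConvergentsIteratorExample extends App {
  val convergents = new ConvergentsIterator(Iterator.continually(1), Iterator.continually(1))
  convergents.take(10).foreach { case (num, den) =>
    println(s"$num / $den = ${BigDecimal(num) / BigDecimal(den)}")
  }
}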
|
TrustNoOne/Euler
|
scala/src/main/scala/euler/contfract/ConvergentsIterator.scala
|
Scala
|
mit
| 981 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database.test.behavior
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.database.NoDocumentException
import org.apache.openwhisk.core.entity.{ActivationId, WhiskActivation}
import scala.util.Random
trait ActivationStoreCRUDBehaviors extends ActivationStoreBehaviorBase {
protected def checkStoreActivation(activation: WhiskActivation)(implicit transid: TransactionId): Unit = {
store(activation, context) shouldBe activation.docinfo
}
protected def checkDeleteActivation(activation: WhiskActivation)(implicit transid: TransactionId): Unit = {
activationStore.delete(ActivationId(activation.docid.asString), context).futureValue shouldBe true
}
protected def checkGetActivation(activation: WhiskActivation)(implicit transid: TransactionId): Unit = {
activationStore.get(ActivationId(activation.docid.asString), context).futureValue shouldBe activation
}
behavior of s"${storeType}ActivationStore store"
it should "put activation and get docinfo" in {
implicit val tid: TransactionId = transId()
val namespace = s"ns_${Random.alphanumeric.take(4).mkString}"
val action = s"action1_${Random.alphanumeric.take(4).mkString}"
val activation = newActivation(namespace, action, 1L)
checkStoreActivation(activation)
}
behavior of s"${storeType}ActivationStore delete"
it should "deletes existing activation" in {
implicit val tid: TransactionId = transId()
val namespace = s"ns_${Random.alphanumeric.take(4).mkString}"
val action = s"action1_${Random.alphanumeric.take(4).mkString}"
val activation = newActivation(namespace, action, 1L)
store(activation, context)
checkDeleteActivation(activation)
}
it should "throws NoDocumentException when activation does not exist" in {
implicit val tid: TransactionId = transId()
activationStore.delete(ActivationId("non-existing-doc"), context).failed.futureValue shouldBe a[NoDocumentException]
}
behavior of s"${storeType}ActivationStore get"
it should "get existing activation matching id" in {
implicit val tid: TransactionId = transId()
val namespace = s"ns_${Random.alphanumeric.take(4).mkString}"
val action = s"action1_${Random.alphanumeric.take(4).mkString}"
val activation = newActivation(namespace, action, 1L)
store(activation, context)
checkGetActivation(activation)
}
it should "throws NoDocumentException when activation does not exist" in {
implicit val tid: TransactionId = transId()
activationStore.get(ActivationId("non-existing-doc"), context).failed.futureValue shouldBe a[NoDocumentException]
}
}
|
jeremiaswerner/openwhisk
|
tests/src/test/scala/org/apache/openwhisk/core/database/test/behavior/ActivationStoreCRUDBehaviors.scala
|
Scala
|
apache-2.0
| 3,468 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.structure
import java.util.UUID
import io.gatling.core.action.builder.{ ExitHereBuilder, StopInjectorBuilder, TryMaxBuilder }
import io.gatling.core.session._
private[structure] trait Errors[B] extends Execs[B] {
def exitBlockOnFail(chain: ChainBuilder): B = tryMax(1.expressionSuccess)(chain)
@SuppressWarnings(Array("org.wartremover.warts.DefaultArguments"))
def tryMax(times: Expression[Int], counterName: String = UUID.randomUUID.toString)(chain: ChainBuilder): B =
exec(new TryMaxBuilder(times, counterName, chain))
def exitHereIf(condition: Expression[Boolean]): B = exec(new ExitHereBuilder(condition))
def exitHere: B = exitHereIf(TrueExpressionSuccess)
def exitHereIfFailed: B = exec(ExitHereBuilder())
def stopInjector(message: Expression[String]): B = exec(new StopInjectorBuilder(message))
}
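// Hedged usage sketch (not part of the original file; assumes the standard Gatling Scala
// DSL from io.gatling.core.Predef and io.gatling.http.Predef). `tryMax` retries the wrapped
// chain while it fails, and `exitHereIfFailed` removes the virtual user once a previous
// step has failed. The scenario name, request name and URL are illustrative assumptions.
//
//   val checkout = scenario("checkout")
//     .tryMax(3) {
//       exec(http("pay").post("/pay"))
//     }
//     .exitHereIfFailed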
|
gatling/gatling
|
gatling-core/src/main/scala/io/gatling/core/structure/Errors.scala
|
Scala
|
apache-2.0
| 1,474 |
package eventstore
package operations
import scala.util.Try
private[eventstore] trait Inspection {
def expected: Class[_]
def pf: PartialFunction[Try[In], Inspection.Decision]
}
private[eventstore] object Inspection {
sealed trait Decision
object Decision {
case object Stop extends Decision
case object Retry extends Decision
case object Unexpected extends Decision
case class Fail(value: EsException) extends Decision
}
}
|
pawelkaczor/EventStore.JVM
|
src/main/scala/eventstore/operations/Inspection.scala
|
Scala
|
bsd-3-clause
| 453 |
/*
* Copyright 2012 OneCalendar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dao
import com.mongodb.casbah.Imports._
import configuration.injection.MongoConnectionPool
import configuration.injection.MongoProp._
import fr.scala.util.collection.CollectionsUtils
import models.{ICalStreamTypeClass, ICalStream}
object ICalStreamDao extends CollectionsUtils
with MongoOperations
with ICalStreamTypeClass
with MongoConnectionPool {
def findAll()(implicit dbName: MongoDbName): List[ICalStream] = find[ICalStream](MongoDBObject())
def saveICalStream(stream: ICalStream)(implicit dbName: MongoDbName) = save(stream)
}
|
mdia/OneCalendar
|
app/dao/ICalStreamDao.scala
|
Scala
|
apache-2.0
| 1,169 |
package com.github.btmorr.harmonia
import org.apache.spark._
import org.apache.spark.streaming._
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010._
import org.apache.kafka.common.serialization.StringDeserializer
import com.datastax.spark.connector.SomeColumns
import com.datastax.spark.connector.streaming._
import java.util.UUID
import models._
/* This doesn't currently do any real analysis--it just reads from the topic, gets the
* length of messages, and stores this in Cassandra with the message's UUID as the key.
*/
object Mastermind {
case class Message(message: String, response: String) {
val id = UUID.randomUUID()
}
def main(args: Array[String]) = {
import AppConfig.{ CassandraConfig, KafkaConfig, SparkConfig }
val ssc = getExectutionContext(SparkConfig.appName, SparkConfig.master, CassandraConfig.hostUri)
/* Modify this to put the message through a dag of models, possibly including human
* interfaces. At each point in the process, persist the stage input and output, along
* with meta-data about the model (id tag + version). The dag should include selective
* faculties along the way and/or at the end to choose from amongst many possible
* responses, and ultimately choose what to reply to the interlocutor.
*
* What I want is to be able to define a dag of models such that for each vertex, the collected
* outputs of its parents are a superset of its inputs, and ensure that the whole graph is
* a) acyclic, and b) collects to a single terminal vertex. Whenever a vertex returns, the
 * graph runner should determine which un-started vertices are now available for execution
 * and start them. All vertices should execute asynchronously.
*
* I'm sure there's a way to force the type system to guarantee this, but the first version will
* probably have something to the effect of a GraphValidator to check these properties. I also
* appear to have just described Apache Apex almost exactly, but that seems like overkill for
* this toy...
*/
val stream = getInputStream(ssc, KafkaConfig.topics)
val msgs = stream.map( _.value )
// This pipe is currently dead-end at the dummy vw model
val sentences = msgs.flatMap( in => models.SentenceSegmenter( in ) )
val lemmaTagPairs = sentences.flatMap( s => LemmatizerTagger( s ) )
val vwPredictions = lemmaTagPairs.map{ case (word, tag) => (word, SearnPredictor( (word, tag) ) ) }
val simpleResponses = msgs.map( in => (in, SimpleLookup( in ) ) )
val responses = simpleResponses.map{
case (in, resp) => Message( in, SayIt( resp ) )
}
responses.saveToCassandra(CassandraConfig.keyspace, CassandraConfig.table, SomeColumns("id", "message", "response"))
ssc.start()
ssc.awaitTermination()
}
private def getExectutionContext(appName: String, sparkMaster: String, cassandraUri: String) = {
val conf = new SparkConf().
setAppName(appName).
setMaster(sparkMaster).
set("spark.cassandra.connection.host", cassandraUri)
new StreamingContext(conf, Seconds(1))
}
  // We have tried to make this polymorphic over the deserializer type, but had trouble
  // getting the right types for the fields.
private def getInputStream(ssc: StreamingContext, topics: Array[String]) = {
val kafkaParams = Map[String, Object](
"bootstrap.servers" -> "localhost:9092",
"key.deserializer" -> classOf[StringDeserializer],
"value.deserializer" -> classOf[StringDeserializer],
"group.id" -> "use_a_separate_group_id_for_each_stream",
"auto.offset.reset" -> "latest",
"enable.auto.commit" -> (false: java.lang.Boolean)
)
KafkaUtils.createDirectStream[String, String](
ssc,
PreferConsistent,
Subscribe[String, String](topics, kafkaParams)
)
}
}
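// Hedged sketch (not part of the original file) of the kind of acyclicity check the comment
// inside main() envisions: a model DAG given as an adjacency list is acyclic exactly when a
// topological ordering exists (Kahn's algorithm). Names and types are illustrative; the
// single-terminal-vertex property is not checked here.
object GraphValidator {
  def isAcyclic[A](edges: Map[A, Set[A]]): Boolean = {
    val nodes = edges.keySet ++ edges.values.flatten
    // count incoming edges per node
    val inDegree = scala.collection.mutable.Map(nodes.map(n => n -> 0).toSeq: _*)
    for ((_, outs) <- edges; o <- outs) inDegree(o) += 1
    // repeatedly remove nodes with no remaining incoming edges
    val queue = scala.collection.mutable.Queue(nodes.filter(n => inDegree(n) == 0).toSeq: _*)
    var visited = 0
    while (queue.nonEmpty) {
      val n = queue.dequeue()
      visited += 1
      for (o <- edges.getOrElse(n, Set.empty)) {
        inDegree(o) -= 1
        if (inDegree(o) == 0) queue.enqueue(o)
      }
    }
    // if every node was removed, no cycle exists
    visited == nodes.size
  }
}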
|
btmorr/ideal-spork
|
mastermind/src/main/scala/com/github/btmorr/harmonia/Mastermind.scala
|
Scala
|
gpl-3.0
| 3,992 |
package test
import com.sun.jdi.request.ExceptionRequest
import org.scaladebugger.api.lowlevel.exceptions.{ExceptionRequestInfo, ExceptionManager}
import org.scaladebugger.api.lowlevel.requests.JDIRequestArgument
import scala.util.Try
/**
* Test exception manager that merely invokes the provided exception manager
* underneath to make it easier to mock.
*
* @param exceptionManager The underlying exception manager used to execute
* all methods
*/
class TestExceptionManager(
private val exceptionManager: ExceptionManager
) extends ExceptionManager {
override def createExceptionRequestWithId(requestId: String, exceptionName: String, notifyCaught: Boolean, notifyUncaught: Boolean, extraArguments: JDIRequestArgument*): Try[String] =
exceptionManager.createExceptionRequestWithId(requestId, exceptionName, notifyCaught, notifyUncaught, extraArguments: _*)
override def exceptionRequestList: Seq[ExceptionRequestInfo] =
exceptionManager.exceptionRequestList
override def createCatchallExceptionRequestWithId(requestId: String, notifyCaught: Boolean, notifyUncaught: Boolean, extraArguments: JDIRequestArgument*): Try[String] =
exceptionManager.createCatchallExceptionRequestWithId(requestId, notifyCaught, notifyUncaught, extraArguments: _*)
override def exceptionRequestListById: Seq[String] =
exceptionManager.exceptionRequestListById
override def hasExceptionRequestWithId(requestId: String): Boolean =
exceptionManager.hasExceptionRequestWithId(requestId)
override def removeExceptionRequest(exceptionName: String): Boolean =
exceptionManager.removeExceptionRequest(exceptionName)
override def getExceptionRequestWithId(requestId: String): Option[Seq[ExceptionRequest]] =
exceptionManager.getExceptionRequestWithId(requestId)
override def getExceptionRequestInfoWithId(requestId: String): Option[ExceptionRequestInfo] =
exceptionManager.getExceptionRequestInfoWithId(requestId)
override def hasExceptionRequest(exceptionName: String): Boolean =
exceptionManager.hasExceptionRequest(exceptionName)
override def getExceptionRequest(exceptionName: String): Option[Seq[ExceptionRequest]] =
exceptionManager.getExceptionRequest(exceptionName)
override def removeExceptionRequestWithId(requestId: String): Boolean =
exceptionManager.removeExceptionRequestWithId(requestId)
}
|
ensime/scala-debugger
|
scala-debugger-api/src/test/scala/test/TestExceptionManager.scala
|
Scala
|
apache-2.0
| 2,377 |
package scuff
/**
* Finite state machines.
* This package was primarily made to
* have an easy DSL that maps well from
* diagram to code (and vice versa),
* and uses the type system to make sure
* the transitions are sound.
*/
package object fsm
|
nilskp/scuff
|
src/main/scala/scuff/fsm/package.scala
|
Scala
|
mit
| 261 |
package observatory
import java.lang.Math._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Encoder, Encoders}
import scala.math.{acos => _, atan => _, cos => _, pow => _, sin => _, sinh => _, sqrt => _, toDegrees => _, _}
import scala.reflect.ClassTag
object Implicits {
implicit class F2C(f: Double) {
def toCelsius: Double = (f - 32) * 5 / 9
}
implicit def kryoEncoder[A](implicit ct: ClassTag[A]): Encoder[A] = Encoders.kryo[A](ct)
implicit def tuple3[A1, A2, A3](implicit e1: Encoder[A1], e2: Encoder[A2], e3: Encoder[A3]): Encoder[(A1, A2, A3)] =
Encoders.tuple[A1, A2, A3](e1, e2, e3)
}
/**
* Introduced in Week 1. Represents a location on the globe.
*
* @param lat Degrees of latitude, -90 ≤ lat ≤ 90
* @param lon Degrees of longitude, -180 ≤ lon ≤ 180
*/
case class Location(lat: Double, lon: Double) {
private val R = 6371000
private val minDistance = 1000
private val p = 6
def distanceTo(loc: Location): Double = {
val dLat = (loc.lat - lat).toRadians
val dLon = (loc.lon - lon).toRadians
val a = sin(dLat / 2) * sin(dLat / 2) + cos(lat.toRadians) * cos(loc.lat.toRadians) * sin(dLon / 2) * sin(dLon / 2)
val c = 2 * math.atan2(math.sqrt(a), sqrt(1 - a))
R * c
}
def idw(location: Location): Double = {
val distance = distanceTo(location)
1 / pow(math.max(distance, minDistance), p)
}
}
object Location {
def fromPixelIndex(index: Int): Location = {
val x = index % 360
val y = index / 360
Location(90 - y, x - 180)
}
}
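// Hedged usage sketch (not part of the original file): great-circle distance via the
// haversine implementation in Location.distanceTo. The coordinates are illustrative;
// Paris to London is roughly 344 km.
object LocationExample extends App {
  val paris = Location(48.8566, 2.3522)
  val london = Location(51.5074, -0.1278)
  println(f"Paris-London: ${paris.distanceTo(london) / 1000}%.0f km")
}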
/**
* Introduced in Week 3. Represents a tiled web map tile.
* See https://en.wikipedia.org/wiki/Tiled_web_map
* Based on http://wiki.openstreetmap.org/wiki/Slippy_map_tilenames
*
* @param x X coordinate of the tile
* @param y Y coordinate of the tile
* @param zoom Zoom level, 0 ≤ zoom ≤ 19
*/
case class Tile(x: Int, y: Int, zoom: Int) {
def toLocation: Location =
Location(
toDegrees(atan(sinh(Pi * (1.0 - 2.0 * y.toDouble / (1 << zoom))))),
x.toDouble / (1 << zoom) * 360.0 - 180.0)
}
object Tile {
def fromPixelIndex(index: Int, imgSize: Int, parent: Tile): Tile = {
val x = index % imgSize
val y = index / imgSize
val zoom = (math.log10(imgSize) / math.log10(2)).toInt
Tile(parent.x * imgSize + x, parent.y * imgSize + y, parent.zoom + zoom)
}
}
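// Hedged usage sketch (not part of the original file): the zoom-0 tile covers the whole
// Web-Mercator world, so its north-west corner maps to roughly Location(85.0511, -180.0).
object TileExample extends App {
  println(Tile(0, 0, 0).toLocation)
}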
/**
* Introduced in Week 4. Represents a point on a grid composed of
* circles of latitudes and lines of longitude.
*
* @param lat Circle of latitude in degrees, -89 ≤ lat ≤ 90
* @param lon Line of longitude in degrees, -180 ≤ lon ≤ 179
*/
case class GridLocation(lat: Int, lon: Int)
/**
* Introduced in Week 5. Represents a point inside of a grid cell.
*
* @param x X coordinate inside the cell, 0 ≤ x ≤ 1
* @param y Y coordinate inside the cell, 0 ≤ y ≤ 1
*/
case class CellPoint(x: Double, y: Double)
/**
* Introduced in Week 2. Represents an RGB color.
*
* @param red Level of red, 0 ≤ red ≤ 255
* @param green Level of green, 0 ≤ green ≤ 255
* @param blue Level of blue, 0 ≤ blue ≤ 255
*/
case class Color(red: Int, green: Int, blue: Int)
case class Station(stn: Option[Int], wban: Option[Int], lat: Option[Double], lon: Option[Double])
object Station {
val struct = StructType(Seq(
StructField("stn", IntegerType, nullable = true),
StructField("wban", IntegerType, nullable = true),
StructField("lat", DoubleType, nullable = true),
StructField("lon", DoubleType, nullable = true)
))
}
case class Record(stn: Option[Int], wban: Option[Int], month: Byte, day: Byte, temp: Temperature)
object Record {
val struct = StructType(Seq(
StructField("stn", IntegerType, nullable = true),
StructField("wban", IntegerType, nullable = true),
StructField("month", ByteType, nullable = false),
StructField("day", ByteType, nullable = false),
StructField("temp", DoubleType, nullable = false)
))
}
|
yurii-khomenko/fpScalaSpec
|
c5w1observatory/src/main/scala/observatory/models.scala
|
Scala
|
gpl-3.0
| 3,980 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.io.IOException
import java.util.Locale
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.planning._
import org.apache.spark.sql.catalyst.plans.logical.{InsertIntoDir, InsertIntoTable, LogicalPlan,
ScriptTransformation}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.command.{CreateTableCommand, DDLUtils}
import org.apache.spark.sql.execution.datasources.CreateTable
import org.apache.spark.sql.hive.execution._
import org.apache.spark.sql.internal.{HiveSerDe, SQLConf}
/**
* Determine the database, serde/format and schema of the Hive serde table, according to the storage
* properties.
*/
class ResolveHiveSerdeTable(session: SparkSession) extends Rule[LogicalPlan] {
private def determineHiveSerde(table: CatalogTable): CatalogTable = {
if (table.storage.serde.nonEmpty) {
table
} else {
if (table.bucketSpec.isDefined) {
throw new AnalysisException("Creating bucketed Hive serde table is not supported yet.")
}
val defaultStorage = HiveSerDe.getDefaultStorage(session.sessionState.conf)
val options = new HiveOptions(table.storage.properties)
val fileStorage = if (options.fileFormat.isDefined) {
HiveSerDe.sourceToSerDe(options.fileFormat.get) match {
case Some(s) =>
CatalogStorageFormat.empty.copy(
inputFormat = s.inputFormat,
outputFormat = s.outputFormat,
serde = s.serde)
case None =>
throw new IllegalArgumentException(s"invalid fileFormat: '${options.fileFormat.get}'")
}
} else if (options.hasInputOutputFormat) {
CatalogStorageFormat.empty.copy(
inputFormat = options.inputFormat,
outputFormat = options.outputFormat)
} else {
CatalogStorageFormat.empty
}
val rowStorage = if (options.serde.isDefined) {
CatalogStorageFormat.empty.copy(serde = options.serde)
} else {
CatalogStorageFormat.empty
}
val storage = table.storage.copy(
inputFormat = fileStorage.inputFormat.orElse(defaultStorage.inputFormat),
outputFormat = fileStorage.outputFormat.orElse(defaultStorage.outputFormat),
serde = rowStorage.serde.orElse(fileStorage.serde).orElse(defaultStorage.serde),
properties = options.serdeProperties)
table.copy(storage = storage)
}
}
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
case c @ CreateTable(t, _, query) if DDLUtils.isHiveTable(t) =>
      // Fills in the database name from the current database if the identifier does not specify one.
val dbName = t.identifier.database.getOrElse(session.catalog.currentDatabase)
val table = t.copy(identifier = t.identifier.copy(database = Some(dbName)))
// Determines the serde/format of Hive tables
val withStorage = determineHiveSerde(table)
// Infers the schema, if empty, because the schema could be determined by Hive
// serde.
val withSchema = if (query.isEmpty) {
val inferred = HiveUtils.inferSchema(withStorage)
if (inferred.schema.length <= 0) {
throw new AnalysisException("Unable to infer the schema. " +
s"The schema specification is required to create the table ${inferred.identifier}.")
}
inferred
} else {
withStorage
}
c.copy(tableDesc = withSchema)
}
}
class DetermineTableStats(session: SparkSession) extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
case relation @ HiveTableRelation(table, _, partitionCols)
if DDLUtils.isHiveTable(table) && table.stats.isEmpty =>
val conf = session.sessionState.conf
      // For partitioned tables, the partition directories may be outside of the table directory,
      // which makes computing the table size expensive. See how this is implemented in AnalyzeTable.
val sizeInBytes = if (conf.fallBackToHdfsForStatsEnabled && partitionCols.isEmpty) {
try {
val hadoopConf = session.sessionState.newHadoopConf()
val tablePath = new Path(table.location)
val fs: FileSystem = tablePath.getFileSystem(hadoopConf)
fs.getContentSummary(tablePath).getLength
} catch {
case e: IOException =>
logWarning("Failed to get table size from HDFS.", e)
conf.defaultSizeInBytes
}
} else {
conf.defaultSizeInBytes
}
val withStats = table.copy(stats = Some(CatalogStatistics(sizeInBytes = BigInt(sizeInBytes))))
relation.copy(tableMeta = withStats)
}
}
/**
* Replaces generic operations with specific variants that are designed to work with Hive.
*
* Note that, this rule must be run after `PreprocessTableCreation` and
* `PreprocessTableInsertion`.
*/
object HiveAnalysis extends Rule[LogicalPlan] {
override def apply(plan: LogicalPlan): LogicalPlan = plan resolveOperators {
case InsertIntoTable(r: HiveTableRelation, partSpec, query, overwrite, ifPartitionNotExists)
if DDLUtils.isHiveTable(r.tableMeta) =>
InsertIntoHiveTable(r.tableMeta, partSpec, query, overwrite,
ifPartitionNotExists, query.output.map(_.name))
case CreateTable(tableDesc, mode, None) if DDLUtils.isHiveTable(tableDesc) =>
CreateTableCommand(tableDesc, ignoreIfExists = mode == SaveMode.Ignore)
case CreateTable(tableDesc, mode, Some(query))
if DDLUtils.isHiveTable(tableDesc) && query.resolved =>
CreateHiveTableAsSelectCommand(tableDesc, query, query.output.map(_.name), mode)
case InsertIntoDir(isLocal, storage, provider, child, overwrite)
if DDLUtils.isHiveTable(provider) && child.resolved =>
val outputPath = new Path(storage.locationUri.get)
if (overwrite) DDLUtils.verifyNotReadPath(child, outputPath)
InsertIntoHiveDirCommand(isLocal, storage, child, overwrite, child.output.map(_.name))
}
}
/**
* Relation conversion from metastore relations to data source relations for better performance
*
* - When writing to non-partitioned Hive-serde Parquet/Orc tables
* - When scanning Hive-serde Parquet/ORC tables
*
* This rule must be run before all other DDL post-hoc resolution rules, i.e.
* `PreprocessTableCreation`, `PreprocessTableInsertion`, `DataSourceAnalysis` and `HiveAnalysis`.
*/
case class RelationConversions(
conf: SQLConf,
sessionCatalog: HiveSessionCatalog) extends Rule[LogicalPlan] {
private def isConvertible(relation: HiveTableRelation): Boolean = {
isConvertible(relation.tableMeta)
}
private def isConvertible(tableMeta: CatalogTable): Boolean = {
val serde = tableMeta.storage.serde.getOrElse("").toLowerCase(Locale.ROOT)
serde.contains("parquet") && SQLConf.get.getConf(HiveUtils.CONVERT_METASTORE_PARQUET) ||
serde.contains("orc") && SQLConf.get.getConf(HiveUtils.CONVERT_METASTORE_ORC)
}
private val metastoreCatalog = sessionCatalog.metastoreCatalog
override def apply(plan: LogicalPlan): LogicalPlan = {
plan resolveOperators {
// Write path
case InsertIntoTable(r: HiveTableRelation, partition, query, overwrite, ifPartitionNotExists)
// Inserting into partitioned table is not supported in Parquet/Orc data source (yet).
if query.resolved && DDLUtils.isHiveTable(r.tableMeta) &&
!r.isPartitioned && isConvertible(r) =>
InsertIntoTable(metastoreCatalog.convert(r), partition,
query, overwrite, ifPartitionNotExists)
// Read path
case relation: HiveTableRelation
if DDLUtils.isHiveTable(relation.tableMeta) && isConvertible(relation) =>
metastoreCatalog.convert(relation)
// CTAS
case CreateTable(tableDesc, mode, Some(query))
if DDLUtils.isHiveTable(tableDesc) && tableDesc.partitionColumnNames.isEmpty &&
isConvertible(tableDesc) && SQLConf.get.getConf(HiveUtils.CONVERT_METASTORE_CTAS) =>
// validation is required to be done here before relation conversion.
DDLUtils.checkDataColNames(tableDesc.copy(schema = query.schema))
OptimizedCreateHiveTableAsSelectCommand(
tableDesc, query, query.output.map(_.name), mode)
}
}
}
private[hive] trait HiveStrategies {
// Possibly being too clever with types here... or not clever enough.
self: SparkPlanner =>
val sparkSession: SparkSession
object Scripts extends Strategy {
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case ScriptTransformation(input, script, output, child, ioschema) =>
val hiveIoSchema = HiveScriptIOSchema(ioschema)
ScriptTransformationExec(input, script, output, planLater(child), hiveIoSchema) :: Nil
case _ => Nil
}
}
/**
* Retrieves data using a HiveTableScan. Partition pruning predicates are also detected and
* applied.
*/
object HiveTableScans extends Strategy {
def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
case PhysicalOperation(projectList, predicates, relation: HiveTableRelation) =>
// Filter out all predicates that only deal with partition keys, these are given to the
// hive table scan operator to be used for partition pruning.
val partitionKeyIds = AttributeSet(relation.partitionCols)
val (pruningPredicates, otherPredicates) = predicates.partition { predicate =>
!predicate.references.isEmpty &&
predicate.references.subsetOf(partitionKeyIds)
}
pruneFilterProject(
projectList,
otherPredicates,
identity[Seq[Expression]],
HiveTableScanExec(_, relation, pruningPredicates)(sparkSession)) :: Nil
case _ =>
Nil
}
}
}
|
kiszk/spark
|
sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveStrategies.scala
|
Scala
|
apache-2.0
| 10,794 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2.parquet
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.parquet.hadoop.ParquetInputFormat
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.connector.read.PartitionReaderFactory
import org.apache.spark.sql.execution.datasources.PartitioningAwareFileIndex
import org.apache.spark.sql.execution.datasources.parquet.{ParquetReadSupport, ParquetWriteSupport}
import org.apache.spark.sql.execution.datasources.v2.FileScan
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.util.CaseInsensitiveStringMap
import org.apache.spark.util.SerializableConfiguration
case class ParquetScan(
sparkSession: SparkSession,
hadoopConf: Configuration,
fileIndex: PartitioningAwareFileIndex,
dataSchema: StructType,
readDataSchema: StructType,
readPartitionSchema: StructType,
pushedFilters: Array[Filter],
options: CaseInsensitiveStringMap,
partitionFilters: Seq[Expression] = Seq.empty) extends FileScan {
override def isSplitable(path: Path): Boolean = true
override def createReaderFactory(): PartitionReaderFactory = {
val readDataSchemaAsJson = readDataSchema.json
hadoopConf.set(ParquetInputFormat.READ_SUPPORT_CLASS, classOf[ParquetReadSupport].getName)
hadoopConf.set(
ParquetReadSupport.SPARK_ROW_REQUESTED_SCHEMA,
readDataSchemaAsJson)
hadoopConf.set(
ParquetWriteSupport.SPARK_ROW_SCHEMA,
readDataSchemaAsJson)
hadoopConf.set(
SQLConf.SESSION_LOCAL_TIMEZONE.key,
sparkSession.sessionState.conf.sessionLocalTimeZone)
hadoopConf.setBoolean(
SQLConf.NESTED_SCHEMA_PRUNING_ENABLED.key,
sparkSession.sessionState.conf.nestedSchemaPruningEnabled)
hadoopConf.setBoolean(
SQLConf.CASE_SENSITIVE.key,
sparkSession.sessionState.conf.caseSensitiveAnalysis)
ParquetWriteSupport.setSchema(readDataSchema, hadoopConf)
// Sets flags for `ParquetToSparkSchemaConverter`
hadoopConf.setBoolean(
SQLConf.PARQUET_BINARY_AS_STRING.key,
sparkSession.sessionState.conf.isParquetBinaryAsString)
hadoopConf.setBoolean(
SQLConf.PARQUET_INT96_AS_TIMESTAMP.key,
sparkSession.sessionState.conf.isParquetINT96AsTimestamp)
val broadcastedConf = sparkSession.sparkContext.broadcast(
new SerializableConfiguration(hadoopConf))
ParquetPartitionReaderFactory(sparkSession.sessionState.conf, broadcastedConf,
dataSchema, readDataSchema, readPartitionSchema, pushedFilters)
}
override def equals(obj: Any): Boolean = obj match {
case p: ParquetScan =>
super.equals(p) && dataSchema == p.dataSchema && options == p.options &&
equivalentFilters(pushedFilters, p.pushedFilters)
case _ => false
}
override def hashCode(): Int = getClass.hashCode()
override def description(): String = {
super.description() + ", PushedFilters: " + seqToString(pushedFilters)
}
override def withPartitionFilters(partitionFilters: Seq[Expression]): FileScan =
this.copy(partitionFilters = partitionFilters)
}
|
ptkool/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/parquet/ParquetScan.scala
|
Scala
|
apache-2.0
| 4,074 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.arrow
import java.io.{ByteArrayOutputStream, DataOutputStream, File}
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import java.text.SimpleDateFormat
import java.util.Locale
import com.google.common.io.Files
import org.apache.arrow.memory.RootAllocator
import org.apache.arrow.vector.{VectorLoader, VectorSchemaRoot}
import org.apache.arrow.vector.ipc.JsonFileReader
import org.apache.arrow.vector.util.{ByteArrayReadableSeekableByteChannel, Validator}
import org.apache.spark.{SparkException, TaskContext}
import org.apache.spark.sql.{DataFrame, Row}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types.{BinaryType, Decimal, IntegerType, StructField, StructType}
import org.apache.spark.sql.util.ArrowUtils
import org.apache.spark.util.Utils
class ArrowConvertersSuite extends SharedSparkSession {
import testImplicits._
private var tempDataPath: String = _
override def beforeAll(): Unit = {
super.beforeAll()
tempDataPath = Utils.createTempDir(namePrefix = "arrow").getAbsolutePath
}
test("collect to arrow record batch") {
val indexData = (1 to 6).toDF("i")
val arrowBatches = indexData.toArrowBatchRdd.collect()
assert(arrowBatches.nonEmpty)
assert(arrowBatches.length == indexData.rdd.getNumPartitions)
val allocator = new RootAllocator(Long.MaxValue)
val arrowRecordBatches = arrowBatches.map(ArrowConverters.loadBatch(_, allocator))
val rowCount = arrowRecordBatches.map(_.getLength).sum
assert(rowCount === indexData.count())
arrowRecordBatches.foreach(batch => assert(batch.getNodes.size() > 0))
arrowRecordBatches.foreach(_.close())
allocator.close()
}
test("short conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_s",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 16
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b_s",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 16
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "a_s",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, -1, 2, -2, 32767, -32768 ]
| }, {
| "name" : "b_s",
| "count" : 6,
| "VALIDITY" : [ 1, 0, 0, 1, 0, 1 ],
| "DATA" : [ 1, 0, 0, -2, 0, -32768 ]
| } ]
| } ]
|}
""".stripMargin
val a_s = List[Short](1, -1, 2, -2, 32767, -32768)
val b_s = List[Option[Short]](Some(1), None, None, Some(-2), None, Some(-32768))
val df = a_s.zip(b_s).toDF("a_s", "b_s")
collectAndValidate(df, json, "integer-16bit.json")
}
test("int conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_i",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b_i",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "a_i",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, -1, 2, -2, 2147483647, -2147483648 ]
| }, {
| "name" : "b_i",
| "count" : 6,
| "VALIDITY" : [ 1, 0, 0, 1, 0, 1 ],
| "DATA" : [ 1, 0, 0, -2, 0, -2147483648 ]
| } ]
| } ]
|}
""".stripMargin
val a_i = List[Int](1, -1, 2, -2, 2147483647, -2147483648)
val b_i = List[Option[Int]](Some(1), None, None, Some(-2), None, Some(-2147483648))
val df = a_i.zip(b_i).toDF("a_i", "b_i")
collectAndValidate(df, json, "integer-32bit.json")
}
test("long conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_l",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 64
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b_l",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 64
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "a_l",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, -1, 2, -2, 9223372036854775807, -9223372036854775808 ]
| }, {
| "name" : "b_l",
| "count" : 6,
| "VALIDITY" : [ 1, 0, 0, 1, 0, 1 ],
| "DATA" : [ 1, 0, 0, -2, 0, -9223372036854775808 ]
| } ]
| } ]
|}
""".stripMargin
val a_l = List[Long](1, -1, 2, -2, 9223372036854775807L, -9223372036854775808L)
val b_l = List[Option[Long]](Some(1), None, None, Some(-2), None, Some(-9223372036854775808L))
val df = a_l.zip(b_l).toDF("a_l", "b_l")
collectAndValidate(df, json, "integer-64bit.json")
}
test("float conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_f",
| "type" : {
| "name" : "floatingpoint",
| "precision" : "SINGLE"
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b_f",
| "type" : {
| "name" : "floatingpoint",
| "precision" : "SINGLE"
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "a_f",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1.0, 2.0, 0.01, 200.0, 0.0001, 20000.0 ]
| }, {
| "name" : "b_f",
| "count" : 6,
| "VALIDITY" : [ 1, 0, 0, 1, 0, 1 ],
| "DATA" : [ 1.1, 0.0, 0.0, 2.2, 0.0, 3.3 ]
| } ]
| } ]
|}
""".stripMargin
val a_f = List(1.0f, 2.0f, 0.01f, 200.0f, 0.0001f, 20000.0f)
val b_f = List[Option[Float]](Some(1.1f), None, None, Some(2.2f), None, Some(3.3f))
val df = a_f.zip(b_f).toDF("a_f", "b_f")
collectAndValidate(df, json, "floating_point-single_precision.json")
}
test("double conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_d",
| "type" : {
| "name" : "floatingpoint",
| "precision" : "DOUBLE"
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b_d",
| "type" : {
| "name" : "floatingpoint",
| "precision" : "DOUBLE"
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "a_d",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1.0, 2.0, 0.01, 200.0, 1.0E-4, 20000.0 ]
| }, {
| "name" : "b_d",
| "count" : 6,
| "VALIDITY" : [ 1, 0, 0, 1, 0, 1 ],
| "DATA" : [ 1.1, 0.0, 0.0, 2.2, 0.0, 3.3 ]
| } ]
| } ]
|}
""".stripMargin
val a_d = List(1.0, 2.0, 0.01, 200.0, 0.0001, 20000.0)
val b_d = List[Option[Double]](Some(1.1), None, None, Some(2.2), None, Some(3.3))
val df = a_d.zip(b_d).toDF("a_d", "b_d")
collectAndValidate(df, json, "floating_point-double_precision.json")
}
test("decimal conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_d",
| "type" : {
| "name" : "decimal",
| "precision" : 38,
| "scale" : 18
| },
| "nullable" : true,
| "children" : [ ]
| }, {
| "name" : "b_d",
| "type" : {
| "name" : "decimal",
| "precision" : 38,
| "scale" : 18
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 7,
| "columns" : [ {
| "name" : "a_d",
| "count" : 7,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1, 1 ],
| "DATA" : [
| "1000000000000000000",
| "2000000000000000000",
| "10000000000000000",
| "200000000000000000000",
| "100000000000000",
| "20000000000000000000000",
| "30000000000000000000" ]
| }, {
| "name" : "b_d",
| "count" : 7,
| "VALIDITY" : [ 1, 0, 0, 1, 0, 1, 0 ],
| "DATA" : [
| "1100000000000000000",
| "0",
| "0",
| "2200000000000000000",
| "0",
| "3300000000000000000",
| "0" ]
| } ]
| } ]
|}
""".stripMargin
val a_d = List(1.0, 2.0, 0.01, 200.0, 0.0001, 20000.0, 30.0).map(Decimal(_))
val b_d = List(Some(Decimal(1.1)), None, None, Some(Decimal(2.2)), None, Some(Decimal(3.3)),
Some(Decimal("123456789012345678901234567890")))
val df = a_d.zip(b_d).toDF("a_d", "b_d")
collectAndValidate(df, json, "decimalData.json")
}
test("index conversion") {
val data = List[Int](1, 2, 3, 4, 5, 6)
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "i",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "i",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, 2, 3, 4, 5, 6 ]
| } ]
| } ]
|}
""".stripMargin
val df = data.toDF("i")
collectAndValidate(df, json, "indexData-ints.json")
}
test("mixed numeric type conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 16
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b",
| "type" : {
| "name" : "floatingpoint",
| "precision" : "SINGLE"
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "c",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "d",
| "type" : {
| "name" : "floatingpoint",
| "precision" : "DOUBLE"
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "e",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 64
| },
| "nullable" : false,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "a",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, 2, 3, 4, 5, 6 ]
| }, {
| "name" : "b",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 ]
| }, {
| "name" : "c",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, 2, 3, 4, 5, 6 ]
| }, {
| "name" : "d",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 ]
| }, {
| "name" : "e",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, 2, 3, 4, 5, 6 ]
| } ]
| } ]
|}
""".stripMargin
val data = List(1, 2, 3, 4, 5, 6)
val data_tuples = for (d <- data) yield {
(d.toShort, d.toFloat, d.toInt, d.toDouble, d.toLong)
}
val df = data_tuples.toDF("a", "b", "c", "d", "e")
collectAndValidate(df, json, "mixed_numeric_types.json")
}
test("string type conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "upper_case",
| "type" : {
| "name" : "utf8"
| },
| "nullable" : true,
| "children" : [ ]
| }, {
| "name" : "lower_case",
| "type" : {
| "name" : "utf8"
| },
| "nullable" : true,
| "children" : [ ]
| }, {
| "name" : "null_str",
| "type" : {
| "name" : "utf8"
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 3,
| "columns" : [ {
| "name" : "upper_case",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "OFFSET" : [ 0, 1, 2, 3 ],
| "DATA" : [ "A", "B", "C" ]
| }, {
| "name" : "lower_case",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "OFFSET" : [ 0, 1, 2, 3 ],
| "DATA" : [ "a", "b", "c" ]
| }, {
| "name" : "null_str",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 0 ],
| "OFFSET" : [ 0, 2, 5, 5 ],
| "DATA" : [ "ab", "CDE", "" ]
| } ]
| } ]
|}
""".stripMargin
val upperCase = Seq("A", "B", "C")
val lowerCase = Seq("a", "b", "c")
val nullStr = Seq("ab", "CDE", null)
val df = (upperCase, lowerCase, nullStr).zipped.toList
.toDF("upper_case", "lower_case", "null_str")
collectAndValidate(df, json, "stringData.json")
}
test("boolean type conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_bool",
| "type" : {
| "name" : "bool"
| },
| "nullable" : false,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 4,
| "columns" : [ {
| "name" : "a_bool",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "DATA" : [ true, true, false, true ]
| } ]
| } ]
|}
""".stripMargin
val df = Seq(true, true, false, true).toDF("a_bool")
collectAndValidate(df, json, "boolData.json")
}
test("byte type conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_byte",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 8
| },
| "nullable" : false,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 4,
| "columns" : [ {
| "name" : "a_byte",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "DATA" : [ 1, -1, 64, 127 ]
| } ]
| } ]
|}
|
""".stripMargin
val df = List[Byte](1.toByte, (-1).toByte, 64.toByte, Byte.MaxValue).toDF("a_byte")
collectAndValidate(df, json, "byteData.json")
}
test("binary type conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_binary",
| "type" : {
| "name" : "binary"
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 3,
| "columns" : [ {
| "name" : "a_binary",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "OFFSET" : [ 0, 3, 4, 6 ],
| "DATA" : [ "616263", "64", "6566" ]
| } ]
| } ]
|}
""".stripMargin
val data = Seq("abc", "d", "ef")
val rdd = sparkContext.parallelize(data.map(s => Row(s.getBytes("utf-8"))))
val df = spark.createDataFrame(rdd, StructType(Seq(StructField("a_binary", BinaryType))))
collectAndValidate(df, json, "binaryData.json")
}
test("date type conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "date",
| "type" : {
| "name" : "date",
| "unit" : "DAY"
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 4,
| "columns" : [ {
| "name" : "date",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "DATA" : [ -1, 0, 16533, 382607 ]
| } ]
| } ]
|}
""".stripMargin
val d1 = DateTimeUtils.toJavaDate(-1) // "1969-12-31"
val d2 = DateTimeUtils.toJavaDate(0) // "1970-01-01"
val d3 = Date.valueOf("2015-04-08")
val d4 = Date.valueOf("3017-07-18")
val df = Seq(d1, d2, d3, d4).toDF("date")
collectAndValidate(df, json, "dateData.json")
}
test("timestamp type conversion") {
withSQLConf(SQLConf.SESSION_LOCAL_TIMEZONE.key -> "America/Los_Angeles") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "timestamp",
| "type" : {
| "name" : "timestamp",
| "unit" : "MICROSECOND",
| "timezone" : "America/Los_Angeles"
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 4,
| "columns" : [ {
| "name" : "timestamp",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "DATA" : [ -1234, 0, 1365383415567000, 33057298500000000 ]
| } ]
| } ]
|}
""".stripMargin
val sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS z", Locale.US)
val ts1 = DateTimeUtils.toJavaTimestamp(-1234L)
val ts2 = DateTimeUtils.toJavaTimestamp(0L)
val ts3 = new Timestamp(sdf.parse("2013-04-08 01:10:15.567 UTC").getTime)
val ts4 = new Timestamp(sdf.parse("3017-07-18 14:55:00.000 UTC").getTime)
val data = Seq(ts1, ts2, ts3, ts4)
val df = data.toDF("timestamp")
collectAndValidate(df, json, "timestampData.json", "America/Los_Angeles")
}
}
test("floating-point NaN") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "NaN_f",
| "type" : {
| "name" : "floatingpoint",
| "precision" : "SINGLE"
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "NaN_d",
| "type" : {
| "name" : "floatingpoint",
| "precision" : "DOUBLE"
| },
| "nullable" : false,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 2,
| "columns" : [ {
| "name" : "NaN_f",
| "count" : 2,
| "VALIDITY" : [ 1, 1 ],
| "DATA" : [ 1.2000000476837158, NaN ]
| }, {
| "name" : "NaN_d",
| "count" : 2,
| "VALIDITY" : [ 1, 1 ],
| "DATA" : [ NaN, 1.2 ]
| } ]
| } ]
|}
""".stripMargin
val fnan = Seq(1.2F, Float.NaN)
val dnan = Seq(Double.NaN, 1.2)
val df = fnan.zip(dnan).toDF("NaN_f", "NaN_d")
collectAndValidate(df, json, "nanData-floating_point.json")
}
test("array type conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_arr",
| "nullable" : true,
| "type" : {
| "name" : "list"
| },
| "children" : [ {
| "name" : "element",
| "nullable" : false,
| "type" : {
| "name" : "int",
| "bitWidth" : 32,
| "isSigned" : true
| },
| "children" : [ ]
| } ]
| }, {
| "name" : "b_arr",
| "nullable" : true,
| "type" : {
| "name" : "list"
| },
| "children" : [ {
| "name" : "element",
| "nullable" : false,
| "type" : {
| "name" : "int",
| "bitWidth" : 32,
| "isSigned" : true
| },
| "children" : [ ]
| } ]
| }, {
| "name" : "c_arr",
| "nullable" : true,
| "type" : {
| "name" : "list"
| },
| "children" : [ {
| "name" : "element",
| "nullable" : true,
| "type" : {
| "name" : "int",
| "bitWidth" : 32,
| "isSigned" : true
| },
| "children" : [ ]
| } ]
| }, {
| "name" : "d_arr",
| "nullable" : true,
| "type" : {
| "name" : "list"
| },
| "children" : [ {
| "name" : "element",
| "nullable" : true,
| "type" : {
| "name" : "list"
| },
| "children" : [ {
| "name" : "element",
| "nullable" : false,
| "type" : {
| "name" : "int",
| "bitWidth" : 32,
| "isSigned" : true
| },
| "children" : [ ]
| } ]
| } ]
| } ]
| },
| "batches" : [ {
| "count" : 4,
| "columns" : [ {
| "name" : "a_arr",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "OFFSET" : [ 0, 2, 4, 4, 5 ],
| "children" : [ {
| "name" : "element",
| "count" : 5,
| "VALIDITY" : [ 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, 2, 3, 4, 5 ]
| } ]
| }, {
| "name" : "b_arr",
| "count" : 4,
| "VALIDITY" : [ 1, 0, 1, 0 ],
| "OFFSET" : [ 0, 2, 2, 2, 2 ],
| "children" : [ {
| "name" : "element",
| "count" : 2,
| "VALIDITY" : [ 1, 1 ],
| "DATA" : [ 1, 2 ]
| } ]
| }, {
| "name" : "c_arr",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "OFFSET" : [ 0, 2, 4, 4, 5 ],
| "children" : [ {
| "name" : "element",
| "count" : 5,
| "VALIDITY" : [ 1, 1, 1, 0, 1 ],
| "DATA" : [ 1, 2, 3, 0, 5 ]
| } ]
| }, {
| "name" : "d_arr",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "OFFSET" : [ 0, 1, 3, 3, 4 ],
| "children" : [ {
| "name" : "element",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "OFFSET" : [ 0, 2, 3, 3, 4 ],
| "children" : [ {
| "name" : "element",
| "count" : 4,
| "VALIDITY" : [ 1, 1, 1, 1 ],
| "DATA" : [ 1, 2, 3, 5 ]
| } ]
| } ]
| } ]
| } ]
|}
""".stripMargin
val aArr = Seq(Seq(1, 2), Seq(3, 4), Seq(), Seq(5))
val bArr = Seq(Some(Seq(1, 2)), None, Some(Seq()), None)
val cArr = Seq(Seq(Some(1), Some(2)), Seq(Some(3), None), Seq(), Seq(Some(5)))
val dArr = Seq(Seq(Seq(1, 2)), Seq(Seq(3), Seq()), Seq(), Seq(Seq(5)))
val df = aArr.zip(bArr).zip(cArr).zip(dArr).map {
case (((a, b), c), d) => (a, b, c, d)
}.toDF("a_arr", "b_arr", "c_arr", "d_arr")
collectAndValidate(df, json, "arrayData.json")
}
test("struct type conversion") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_struct",
| "nullable" : false,
| "type" : {
| "name" : "struct"
| },
| "children" : [ {
| "name" : "i",
| "nullable" : false,
| "type" : {
| "name" : "int",
| "bitWidth" : 32,
| "isSigned" : true
| },
| "children" : [ ]
| } ]
| }, {
| "name" : "b_struct",
| "nullable" : true,
| "type" : {
| "name" : "struct"
| },
| "children" : [ {
| "name" : "i",
| "nullable" : false,
| "type" : {
| "name" : "int",
| "bitWidth" : 32,
| "isSigned" : true
| },
| "children" : [ ]
| } ]
| }, {
| "name" : "c_struct",
| "nullable" : false,
| "type" : {
| "name" : "struct"
| },
| "children" : [ {
| "name" : "i",
| "nullable" : true,
| "type" : {
| "name" : "int",
| "bitWidth" : 32,
| "isSigned" : true
| },
| "children" : [ ]
| } ]
| }, {
| "name" : "d_struct",
| "nullable" : true,
| "type" : {
| "name" : "struct"
| },
| "children" : [ {
| "name" : "nested",
| "nullable" : true,
| "type" : {
| "name" : "struct"
| },
| "children" : [ {
| "name" : "i",
| "nullable" : true,
| "type" : {
| "name" : "int",
| "bitWidth" : 32,
| "isSigned" : true
| },
| "children" : [ ]
| } ]
| } ]
| } ]
| },
| "batches" : [ {
| "count" : 3,
| "columns" : [ {
| "name" : "a_struct",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "children" : [ {
| "name" : "i",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "DATA" : [ 1, 2, 3 ]
| } ]
| }, {
| "name" : "b_struct",
| "count" : 3,
| "VALIDITY" : [ 1, 0, 1 ],
| "children" : [ {
| "name" : "i",
| "count" : 3,
| "VALIDITY" : [ 1, 0, 1 ],
| "DATA" : [ 1, 2, 3 ]
| } ]
| }, {
| "name" : "c_struct",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "children" : [ {
| "name" : "i",
| "count" : 3,
| "VALIDITY" : [ 1, 0, 1 ],
| "DATA" : [ 1, 2, 3 ]
| } ]
| }, {
| "name" : "d_struct",
| "count" : 3,
| "VALIDITY" : [ 1, 0, 1 ],
| "children" : [ {
| "name" : "nested",
| "count" : 3,
| "VALIDITY" : [ 1, 0, 0 ],
| "children" : [ {
| "name" : "i",
| "count" : 3,
| "VALIDITY" : [ 1, 0, 0 ],
| "DATA" : [ 1, 2, 0 ]
| } ]
| } ]
| } ]
| } ]
|}
""".stripMargin
val aStruct = Seq(Row(1), Row(2), Row(3))
val bStruct = Seq(Row(1), null, Row(3))
val cStruct = Seq(Row(1), Row(null), Row(3))
val dStruct = Seq(Row(Row(1)), null, Row(null))
val data = aStruct.zip(bStruct).zip(cStruct).zip(dStruct).map {
case (((a, b), c), d) => Row(a, b, c, d)
}
val rdd = sparkContext.parallelize(data)
val schema = new StructType()
.add("a_struct", new StructType().add("i", IntegerType, nullable = false), nullable = false)
.add("b_struct", new StructType().add("i", IntegerType, nullable = false), nullable = true)
.add("c_struct", new StructType().add("i", IntegerType, nullable = true), nullable = false)
.add("d_struct", new StructType().add("nested", new StructType().add("i", IntegerType)))
val df = spark.createDataFrame(rdd, schema)
collectAndValidate(df, json, "structData.json")
}
test("partitioned DataFrame") {
val json1 =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 3,
| "columns" : [ {
| "name" : "a",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "DATA" : [ 1, 1, 2 ]
| }, {
| "name" : "b",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "DATA" : [ 1, 2, 1 ]
| } ]
| } ]
|}
""".stripMargin
val json2 =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 3,
| "columns" : [ {
| "name" : "a",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "DATA" : [ 2, 3, 3 ]
| }, {
| "name" : "b",
| "count" : 3,
| "VALIDITY" : [ 1, 1, 1 ],
| "DATA" : [ 2, 1, 2 ]
| } ]
| } ]
|}
""".stripMargin
val arrowBatches = testData2.toArrowBatchRdd.collect()
// NOTE: testData2 should have 2 partitions -> 2 arrow batches
assert(arrowBatches.length === 2)
val schema = testData2.schema
val tempFile1 = new File(tempDataPath, "testData2-ints-part1.json")
val tempFile2 = new File(tempDataPath, "testData2-ints-part2.json")
Files.write(json1, tempFile1, StandardCharsets.UTF_8)
Files.write(json2, tempFile2, StandardCharsets.UTF_8)
validateConversion(schema, arrowBatches(0), tempFile1)
validateConversion(schema, arrowBatches(1), tempFile2)
}
test("empty frame collect") {
val arrowBatches = spark.emptyDataFrame.toArrowBatchRdd.collect()
assert(arrowBatches.isEmpty)
val filteredDF = List[Int](1, 2, 3, 4, 5, 6).toDF("i")
val filteredArrowBatches = filteredDF.filter("i < 0").toArrowBatchRdd.collect()
assert(filteredArrowBatches.isEmpty)
}
test("empty partition collect") {
val emptyPart = spark.sparkContext.parallelize(Seq(1), 2).toDF("i")
val arrowBatches = emptyPart.toArrowBatchRdd.collect()
assert(arrowBatches.length === 1)
val allocator = new RootAllocator(Long.MaxValue)
val arrowRecordBatches = arrowBatches.map(ArrowConverters.loadBatch(_, allocator))
assert(arrowRecordBatches.head.getLength == 1)
arrowRecordBatches.foreach(_.close())
allocator.close()
}
test("max records in batch conf") {
val totalRecords = 10
val maxRecordsPerBatch = 3
spark.conf.set(SQLConf.ARROW_EXECUTION_MAX_RECORDS_PER_BATCH.key, maxRecordsPerBatch)
val df = spark.sparkContext.parallelize(1 to totalRecords, 2).toDF("i")
val arrowBatches = df.toArrowBatchRdd.collect()
assert(arrowBatches.length >= 4)
val allocator = new RootAllocator(Long.MaxValue)
val arrowRecordBatches = arrowBatches.map(ArrowConverters.loadBatch(_, allocator))
var recordCount = 0
arrowRecordBatches.foreach { batch =>
assert(batch.getLength > 0)
assert(batch.getLength <= maxRecordsPerBatch)
recordCount += batch.getLength
batch.close()
}
assert(recordCount == totalRecords)
allocator.close()
spark.conf.unset(SQLConf.ARROW_EXECUTION_MAX_RECORDS_PER_BATCH.key)
}
testQuietly("unsupported types") {
def runUnsupported(block: => Unit): Unit = {
val msg = intercept[UnsupportedOperationException] {
block
}
assert(msg.getMessage.contains("is not supported"))
}
runUnsupported { calenderIntervalData.toDF().toArrowBatchRdd.collect() }
}
test("test Arrow Validator") {
val json =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "a_i",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| }, {
| "name" : "b_i",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : true,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "a_i",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, -1, 2, -2, 2147483647, -2147483648 ]
| }, {
| "name" : "b_i",
| "count" : 6,
| "VALIDITY" : [ 1, 0, 0, 1, 0, 1 ],
| "DATA" : [ 1, 0, 0, -2, 0, -2147483648 ]
| } ]
| } ]
|}
""".stripMargin
val json_diff_col_order =
s"""
|{
| "schema" : {
| "fields" : [ {
| "name" : "b_i",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : true,
| "children" : [ ]
| }, {
| "name" : "a_i",
| "type" : {
| "name" : "int",
| "isSigned" : true,
| "bitWidth" : 32
| },
| "nullable" : false,
| "children" : [ ]
| } ]
| },
| "batches" : [ {
| "count" : 6,
| "columns" : [ {
| "name" : "a_i",
| "count" : 6,
| "VALIDITY" : [ 1, 1, 1, 1, 1, 1 ],
| "DATA" : [ 1, -1, 2, -2, 2147483647, -2147483648 ]
| }, {
| "name" : "b_i",
| "count" : 6,
| "VALIDITY" : [ 1, 0, 0, 1, 0, 1 ],
| "DATA" : [ 1, 0, 0, -2, 0, -2147483648 ]
| } ]
| } ]
|}
""".stripMargin
val a_i = List[Int](1, -1, 2, -2, 2147483647, -2147483648)
val b_i = List[Option[Int]](Some(1), None, None, Some(-2), None, Some(-2147483648))
val df = a_i.zip(b_i).toDF("a_i", "b_i")
// Different schema
intercept[IllegalArgumentException] {
collectAndValidate(df, json_diff_col_order, "validator_diff_schema.json")
}
// Different values
intercept[IllegalArgumentException] {
collectAndValidate(df.sort($"a_i".desc), json, "validator_diff_values.json")
}
}
test("roundtrip arrow batches") {
val inputRows = (0 until 9).map { i =>
InternalRow(i)
} :+ InternalRow(null)
val schema = StructType(Seq(StructField("int", IntegerType, nullable = true)))
val ctx = TaskContext.empty()
val batchIter = ArrowConverters.toBatchIterator(inputRows.toIterator, schema, 5, null, ctx)
val outputRowIter = ArrowConverters.fromBatchIterator(batchIter, schema, null, ctx)
var count = 0
outputRowIter.zipWithIndex.foreach { case (row, i) =>
if (i != 9) {
assert(row.getInt(0) == i)
} else {
assert(row.isNullAt(0))
}
count += 1
}
assert(count == inputRows.length)
}
test("ArrowBatchStreamWriter roundtrip") {
val inputRows = (0 until 9).map(InternalRow(_)) :+ InternalRow(null)
val schema = StructType(Seq(StructField("int", IntegerType, nullable = true)))
val ctx = TaskContext.empty()
val batchIter = ArrowConverters.toBatchIterator(inputRows.toIterator, schema, 5, null, ctx)
// Write batches to Arrow stream format as a byte array
val out = new ByteArrayOutputStream()
Utils.tryWithResource(new DataOutputStream(out)) { dataOut =>
val writer = new ArrowBatchStreamWriter(schema, dataOut, null)
writer.writeBatches(batchIter)
writer.end()
}
// Read Arrow stream into batches, then convert back to rows
val in = new ByteArrayReadableSeekableByteChannel(out.toByteArray)
val readBatches = ArrowConverters.getBatchesFromStream(in)
val outputRowIter = ArrowConverters.fromBatchIterator(readBatches, schema, null, ctx)
var count = 0
outputRowIter.zipWithIndex.foreach { case (row, i) =>
if (i != 9) {
assert(row.getInt(0) == i)
} else {
assert(row.isNullAt(0))
}
count += 1
}
assert(count == inputRows.length)
}
/** Test that a converted DataFrame to Arrow record batch equals batch read from JSON file */
private def collectAndValidate(
df: DataFrame, json: String, file: String, timeZoneId: String = null): Unit = {
// NOTE: coalesce to single partition because can only load 1 batch in validator
val batchBytes = df.coalesce(1).toArrowBatchRdd.collect().head
val tempFile = new File(tempDataPath, file)
Files.write(json, tempFile, StandardCharsets.UTF_8)
validateConversion(df.schema, batchBytes, tempFile, timeZoneId)
}
private def validateConversion(
sparkSchema: StructType,
batchBytes: Array[Byte],
jsonFile: File,
timeZoneId: String = null): Unit = {
val allocator = new RootAllocator(Long.MaxValue)
val jsonReader = new JsonFileReader(jsonFile, allocator)
val arrowSchema = ArrowUtils.toArrowSchema(sparkSchema, timeZoneId)
val jsonSchema = jsonReader.start()
Validator.compareSchemas(arrowSchema, jsonSchema)
val arrowRoot = VectorSchemaRoot.create(arrowSchema, allocator)
val vectorLoader = new VectorLoader(arrowRoot)
val arrowRecordBatch = ArrowConverters.loadBatch(batchBytes, allocator)
vectorLoader.load(arrowRecordBatch)
val jsonRoot = jsonReader.read()
Validator.compareVectorSchemaRoot(arrowRoot, jsonRoot)
jsonRoot.close()
jsonReader.close()
arrowRecordBatch.close()
arrowRoot.close()
allocator.close()
}
}
|
goldmedal/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/arrow/ArrowConvertersSuite.scala
|
Scala
|
apache-2.0
| 44,232 |
package com.ttpresentation.endpoint
import spray.routing.HttpService
import com.typesafe.scalalogging.slf4j.Logging
import spray.httpx.Json4sJacksonSupport
import scala.concurrent.ExecutionContext
import spray.http.MediaTypes._
import spray.http.StatusCodes._
import akka.actor.Actor
import com.ttpresentation.dao.UserDao
import com.ttpresentation.spraylib.LocalPathMatchers
import com.ttpresentation.mongo.MongoAuthSupport
import com.ttpresentation.model.User
/**
* Created by ctcarrier on 3/3/14.
*/
trait UserActor extends Actor with UserEndpoint {
val imageDirectoryDao: UserDao
def actorRefFactory = context
def receive = runRoute(userRoute)
}
trait UserEndpoint extends HttpService with Logging with Json4sJacksonSupport with LocalPathMatchers with MongoAuthSupport {
import ExecutionContext.Implicits.global
val directGetUser = get & authenticate(httpMongo())
val postUser = post & respondWithStatus(Created) & entity(as[User])
val userDao: UserDao
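  // Routes assembled below: within the "users" prefix, an authenticated GET completes with the
  // caller's User, and a POST persists the submitted User via userDao.save and replies
  // 201 Created; both responses are rendered as application/json.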
def userRoute =
respondWithMediaType(`application/json`) {
pathPrefix("users") {
directGetUser { user =>
complete {
user
}
} ~
postUser { user =>
detach() {complete {
userDao.save(user)
}}
}
}
}
}
|
ctcarrier/ttpresentation
|
src/main/scala/com/ttpresentation/endpoint/UserEndpoint.scala
|
Scala
|
mit
| 1,300 |
package com.brkyvz.spark.linalg
import com.holdenkarau.spark.testing.PerTestSparkContext
import org.scalatest.FunSuite
import org.apache.spark.mllib.linalg.{DenseMatrix, Matrices}
class MatricesSuite extends FunSuite with PerTestSparkContext {
private val a = Matrices.dense(2, 2, Array(1, 2, 3, 4))
private val b = new DenseMatrix(2, 2, Array(0, -2, 0, -2))
private val c = Matrices.sparse(2, 2, Array(0, 1, 1), Array(0), Array(1.0))
private val x = Matrices.sparse(3, 2, Array(0, 1, 2), Array(0, 2), Array(0.5, 2.0))
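  // For reference, the column-major layouts above are:
  //   a = [[1, 3], [2, 4]], b = [[0, 0], [-2, -2]],
  //   c has its single non-zero at (0, 0) = 1.0, and x is 3x2 with 0.5 at (0, 0) and 2.0 at (2, 1).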
test("basic arithmetic") {
val buffer = new Array[Double](4)
val wrapper = new DenseMatrix(2, 2, buffer)
wrapper := a + b
assert(wrapper.values.toSeq === Seq(1.0, 0.0, 3.0, 2.0))
assert(buffer.toSeq === Seq(1.0, 0.0, 3.0, 2.0))
val buffer2 = new Array[Double](4)
(a + b).compute(Option(buffer2))
assert(buffer2.toSeq === Seq(1.0, 0.0, 3.0, 2.0))
wrapper := a * 2
assert(wrapper.values.toSeq === Seq(2.0, 4.0, 6.0, 8.0))
wrapper := a - c
assert(wrapper.values.toSeq === Seq(0.0, 2.0, 3.0, 4.0))
val d = b.copy
d += -2
assert(d.values.toSeq === Seq(-2.0, -4.0, -2.0, -4.0))
}
test("requires right buffer size") {
val wrongSizedBuffer = new Array[Double](5)
intercept[IllegalArgumentException]((a + b).compute(Option(wrongSizedBuffer)))
}
test("size mismatch throws error") {
intercept[IllegalArgumentException]((a + x).compute())
}
test("scalar op") {
val buffer = new Array[Double](4)
(a + 2).compute(Option(buffer))
assert(buffer.toSeq === Seq(3.0, 4.0, 5.0, 6.0))
(c + 2).compute(Option(buffer))
assert(buffer.toSeq === Seq(3.0, 2.0, 2.0, 2.0))
val sparseBuffer = new Array[Double](6)
(x * 3).compute(Option(sparseBuffer))
assert(sparseBuffer.toSeq === Seq(1.5, 0.0, 0.0, 0.0, 0.0, 6.0))
}
test("funcs") {
import com.brkyvz.spark.linalg.funcs._
val buffer = new Array[Double](4)
val buffer2 = new Array[Double](6)
pow(a, c).compute(Option(buffer))
assert(buffer.toSeq === Seq(1.0, 1.0, 1.0, 1.0))
val sparseBuffer = new Array[Double](6)
exp(x).compute(Option(sparseBuffer))
assert(sparseBuffer.toSeq ===
Seq(java.lang.Math.exp(0.5), 1.0, 1.0, 1.0, 1.0, java.lang.Math.exp(2.0)))
apply(a, c, (m: Double, n: Double) => m + n).compute(Option(buffer))
assert(buffer.toSeq === Seq(2.0, 2.0, 3.0, 4.0))
}
test("blas methods") {
var d = new DenseMatrixWrapper(2, 2, a.copy.toArray)
d += a * 3
val e = (a * 4).compute()
assert(d.asInstanceOf[DenseMatrix].values.toSeq === e.asInstanceOf[DenseMatrix].values.toSeq)
val A = DenseMatrix.eye(2)
A += c * a
assert(A.values.toSeq === Seq(2.0, 0.0, 3.0, 1.0))
val B = DenseMatrix.zeros(2, 2)
B := a * b
val firstVals = B.values.clone().toSeq
B := a * b
assert(B.values.toSeq === firstVals)
}
test("rdd methods") {
val rdd = sc.parallelize(Seq(a, b, c))
val Array(res1, res2, res3) =
rdd.map(v => (v + 2).compute().asInstanceOf[DenseMatrix]).collect()
assert(res1.values.toSeq === Seq(3.0, 4.0, 5.0, 6.0))
assert(res2.values.toSeq === Seq(2.0, 0.0, 2.0, 0.0))
assert(res3.values.toSeq === Seq(3.0, 2.0, 2.0, 2.0))
val Array(res4, res5, res6) = rdd.map(v => v + 2).map(_ - 1).collect()
assert(res4.compute().asInstanceOf[DenseMatrix].values.toSeq === Seq(2.0, 3.0, 4.0, 5.0))
assert(res5.compute().asInstanceOf[DenseMatrix].values.toSeq === Seq(1.0, -1.0, 1.0, -1.0))
assert(res6.compute().asInstanceOf[DenseMatrix].values.toSeq === Seq(2.0, 1.0, 1.0, 1.0))
val sum = rdd.aggregate(DenseMatrix.zeros(2, 2))(
seqOp = (base, element) => base += element,
combOp = (base1, base2) => base1 += base2
)
assert(sum.values.toSeq === Seq(2.0, 0.0, 3.0, 2.0))
val sum2 = rdd.aggregate(DenseMatrix.zeros(2, 2))(
seqOp = (base, element) => base += element * 2 - 1,
combOp = (base1, base2) => base1 += base2
)
assert(sum2.values.toSeq === Seq(1.0, -3.0, 3.0, 1.0))
}
}
|
brkyvz/lazy-linalg
|
src/test/scala/com/brkyvz/spark/linalg/MatricesSuite.scala
|
Scala
|
apache-2.0
| 4,052 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.app.nlp.parse
import cc.factorie.app.nlp.load._
import org.junit.Test
import org.junit.Before
import org.junit.Assert
class TestTransitionBasedParser {
val nullTokenString = "<NULL>"
val rootTokenString = "<ROOT>"
val testFileName = this.getClass.getResource("/parser-test-input").getPath()
var parser: TransitionBasedParser = _
@Before
def setUp() = {
parser = new TransitionBasedParser()
}
@Test
def testDepToken() = {
val numThreads = 1
/* This file contains just one sentence right now */
val testDoc = LoadOntonotes5.fromFilename(testFileName).head
val testSentences = testDoc.sentences
val parseDecisions = testSentences.map(s => parser.generateDecisions(Seq(s), parser.ParserConstants.TRAINING, numThreads))
/* Check that the relations between tokens are correct */
parseDecisions.map(_.last).zip(testSentences).foreach(ds => {
val parseTree = ds._2.attr[ParseTree]
println(s"Sentence: ${ds._2.tokens.map(_.string).mkString(" ")}")
val tokens = ds._1.state.sentenceTokens
tokens.takeRight(tokens.length - 2).foreach(tok => {
val tokTreeIdx = tok.thisIdx - 1
val thisHead = if (tok.hasHead) tok.head.depToken else null
val trueHead = parseTree.parent(tokTreeIdx)
if (trueHead == null || thisHead == null) {
// if one has no head then neither should, and this should be the root
if (thisHead != null) {
Assert.assertEquals(s"Head of token ``${tok.form}'' incorrect.", rootTokenString, thisHead.form)
Assert.assertEquals(s"Label of token ``${tok.form}'' incorrect.", "root", tok.head.label)
} else {
Assert.assertNotNull(s"Head of token ``${tok.form}'' incorrect.", thisHead)
}
} else {
// should be the same word
Assert.assertEquals(s"Head of token ``${tok.form}'' incorrect.", trueHead.string, thisHead.form)
// labels should be the same
Assert.assertEquals(s"Label of token ``${tok.form}'' incorrect.", parseTree.label(tokTreeIdx).categoryValue, tok.head.label)
// grandheads should be the same
val thisGrandHead = if (tok.hasGrandHead) tok.grandHead.depToken else null
val trueGrandHead = parseTree.parent(trueHead.positionInSentence)
if (trueGrandHead == null || thisGrandHead == null) {
// if one has no head then neither should, and this should be the root
if (thisGrandHead != null) {
Assert.assertEquals(s"Grandhead of token ``${tok.form}'' incorrect.", rootTokenString, thisGrandHead.form)
Assert.assertEquals(s"Label of grandhead of ``${tok.form}'' incorrect.", "root", thisHead.head.label)
} else {
Assert.assertNotNull(s"Grandhead of token ``${tok.form}'' incorrect.", thisGrandHead)
}
} else {
// should be the same word
Assert.assertEquals(s"Grandhead of token ``${tok.form}'' incorrect.", trueGrandHead.string, thisGrandHead.form)
// labels should be the same
Assert.assertEquals(s"Label of grandhead of ``${tok.form}'' incorrect.", parseTree.label(trueHead.positionInSentence).categoryValue, thisHead.head.label)
}
// leftmost dependents should be the same
val thisLeftmostDep = tok.leftmostDependent
val trueLeftmostDep = if (!parseTree.leftChildren(tokTreeIdx).isEmpty) parseTree.leftChildren(tokTreeIdx).head else null
if (thisLeftmostDep == null || trueLeftmostDep == null) {
// if one is null then they both should be
if (thisLeftmostDep != null)
Assert.assertEquals(s"Leftmost dependency of token ``${tok.form}'' incorrect.", nullTokenString, thisLeftmostDep.form)
else
Assert.assertNotNull(s"Leftmost dependency of token ``${tok.form}'' incorrect.", thisLeftmostDep)
} else {
// should be the same word
Assert.assertEquals(s"Leftmost dependency of token ``${tok.form}'' incorrect.", trueLeftmostDep.string, thisLeftmostDep.form)
// 2nd leftmost dependents should be the same
val thisLeftmostDep2 = tok.leftmostDependent2
val trueLeftmostDep2 = if (!trueLeftmostDep.parseLeftChildren.isEmpty) trueLeftmostDep.parseLeftChildren.head else null
if (thisLeftmostDep2 == null || trueLeftmostDep2 == null) {
// if one is null then they both should be
              if (thisLeftmostDep2 != null)
Assert.assertEquals(s"2nd leftmost dependency of token ``${tok.form}'' incorrect.", nullTokenString, thisLeftmostDep2.form)
else
Assert.assertNotNull(s"2nd leftmost dependency of token ``${tok.form}'' incorrect.", thisLeftmostDep2)
} else {
// should be same word
Assert.assertEquals(s"2nd leftmost dependency of token ``${tok.form}'' incorrect.", trueLeftmostDep2.string, thisLeftmostDep2.form)
}
}
// rightmost dependents should be the same
val thisRightmostDep = tok.rightmostDependent
val trueRightmostDep = if (!parseTree.rightChildren(tokTreeIdx).isEmpty) parseTree.rightChildren(tokTreeIdx).last else null
if (thisRightmostDep == null || trueRightmostDep == null) {
// if one is null then they both should be
if (thisRightmostDep != null)
Assert.assertEquals(s"Rightmost dependency of token ``${tok.form}'' incorrect.", nullTokenString, thisRightmostDep.form)
else
Assert.assertNotNull(s"Rightmost dependency of token ``${tok.form}'' incorrect.", thisRightmostDep)
} else {
// should be the same word
Assert.assertEquals(s"Rightmost dependency of token ``${tok.form}'' incorrect.", trueRightmostDep.string, thisRightmostDep.form)
            // 2nd rightmost dependents should be the same
val thisRightmostDep2 = tok.rightmostDependent2
val trueRightmostDep2 = if (!trueRightmostDep.parseRightChildren.isEmpty) trueRightmostDep.parseRightChildren.last else null
if (thisRightmostDep2 == null || trueRightmostDep2 == null) {
// if one is null then they both should be
if (thisRightmostDep2 != null)
Assert.assertEquals(s"2nd rightmost dependency of token ``${tok.form}'' incorrect.", nullTokenString, thisRightmostDep2.form)
else
Assert.assertNotNull(s"2nd rightmost dependency of token ``${tok.form}'' incorrect.", thisRightmostDep2)
} else {
// should be same word
Assert.assertEquals(s"2nd rightmost dependency of token ``${tok.form}'' incorrect.", trueRightmostDep2.string, thisRightmostDep2.form)
}
}
// left-nearest siblings should be the same
val thisLeftNearestSib = tok.leftNearestSibling
val trueParentIdx = parseTree.sentence(tokTreeIdx).parseParentIndex
val trueLeftNearestSib = {
var i = tokTreeIdx - 1
while (i >= 0 && parseTree.sentence(i).parseParentIndex != trueParentIdx) i -= 1
if (i == -1) null else parseTree.sentence(i)
}
if (trueLeftNearestSib == null || thisLeftNearestSib == null) {
// if one is null then they both should be
if (thisLeftNearestSib != null)
Assert.assertEquals(s"Left nearest sibling of token ``${tok.form}'' incorrect.", nullTokenString, thisLeftNearestSib.form)
else
Assert.assertNotNull(s"Left nearest sibling of token ``${tok.form}'' incorrect.", thisLeftNearestSib)
} else {
// should be same word
Assert.assertEquals(s"Left nearest sibling of token ``${tok.form}'' incorrect.", trueLeftNearestSib.string, thisLeftNearestSib.form)
}
// right-nearest siblings should be the same
val thisRightNearestSib = tok.rightNearestSibling
val trueRightNearestSib = {
var i = tokTreeIdx + 1
while (i < parseTree.sentence.size && parseTree.sentence(i).parseParentIndex != trueParentIdx) i += 1
if (i == parseTree.sentence.size) null else parseTree.sentence(i)
}
if (trueRightNearestSib == null || thisRightNearestSib == null) {
// if one is null then they both should be
if (thisRightNearestSib != null)
Assert.assertEquals(s"Right nearest sibling of token ``${tok.form}'' incorrect.", nullTokenString, thisRightNearestSib.form)
else
Assert.assertNotNull(s"Right nearest sibling of token ``${tok.form}'' incorrect.", thisRightNearestSib)
} else {
// should be same word
Assert.assertEquals(s"Right nearest sibling of token ``${tok.form}'' incorrect.", trueRightNearestSib.string, thisRightNearestSib.form)
}
}
})
})
/* Print out the features for the first sentence */
parseDecisions.head.foreach(decision => {
print(s"${ // convert decision to a nice verbose string (rather than ints)
val transition = decision.categoryValue.split(" ")
transition.take(2).map(x => parser.ParserConstants.getString(x.toInt)).mkString(" ") + " " + transition(2)
}; ")
println(s"feats: ${decision.features.domain.dimensionDomain.categories.zip(decision.features.value.toSeq).filter(_._2 == 1.0).map(_._1).mkString(" ")}")
println()
})
}
}
|
hlin117/factorie
|
src/test/scala/cc/factorie/app/nlp/parse/TestTransitionBasedParser.scala
|
Scala
|
apache-2.0
| 10,382 |
package org.broadinstitute.dsde.firecloud
import java.io.{File, FileNotFoundException, FileOutputStream, InputStream}
import java.net.{HttpURLConnection, URL}
import java.text.SimpleDateFormat
import java.util.zip.{ZipEntry, ZipException, ZipFile}
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.{HttpResponse, StatusCodes}
import com.typesafe.scalalogging.LazyLogging
import org.broadinstitute.dsde.firecloud.EntityService._
import org.broadinstitute.dsde.firecloud.FireCloudConfig.Rawls
import org.broadinstitute.dsde.firecloud.dataaccess.ImportServiceFiletypes.{FILETYPE_PFB, FILETYPE_RAWLS}
import org.broadinstitute.dsde.firecloud.dataaccess.{GoogleServicesDAO, ImportServiceDAO, RawlsDAO}
import org.broadinstitute.dsde.firecloud.model.ModelJsonProtocol._
import org.broadinstitute.dsde.firecloud.model.{ModelSchema, _}
import org.broadinstitute.dsde.firecloud.service.PerRequest.{PerRequestMessage, RequestComplete}
import org.broadinstitute.dsde.firecloud.service.TsvTypes.TsvType
import org.broadinstitute.dsde.firecloud.service.{TSVFileSupport, TsvTypes}
import org.broadinstitute.dsde.firecloud.utils.TSVLoadFile
import org.broadinstitute.dsde.rawls.model._
import org.broadinstitute.dsde.workbench.model.google.{GcsBucketName, GcsObjectName}
import spray.json.DefaultJsonProtocol._
import java.nio.channels.Channels
import java.nio.charset.StandardCharsets
import java.util.concurrent.atomic.AtomicLong
import scala.collection.JavaConverters._
import scala.concurrent.{ExecutionContext, Future}
import scala.io.Source
import scala.language.postfixOps
import scala.sys.process._
import scala.util.{Failure, Success, Try}
object EntityService {
def constructor(app: Application)(modelSchema: ModelSchema)(implicit executionContext: ExecutionContext) =
new EntityService(app.rawlsDAO, app.importServiceDAO, app.googleServicesDAO, modelSchema)
def colNamesToAttributeNames(headers: Seq[String], requiredAttributes: Map[String, String]): Seq[(String, Option[String])] = {
headers.tail map { colName => (colName, requiredAttributes.get(colName))}
}
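  // Illustration of the mapping above with hypothetical inputs:
  //   colNamesToAttributeNames(Seq("sample_id", "bam", "note"), Map("bam" -> "AttributeString"))
  //     == Seq(("bam", Some("AttributeString")), ("note", None))
  // The first header is dropped because it names the entity itself rather than an attribute.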
def backwardsCompatStripIdSuffixes(tsvLoadFile: TSVLoadFile, entityType: String, modelSchema: ModelSchema): TSVLoadFile = {
modelSchema.getTypeSchema(entityType) match {
case Failure(_) => tsvLoadFile // the failure will be handled during parsing
case Success(metaData) =>
val newHeaders = tsvLoadFile.headers.map { header =>
val headerSansId = header.stripSuffix("_id")
if (metaData.requiredAttributes.keySet.contains(headerSansId) || metaData.memberType.contains(headerSansId)) {
headerSansId
} else {
header
}
}
tsvLoadFile.copy(headers = newHeaders)
}
}
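  // Illustration of the stripping above (hypothetical schema): if the "sample" type declares a
  // required attribute "participant", a legacy header "participant_id" is rewritten to
  // "participant"; a header the schema does not know about, e.g. "note_id", is left untouched.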
  // Returns (contents of participants.tsv, contents of samples.tsv).
def unzipTSVs(bagName: String, zipFile: ZipFile)(op: (Option[String], Option[String]) => Future[PerRequestMessage]): Future[PerRequestMessage] = {
val zipEntries = zipFile.entries.asScala
val rand = java.util.UUID.randomUUID.toString.take(8)
val participantsTmp = File.createTempFile(s"$rand-participants", ".tsv")
val samplesTmp = File.createTempFile(s"$rand-samples", ".tsv")
val unzippedFiles = zipEntries.foldLeft((None: Option[String], None: Option[String])){ (acc: (Option[String], Option[String]), ent: ZipEntry) =>
if(!ent.isDirectory && (ent.getName.contains("/participants.tsv") || ent.getName.equals("participants.tsv"))) {
acc._1 match {
case Some(_) => throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.BadRequest, s"More than one participants.tsv file found in bagit $bagName"))
case None =>
unzipSingleFile(zipFile.getInputStream(ent), participantsTmp)
(Some(participantsTmp.getPath), acc._2)
}
} else if(!ent.isDirectory && (ent.getName.contains("/samples.tsv") || ent.getName.equals("samples.tsv"))) {
acc._2 match {
case Some(_) => throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.BadRequest, s"More than one samples.tsv file found in bagit $bagName"))
case None =>
          unzipSingleFile(zipFile.getInputStream(ent), samplesTmp)
          (acc._1, Some(samplesTmp.getPath))
}
} else {
acc
}
}
try {
op(unzippedFiles._1.map(f => Source.fromFile(f).mkString), unzippedFiles._2.map(f => Source.fromFile(f).mkString))
} catch {
case e: Exception => throw e
} finally {
participantsTmp.delete
samplesTmp.delete
}
}
private def unzipSingleFile(zis: InputStream, fileTarget: File): Unit = {
val fout = new FileOutputStream(fileTarget)
val buffer = new Array[Byte](1024)
Stream.continually(zis.read(buffer)).takeWhile(_ != -1).foreach(fout.write(buffer, 0, _))
}
}
class EntityService(rawlsDAO: RawlsDAO, importServiceDAO: ImportServiceDAO, googleServicesDAO: GoogleServicesDAO, modelSchema: ModelSchema)(implicit val executionContext: ExecutionContext)
extends TSVFileSupport with LazyLogging {
val format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZZ")
/**
* Returns the plural form of the entity type.
   * Bails with a 400 Bad Request if the entity type is unknown to the schema and the FireCloud model is in use.
   * If the flexible model is in use, just appends an 's'. */
private def withPlural(entityType: String)(op: String => Future[PerRequestMessage]): Future[PerRequestMessage] = {
modelSchema.getPlural(entityType) match {
case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
case Success(plural) => op(plural)
}
}
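  // For example (per the scaladoc above), withPlural("sample") { plural => ... } hands "samples"
  // to the continuation under the flexible model; the FireCloud model looks the plural up in the
  // schema instead and fails with 400 if the type is unknown.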
/**
* Verifies that the provided list of headers includes all attributes required by the schema for this entity type.
* Bails with a 400 Bad Request if the entity type is unknown or if some attributes are missing.
* Returns the list of required attributes if all is well. */
private def withRequiredAttributes(entityType: String, headers: Seq[String])(op: Map[String, String] => Future[PerRequestMessage]):Future[PerRequestMessage] = {
modelSchema.getRequiredAttributes(entityType) match {
case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
case Success(requiredAttributes) =>
if( !requiredAttributes.keySet.subsetOf(headers.toSet) ) {
Future( RequestCompleteWithErrorReport(BadRequest,
"TSV is missing required attributes: " + (requiredAttributes.keySet -- headers).mkString(", ")) )
} else {
op(requiredAttributes)
}
}
}
/**
* Imports collection members into a collection type entity. */
private def importMembershipTSV(
workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile, entityType: String, userInfo: UserInfo, isAsync: Boolean ): Future[PerRequestMessage] = {
withMemberCollectionType(entityType, modelSchema) { memberTypeOpt =>
validateMembershipTSV(tsv, memberTypeOpt) {
withPlural(memberTypeOpt.get) { memberPlural =>
val rawlsCalls = (tsv.tsvData groupBy(_.head) map { case (entityName, rows) =>
val ops = rows map { row =>
//row(1) is the entity to add as a member of the entity in row.head
val attrRef = AttributeEntityReference(memberTypeOpt.get,row(1))
            Map(addListMemberOperation, "attributeListName" -> AttributeString(memberPlural), "newMember" -> attrRef)
}
EntityUpdateDefinition(entityName,entityType,ops)
}).toSeq
maybeAsyncBatchUpdate(isAsync, true, workspaceNamespace, workspaceName, entityType, rawlsCalls, userInfo)
}
}
}
}
/**
* Creates or updates entities from an entity TSV. Required attributes must exist in column headers. */
private def importEntityTSV(
workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile, entityType: String, userInfo: UserInfo, isAsync: Boolean ): Future[PerRequestMessage] = {
//we're setting attributes on a bunch of entities
checkFirstColumnDistinct(tsv) {
withMemberCollectionType(entityType, modelSchema) { memberTypeOpt =>
checkNoCollectionMemberAttribute(tsv, memberTypeOpt) {
withRequiredAttributes(entityType, tsv.headers) { requiredAttributes =>
val colInfo = colNamesToAttributeNames(tsv.headers, requiredAttributes)
val rawlsCalls = tsv.tsvData.map(row => setAttributesOnEntity(entityType, memberTypeOpt, row, colInfo, modelSchema))
maybeAsyncBatchUpdate(isAsync, true, workspaceNamespace, workspaceName, entityType, rawlsCalls, userInfo)
}
}
}
}
}
/**
* Updates existing entities from TSV. All entities must already exist. */
private def importUpdateTSV(
workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile, entityType: String, userInfo: UserInfo, isAsync: Boolean ): Future[PerRequestMessage] = {
//we're setting attributes on a bunch of entities
checkFirstColumnDistinct(tsv) {
withMemberCollectionType(entityType, modelSchema) { memberTypeOpt =>
checkNoCollectionMemberAttribute(tsv, memberTypeOpt) {
modelSchema.getRequiredAttributes(entityType) match {
//Required attributes aren't required to be headers in update TSVs - they should already have been
//defined when the entity was created. But we still need the type information if the headers do exist.
case Failure(regret) => Future(RequestCompleteWithErrorReport(BadRequest, regret.getMessage))
case Success(requiredAttributes) =>
val colInfo = colNamesToAttributeNames(tsv.headers, requiredAttributes)
val rawlsCalls = tsv.tsvData.map(row => setAttributesOnEntity(entityType, memberTypeOpt, row, colInfo, modelSchema))
maybeAsyncBatchUpdate(isAsync, false, workspaceNamespace, workspaceName, entityType, rawlsCalls, userInfo)
}
}
}
}
}
private def maybeAsyncBatchUpdate(isAsync: Boolean, isUpsert: Boolean, workspaceNamespace: String, workspaceName: String,
entityType: String, rawlsCalls: Seq[EntityUpdateDefinition], userInfo: UserInfo): Future[PerRequestMessage] = {
if (isAsync) {
asyncImport(workspaceNamespace, workspaceName, isUpsert, rawlsCalls, userInfo).recover {
case e: Exception =>
RequestCompleteWithErrorReport(InternalServerError, "Unexpected error during async TSV import", e)
}
} else {
val rawlsResponse = if (isUpsert) {
rawlsDAO.batchUpsertEntities(workspaceNamespace, workspaceName, entityType, rawlsCalls)(userInfo)
} else {
rawlsDAO.batchUpdateEntities(workspaceNamespace, workspaceName, entityType, rawlsCalls)(userInfo)
}
handleBatchRawlsResponse(entityType, rawlsResponse)
}
}
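  // The async path (asyncImport below) stages the batch as JSON in the import service's GCS
  // bucket and submits an import job pointing at that object; the synchronous path above calls
  // Rawls directly and translates its response via handleBatchRawlsResponse.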
private def asyncImport(workspaceNamespace: String, workspaceName: String, isUpsert: Boolean,
rawlsCalls: Seq[EntityUpdateDefinition], userInfo: UserInfo): Future[PerRequestMessage] = {
import spray.json._
// generate unique name for the file-to-upload
val fileToWrite = GcsObjectName(s"incoming/${java.util.UUID.randomUUID()}.json")
val bucketToWrite = GcsBucketName(FireCloudConfig.ImportService.bucket)
// write rawlsCalls to import service's bucket
val dataBytes = rawlsCalls.toJson.prettyPrint.getBytes(StandardCharsets.UTF_8)
val insertedObject = googleServicesDAO.writeObjectAsRawlsSA(bucketToWrite, fileToWrite, dataBytes)
val gcsPath = s"gs://${insertedObject.bucketName.value}/${insertedObject.objectName.value}"
val importRequest = AsyncImportRequest(gcsPath, FILETYPE_RAWLS)
importServiceDAO.importJob(workspaceNamespace, workspaceName, importRequest, isUpsert)(userInfo)
}
private def handleBatchRawlsResponse(entityType: String, response: Future[HttpResponse]): Future[PerRequestMessage] = {
response map { response =>
response.status match {
case NoContent =>
logger.debug("OK response")
RequestComplete(OK, entityType)
case _ =>
// Bubble up all other unmarshallable responses
logger.warn("Unanticipated response: " + response.status.defaultMessage)
RequestComplete(response)
}
} recover {
case e: Throwable => RequestCompleteWithErrorReport(InternalServerError, "Service API call failed", e)
}
}
private def importEntitiesFromTSVLoadFile(workspaceNamespace: String, workspaceName: String, tsv: TSVLoadFile, tsvType: TsvType, entityType: String, userInfo: UserInfo, isAsync: Boolean): Future[PerRequestMessage] = {
tsvType match {
case TsvTypes.MEMBERSHIP => importMembershipTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync)
case TsvTypes.ENTITY => importEntityTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync)
case TsvTypes.UPDATE => importUpdateTSV(workspaceNamespace, workspaceName, tsv, entityType, userInfo, isAsync)
case _ => Future(RequestCompleteWithErrorReport(BadRequest, "Invalid TSV type.")) //We should never get to this case
}
}
/**
* Determines the TSV type from the first column header and routes it to the correct import function. */
def importEntitiesFromTSV(workspaceNamespace: String, workspaceName: String, tsvString: String, userInfo: UserInfo, isAsync: Boolean = false): Future[PerRequestMessage] = {
def stripEntityType(entityTypeString: String): String = {
val entityType = entityTypeString.stripSuffix("_id")
if (entityType == entityTypeString)
throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.BadRequest, "Invalid first column header, entity type should end in _id"))
entityType
}
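    // Examples of accepted first-column headers (assuming the usual lower-case TsvTypes names):
    //   "sample_id"                -> (TsvTypes.ENTITY, "sample")
    //   "entity:sample_id"         -> (TsvTypes.ENTITY, "sample")
    //   "membership:sample_set_id" -> (TsvTypes.MEMBERSHIP, "sample_set")
    //   "update:participant_id"    -> (TsvTypes.UPDATE, "participant")
    // Anything else is rejected with a 400 Bad Request by the parsing below.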
withTSVFile(tsvString) { tsv =>
val (tsvType, entityType) = tsv.firstColumnHeader.split(":") match {
case Array(entityTypeString) => (TsvTypes.ENTITY, stripEntityType(entityTypeString))
case Array(tsvTypeString, entityTypeString) =>
val tsvType = Try(TsvTypes.withName(tsvTypeString)) match {
case Success(t) => t
case Failure(err) => throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.BadRequest, err.toString))
}
(tsvType, stripEntityType(entityTypeString))
case _ => throw new FireCloudExceptionWithErrorReport(errorReport = ErrorReport(StatusCodes.BadRequest, "Invalid first column header, should look like tsvType:entity_type_id"))
}
val strippedTsv = if (modelSchema.supportsBackwardsCompatibleIds) {
backwardsCompatStripIdSuffixes(tsv, entityType, modelSchema)
} else {
tsv
}
importEntitiesFromTSVLoadFile(workspaceNamespace, workspaceName, strippedTsv, tsvType, entityType, userInfo, isAsync)
}
}
def importBagit(workspaceNamespace: String, workspaceName: String, bagitRq: BagitImportRequest, userInfo: UserInfo): Future[PerRequestMessage] = {
if(bagitRq.format != "TSV") {
Future.successful(RequestCompleteWithErrorReport(StatusCodes.BadRequest, "Invalid format; for now, you must place the string \\"TSV\\" here"))
} else {
//Java URL handles http, https, ftp, file, and jar protocols.
//We're only OK with https to avoid MITM attacks.
val bagitURL = new URL(bagitRq.bagitURL.replace(" ", "%20"))
val acceptableProtocols = Seq("https") //for when we inevitably change our mind and need to support others
if (!acceptableProtocols.contains(bagitURL.getProtocol)) {
Future.successful(RequestCompleteWithErrorReport(StatusCodes.BadRequest, "Invalid bagitURL protocol: must be https only"))
} else {
val rand = java.util.UUID.randomUUID.toString.take(8)
val bagItFile = File.createTempFile(s"$rand-samples", ".tsv")
var bytesDownloaded = new AtomicLong(-1) // careful, this is a var
try {
val conn = bagitURL.openConnection()
val length = conn.getContentLength
conn.asInstanceOf[HttpURLConnection].disconnect()
if (length == 0) {
Future.successful(RequestCompleteWithErrorReport(StatusCodes.BadRequest, s"BDBag has content-length 0"))
} else if (length > Rawls.entityBagitMaximumSize) {
Future.successful(RequestCompleteWithErrorReport(StatusCodes.BadRequest, s"BDBag size is too large."))
} else {
// download the file
val readFromBagit = Channels.newChannel(bagitURL.openStream())
val writeToTemp = new FileOutputStream(bagItFile)
try {
bytesDownloaded.set(writeToTemp.getChannel.transferFrom(readFromBagit, 0, length))
} finally {
readFromBagit.close()
writeToTemp.close()
}
val zipFile = new ZipFile(bagItFile.getAbsolutePath)
if (!zipFile.entries().hasMoreElements) {
Future(RequestCompleteWithErrorReport(StatusCodes.BadRequest, s"BDBag has no entries."))
} else {
//make two big strings containing the participants and samples TSVs
                  //if I could turn back time this would use streams to save memory, but hopefully this will all go away when the entity service comes along
unzipTSVs(bagitRq.bagitURL, zipFile) { (participantsStr, samplesStr) =>
(participantsStr, samplesStr) match {
case (None, None) =>
Future.successful(RequestCompleteWithErrorReport(StatusCodes.BadRequest, "You must have either (or both) participants.tsv and samples.tsv in the zip file"))
case _ =>
for {
// This should vomit back errors from rawls.
participantResult <- participantsStr.map(ps => importEntitiesFromTSV(workspaceNamespace, workspaceName, ps, userInfo)).getOrElse(Future.successful(RequestComplete(OK)))
sampleResult <- samplesStr.map(ss => importEntitiesFromTSV(workspaceNamespace, workspaceName, ss, userInfo)).getOrElse(Future.successful(RequestComplete(OK)))
} yield {
participantResult match {
case RequestComplete((OK, _)) => sampleResult
case _ => participantResult
}
}
}
}
}
}
} catch {
case _:FileNotFoundException =>
Future.successful(RequestCompleteWithErrorReport(StatusCodes.NotFound, s"BDBag ${bagitRq.bagitURL} was not found."))
case ze:ZipException =>
logger.info(s"ZipException: ${ze.getMessage} - ${bagItFile.getAbsolutePath} has length ${bagItFile.length}. " +
s"We originally downloaded $bytesDownloaded bytes.")
Future.successful(RequestCompleteWithErrorReport(StatusCodes.BadRequest, s"Problem with BDBag: ${ze.getMessage}"))
case e: Exception =>
throw e
} finally {
bagItFile.delete()
}
}
}
}
def importJob(workspaceNamespace: String, workspaceName: String, importRequest: AsyncImportRequest, userInfo: UserInfo): Future[PerRequestMessage] = {
// validate that filetype exists in the importRequest
if (importRequest.filetype.isEmpty)
throw new FireCloudExceptionWithErrorReport(ErrorReport(BadRequest, "filetype must be specified"))
importServiceDAO.importJob(workspaceNamespace, workspaceName, importRequest, isUpsert = true)(userInfo)
}
def getEntitiesWithType(workspaceNamespace: String, workspaceName: String, userInfo: UserInfo): Future[PerRequestMessage] = {
rawlsDAO.getEntityTypes(workspaceNamespace, workspaceName)(userInfo).flatMap { entityTypeResponse =>
val entityTypes = entityTypeResponse.keys.toList
val entitiesForTypes = Future.traverse(entityTypes) { entityType =>
rawlsDAO.fetchAllEntitiesOfType(workspaceNamespace, workspaceName, entityType)(userInfo)
}
entitiesForTypes.map { result =>
RequestComplete(OK, result.flatten)
}
}
}
}
|
broadinstitute/firecloud-orchestration
|
src/main/scala/org/broadinstitute/dsde/firecloud/EntityService.scala
|
Scala
|
bsd-3-clause
| 20,370 |
package is.hail.types.physical.stypes.interfaces
import is.hail.annotations.Region
import is.hail.asm4s.{Settable, TypeInfo, Value}
import is.hail.expr.ir.EmitCodeBuilder
import is.hail.expr.ir.streams.StreamProducer
import is.hail.types.physical.PType
import is.hail.types.physical.stypes._
import is.hail.types.virtual.{TStream, Type}
import is.hail.types.{RIterable, TypeWithRequiredness}
final case class SStream(elementEmitType: EmitType) extends SType {
def elementType: SType = elementEmitType.st
override def _coerceOrCopy(cb: EmitCodeBuilder, region: Value[Region], value: SValue, deepCopy: Boolean): SValue = {
if (deepCopy) throw new NotImplementedError()
assert(value.st == this)
value
}
override def settableTupleTypes(): IndexedSeq[TypeInfo[_]] = throw new NotImplementedError()
override def fromSettables(settables: IndexedSeq[Settable[_]]): SSettable = throw new NotImplementedError()
override def fromValues(values: IndexedSeq[Value[_]]): SValue = throw new NotImplementedError()
override def storageType(): PType = throw new NotImplementedError()
override def copiedType: SType = throw new NotImplementedError()
override def containsPointers: Boolean = throw new NotImplementedError()
override def virtualType: Type = TStream(elementType.virtualType)
override def castRename(t: Type): SType = throw new UnsupportedOperationException("rename on stream")
override def _typeWithRequiredness: TypeWithRequiredness = RIterable(elementEmitType.typeWithRequiredness.r)
}
object SStreamValue {
def apply(producer: StreamProducer): SStreamValue = SStreamValue(SStream(producer.element.emitType), producer)
}
final case class SStreamValue(st: SStream, producer: StreamProducer) extends SUnrealizableValue {
def valueTuple: IndexedSeq[Value[_]] = throw new NotImplementedError()
}
|
hail-is/hail
|
hail/src/main/scala/is/hail/types/physical/stypes/interfaces/SStream.scala
|
Scala
|
mit
| 1,842 |
package com.meteorcode.pathway.io
import org.scalactic.{AbstractStringUniformity, Uniformity}
import scala.language.postfixOps
/**
* Scalactic normalizations for strings that represent paths
*
* Created by hawk on 2/2/16.
*/
trait PathNormalizations {
/**
* Produces a <code>Uniformity[String]</code> whose <code>normalized</code>
* method removes the trailing slash
*
* @return a <code>Uniformity[String]</code> that normalizes by
* pruning the trailing slash
*/
val trailingSlashNormed: Uniformity[String]
= new AbstractStringUniformity {
def normalized(s: String): String
= s withoutTrailingSlash
override def toString: String = "trailingSlashNormed"
}
val extensionTrimmed: Uniformity[String]
= new AbstractStringUniformity {
def normalized(s: String): String
= s split '.' dropRight 1 mkString "."
override def toString: String = "extensionTrimmed"
}
}
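// A minimal usage sketch, not part of the original file: with ScalaTest matchers and these
// normalizations in scope, one would write something like
//
//   "src/main/" should equal ("src/main") (after being trailingSlashNormed)
//   "archive.tar.gz" should equal ("archive.tar") (after being extensionTrimmed)
//
// assuming `withoutTrailingSlash` (an extension provided elsewhere in this package) strips a
// single trailing '/' when present.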
|
MeteorCode/Pathway
|
src/main/scala/com/meteorcode/pathway/io/PathNormalizations.scala
|
Scala
|
mit
| 978 |
package org.scalaide.ui.internal.editor.decorators.indentguide
import org.eclipse.jface.preference.PreferenceConverter
import org.eclipse.jface.text.source.ISourceViewer
import org.eclipse.swt.SWT
import org.eclipse.swt.events.PaintEvent
import org.eclipse.swt.graphics.Color
import org.eclipse.swt.graphics.GC
import org.eclipse.swt.widgets.Display
import org.scalaide.ui.internal.editor.decorators.EditorPainter
import org.scalaide.ui.internal.preferences.EditorPreferencePage
/**
* Contains the UI-related code of the indent guide component.
*
* It tries to paint the guides only when necessary. This means:
* - Only the area that is shown in the editor obtains guide markers. This is the
 * case when a new file is opened, when the editor loses and regains
 * focus, when it is saved, or when scrolling happens.
* - On text change, only the current line is redrawn.
*
* This component watches the preference store to find out whether its configuration
* has changed.
*/
class IndentGuidePainter(viewer: ISourceViewer)
extends EditorPainter(viewer, EditorPreferencePage.INDENT_GUIDE_ENABLE)
with IndentGuideGenerator {
private object config {
val LineStyle = SWT.LINE_DOT
val LineWidth = 1
/**
     * Offset to move the guides to the right. This ensures that the guide
     * and the caret do not completely overlap.
*/
val GuideShift = 2
}
/** The width of a space in pixel */
private val spaceWidth = withGC { _.getAdvanceWidth(' ') }
private var color: Color = _
override def paintByReason(reason: Int): Unit = {}
override def paintByEvent(e: PaintEvent): Unit = {
val (y, h, gc) = (e.y, e.height, e.gc)
val startLine = widget.getLineIndex(y)
val endLine = widget.getLineIndex(y + h - 1)
gc.setLineStyle(config.LineStyle)
gc.setLineWidth(config.LineWidth)
gc.setForeground(color)
def drawLine(guide: Guide) = {
val p = widget.getLocationAtOffset(widget.getOffsetAtLine(guide.line))
val x = p.x + guide.column * spaceWidth + config.GuideShift
val h = widget.getLineHeight(guide.line)
gc.drawLine(x, p.y, x, p.y + h)
}
guidesOfRange(startLine, endLine) foreach drawLine
}
override def dispose(): Unit = {
if (color != null)
color.dispose()
}
override def textOfLine(line: Int): String = widget.getLine(line)
override def lineCount: Int = widget.getLineCount()
override def indentWidth: Int = widget.getTabs()
override def loadPreferences(): Unit = {
val rgb = PreferenceConverter.getColor(store, EditorPreferencePage.INDENT_GUIDE_COLOR)
if (color != null)
color.dispose()
color = new Color(Display.getCurrent(), rgb)
}
private def withGC[A](f: GC => A): A = {
val gc = new GC(widget)
val res = f(gc)
gc.dispose()
res
}
}
/**
* Contains the UI-less logic of the indent guide component.
*
* The algorithm used to calculate the needed indent guides is based on heuristics
 * because there are some ambiguous cases which can only be determined
 * correctly by analyzing the whole file semantically. Because this has to run
 * in the UI thread, a semantic analysis is not an option.
*
* These are the main points for the algorithm:
 * - When the line is not empty, indent until non-whitespace text is found, but
* stop one indent width before.
* - When the line is empty it has to be found out whether the line is inside a block
* (e.g. a function) or not (e.g. a class body). In the latter case indent
* guides should never occur.
* - There exists another case where no guidance should happen. In
*
* def f(i: Int) =
* 0
*
* the rhs should be guided, but not the line after. One way to detect this
* case is to use a heuristic that checks if a rhs ends with a brace or similar
* symbols and if this is not the case all lines with whitespace that occur
* after the end of the rhs (which is the last line that contains non whitespace
* text) should not be guided.
* - When indentation in a single line changes it could happen that the guides of
* some other lines which contain only whitespace have to be invalidated. One
* possible case is
*
* def f: Unit = {
*
* def g = 0
* }
*
* where `g` is on the wrong indentation depth. After increasing its indentation,
* the guides of the line before have to be renewed as well.
* - Multi line comments are guided as well.
* - The first character of each line doesn't get a guide.
*/
trait IndentGuideGenerator {
/** The first index of `line` is 0; `column` represents the number of whitespace characters */
case class Guide(line: Int, column: Int)
/** This assumes that the first line has index 0 */
def textOfLine(line: Int): String
/** The number of lines of a document */
def lineCount: Int
/** The number of characters of one indent level */
def indentWidth: Int
def guidesOfRange(startLine: Int, endLine: Int): Seq[Guide] = {
/* indentation depth in number of characters */
def indentDepth(text: String) = {
val (sum, _) = text.takeWhile(c => c == ' ' || c == '\t').foldLeft((0, 0)) {
case ((sum, len), c) =>
val reminder = indentWidth - len % indentWidth
if (c == ' ') (sum + 1, len) else (sum + reminder, len + reminder)
}
sum
}
def decreaseFrom(line: Int) =
Iterator.iterate(line)(_ - 1).takeWhile(_ > 0)
def increaseFrom(line: Int) =
Iterator.iterate(line)(_ + 1).takeWhile(_ < lineCount)
def iterate(iter: Iterator[Int], f: Int => Seq[Guide])(p: Int => Boolean) =
iter.find(p).fold(Seq[Guide]())(f)
def calcGuides(line: Int) = {
val startLineDepth = indentDepth(textOfLine(line - 1))
def calcGuides(endLine: Int) = {
val endLineDepth = indentDepth(textOfLine(endLine))
def isProbablyClosingBlock =
textOfLine(endLine).matches("[ \t]*[})\\]].*")
def guidesOfRange(end: Int) =
for (line <- line to endLine - 1; i <- indentWidth to end by indentWidth)
yield Guide(line, i)
def forNextDepth =
guidesOfRange(startLineDepth)
def forSameDepth =
guidesOfRange(endLineDepth - (if (isProbablyClosingBlock) 0 else indentWidth))
if (startLineDepth < endLineDepth)
forNextDepth
else if (endLineDepth > 0)
forSameDepth
else
Nil
}
if (startLineDepth == 0)
Nil
else
iterate(increaseFrom(line), calcGuides) { line =>
textOfLine(line).trim().nonEmpty
}
}
def guidesForNonEmptyLine(line: Int, text: String) =
for (i <- indentWidth until indentDepth(text) by indentWidth)
yield Guide(line, i)
def guidesForEmptyLines(line: Int) =
iterate(decreaseFrom(line), calcGuides) { line =>
textOfLine(line - 1).trim().nonEmpty
}
def guidesForLine(line: Int) = {
val text = textOfLine(line)
if (text.trim().nonEmpty)
guidesForNonEmptyLine(line, text)
else
guidesForEmptyLines(line)
}
startLine to endLine flatMap guidesForLine
}
}
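// Hedged illustration (not part of the original source): a minimal, hypothetical
// IndentGuideGenerator backed by an in-memory Vector[String], showing how
// guidesOfRange could be exercised outside the editor. The sample lines and the
// indent width of 2 are assumptions made up for this sketch.
object IndentGuideGeneratorSketch extends App {
  private val sampleLines = Vector(
    "class Foo {",
    "  def bar = {",
    "    1 + 1",
    "  }",
    "}")

  private val generator = new IndentGuideGenerator {
    override def textOfLine(line: Int): String = sampleLines(line)
    override def lineCount: Int = sampleLines.length
    override def indentWidth: Int = 2
  }

  // Prints one Guide(line, column) per guide marker that would be painted.
  generator.guidesOfRange(0, sampleLines.length - 1) foreach println
}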
|
romanowski/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/ui/internal/editor/decorators/indentguide/IndentGuidePainter.scala
|
Scala
|
bsd-3-clause
| 7,146 |
package memnets.model
import scala.collection.mutable.ArrayBuffer
trait GoalHandler {
def goalStart(g: Goal): Unit
def goalOver(g: Goal): Unit
def goalGroupOver(grp: Goals): Unit
}
trait Goals extends IndexedSeq[Goal] {
def +=(g: Goal): Unit
/**
* Subclass to change the bonus points default.
* Bonus points are only applied if the trial is won based on reward.
*/
def bonusCalc(t: Trial): Int = 0
def bonusMessage: String = "?"
def next(gh: GoalHandler): Unit
def goalsLeft: Int = count(g => !g.isCompleted && g.isGood)
def reset(): Unit = {
for (g <- this) g.reset()
}
def reward: Int = filter(_.isGood.get).map(_.reward).sum
def startGoals: Iterable[Goal]
def tick(te: Tick, gh: GoalHandler): Unit
import collection.JavaConverters._
def getGoals(): java.util.List[Goal] = this.asJava
override def toString: String = s"GoalGroup[goals: ${this.mkString(",")}]"
}
abstract class GoalsBase extends Goals {
protected val _goals = ArrayBuffer[Goal]()
def +=(g: Goal): Unit = { _goals += g }
def length: Int = _goals.length
def apply(i: Int): Goal = _goals(i)
}
/** All goals start at the same time and can be completed in any order. */
class GoalGroup extends GoalsBase {
def next(gh: GoalHandler): Unit = {}
def startGoals: Iterable[Goal] = this
def tick(te: Tick, gh: GoalHandler): Unit = {
val len = _goals.length
var i = 0
while (i < len) {
val g = _goals(i)
if (!g.isCompleted) g.tick(te, gh)
i += 1
}
}
}
/**
* Goals that must be completed in order; the next goal becomes active when the current one completes.
* @param time limit in ticks to complete the currently active goal
*/
class GoalSeq(val time: Int = 120 s) extends GoalsBase {
protected var _activeIndex = 0
protected var _activeTime = 0
/** ticks for current active goal */
def ticks = _activeTime
def next(gh: GoalHandler): Unit = {
_activeIndex += 1
_activeTime = 0
if (_activeIndex == _goals.length)
gh.goalGroupOver(this)
else
gh.goalStart(apply(_activeIndex))
}
override def reset(): Unit = {
super.reset()
_activeIndex = 0
_activeTime = 0
}
def startGoals = this.take(1)
def tick(te: Tick, gh: GoalHandler): Unit = {
if (_activeIndex < _goals.length) {
val g = _goals(_activeIndex)
g.tick(te, gh)
_activeTime += 1
if (_activeTime >= time)
gh.goalOver(g)
}
}
}
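// Hedged illustration (not part of the original source): a minimal GoalHandler
// that only logs transitions. How Goal instances are constructed is
// library-specific and not shown in this file, so the sketch covers only the
// callback side of the API.
class LoggingGoalHandler extends GoalHandler {
  override def goalStart(g: Goal): Unit = println(s"goal started: $g")
  override def goalOver(g: Goal): Unit = println(s"goal over: $g")
  override def goalGroupOver(grp: Goals): Unit = println(s"goal group over: $grp")
}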
|
MemoryNetworks/memnets
|
api/src/main/scala/memnets/model/Goals.scala
|
Scala
|
apache-2.0
| 2,361 |
package net.danielkza.http2.stream
import scala.collection.immutable
import akka.stream._
import akka.stream.stage._
import akka.http.scaladsl.model.HttpEntity.{LastChunk, ChunkStreamPart, Chunk}
import net.danielkza.http2.protocol.{HTTP2Error, Frame}
import net.danielkza.http2.protocol.Frame.{Data, Headers}
import net.danielkza.http2.protocol.HTTP2Error.UnacceptableFrameError
class ChunkedDataDecodeStage(val trailers: Boolean = false)
extends GraphStage[FanOutShape2[Frame, ChunkStreamPart, Headers]]
{
val in: Inlet[Frame] = Inlet[Frame]("ChunkedDataDecodeStage.in")
val out0: Outlet[ChunkStreamPart] = Outlet[ChunkStreamPart]("ChunkedDataDecodeStage.out0")
val out1: Outlet[Headers] = Outlet[Headers]("ChunkedDataDecodeStage.out1")
override val shape = new FanOutShape2(in, out0, out1)
override def createLogic(inheritedAttributes: Attributes): GraphStageLogic = new GraphStageLogic(shape) { logic =>
private var completed = false
setHandler(in, new InHandler {
override def onPush(): Unit = {
grab(in) match {
case d: Data if trailers && d.endStream =>
failStage(UnacceptableFrameError())
case d: Data if !trailers && d.endStream =>
if(d.data.isEmpty) emit(out0, LastChunk, () => completeStage())
else emitMultiple(out0, immutable.Seq(Chunk(d.data), LastChunk), () => completeStage())
completed = true
case d: Data =>
emit(out0, Chunk(d.data))
case h: Headers if trailers && h.endStream =>
complete(out0)
emit(out1, h, () => completeStage())
completed = true
case h: Headers =>
failStage(UnacceptableFrameError())
}
}
override def onUpstreamFinish(): Unit = {
if(!completed)
failStage(HTTP2Error.HeaderError())
else
super.onUpstreamFinish()
}
})
setHandler(out0, new OutHandler {
override def onPull(): Unit = pull(in)
})
setHandler(out1, new OutHandler {
override def onPull(): Unit = {
// Do nothing by default. Only forward the demand when emitting the single header frame, after we have already
// grabbed all the Data frames (the stage set up by `emit` will take care of it)
}
})
}
}
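// Hedged illustration (not part of the original source): a sketch of wiring the
// stage into a graph. The source and sinks are hypothetical placeholders for
// whatever the surrounding HTTP/2 code actually provides.
object ChunkedDataDecodeStageSketch {
  import akka.NotUsed
  import akka.stream.ClosedShape
  import akka.stream.scaladsl.{GraphDSL, RunnableGraph, Sink, Source}

  def wire(frameSource: Source[Frame, NotUsed],
           chunkSink: Sink[ChunkStreamPart, NotUsed],
           headersSink: Sink[Headers, NotUsed]): RunnableGraph[NotUsed] =
    RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
      import GraphDSL.Implicits._
      // The fan-out stage splits the frame stream into body chunks and trailing headers.
      val decode = builder.add(new ChunkedDataDecodeStage(trailers = false))
      frameSource ~> decode.in
      decode.out0 ~> chunkSink
      decode.out1 ~> headersSink
      ClosedShape
    })
}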
|
danielkza/h2scala
|
core/src/main/scala/net/danielkza/http2/stream/ChunkedDataDecodeStage.scala
|
Scala
|
apache-2.0
| 2,329 |
package es.ucm.fdi.sscheck.matcher {
package specs2 {
import org.apache.spark.rdd.RDD
import org.specs2.matcher.Matcher
import org.specs2.matcher.MatchersImplicits._
import scalaz.syntax.std.boolean._
object RDDMatchers {
/** Number of records to show on failing predicates
* */
private val numErrors = 4
/** @return a matcher that checks whether predicate holds for all the records of
* an RDD or not.
*
* NOTE: in case exceptions like the following are generated when using a closure for the
* predicate, use the other variant of foreachRecord() to explicitly specify the context
* available to the closure
* {{{
* Driver stacktrace:,org.apache.spark.SparkException: Job aborted due to stage failure:
* Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost):
* java.io.InvalidClassException: org.specs2.execute.Success; no valid constructor
* }}}
* */
def foreachRecord[T](predicate: T => Boolean): Matcher[RDD[T]] = { (rdd: RDD[T]) =>
val failingRecords = rdd.filter(! predicate(_))
(
failingRecords.isEmpty,
"each record fulfils the predicate",
s"predicate failed for records ${failingRecords.take(numErrors).mkString(", ")} ..."
)
}
def foreachRecord[T,C](predicateContext: C)(toPredicate: C => (T => Boolean)): Matcher[RDD[T]] = {
val predicate = toPredicate(predicateContext)
foreachRecord(predicate)
}
/** @return a matcher that checks whether predicate holds for at least one of the records of
* an RDD or not.
*
* NOTE: in case exceptions like the following are generated when using a closure for the
* predicate, use the other variant of existsRecord() to explicitly specify the context
* available to the closure
* {{{
* Driver stacktrace:,org.apache.spark.SparkException: Job aborted due to stage failure:
* Task 0 in stage 0.0 failed 1 times, most recent failure: Lost task 0.0 in stage 0.0 (TID 0, localhost):
* java.io.InvalidClassException: org.specs2.execute.Success; no valid constructor
* }}}
* */
def existsRecord[T](predicate: T => Boolean): Matcher[RDD[T]] = { (rdd: RDD[T]) =>
val exampleRecords = rdd.filter(predicate(_))
(
! exampleRecords.isEmpty,
"some record fulfils the predicate",
s"predicate failed for all the records"
)
}
def existsRecord[T,C](predicateContext: C)(toPredicate: C => (T => Boolean)): Matcher[RDD[T]] = {
val predicate = toPredicate(predicateContext)
existsRecord(predicate)
}
/** @return a Matcher that checks that both RDDs are equal as sets. It is recommended to
* cache both RDDs to avoid recomputation
* */
def beEqualAsSetTo[T](actualRDD: RDD[T]): Matcher[RDD[T]] = { (expectedRDD: RDD[T]) =>
val inActualNotInExpected = actualRDD.subtract(expectedRDD)
val inExpectedNotInActual = expectedRDD.subtract(actualRDD)
lazy val errorMsg: String =
List(
(!inActualNotInExpected.isEmpty) option
s"unexpected records: ${inActualNotInExpected.take(numErrors).mkString(",")} ...",
(!inExpectedNotInActual.isEmpty) option
s"missing records: ${inExpectedNotInActual.take(numErrors).mkString(",")} ..."
).filter(_.isDefined)
.map(_.get).mkString(",")
(
inActualNotInExpected.isEmpty && inExpectedNotInActual.isEmpty,
"both RDDs contain the same records",
errorMsg
)
}
// TODO: idea for predicate with partial functions rdd.collect{case record => true}.toLocalIterator.hasNext must beTrue
}
}
}
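// Hedged illustration (not part of the original source): a minimal specs2
// specification sketch showing how the matchers above might be used. The local
// SparkContext setup below is an assumption; a real suite would manage the
// context's lifecycle (and stop it) properly.
package es.ucm.fdi.sscheck.matcher.example {
  import org.apache.spark.{SparkConf, SparkContext}
  import org.specs2.mutable.Specification
  import es.ucm.fdi.sscheck.matcher.specs2.RDDMatchers._

  class RDDMatchersExampleSpec extends Specification {
    val sc = new SparkContext(new SparkConf().setMaster("local[*]").setAppName("rdd-matchers-example"))

    "an RDD of the numbers 1 to 100" should {
      "satisfy foreachRecord, existsRecord and beEqualAsSetTo" in {
        val rdd = sc.parallelize(1 to 100).cache()
        rdd must foreachRecord[Int](_ > 0)
        rdd must existsRecord[Int](_ % 7 == 0)
        rdd must beEqualAsSetTo(sc.parallelize(1 to 100))
      }
    }
  }
}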
|
juanrh/sscheck
|
src/main/scala/es/ucm/fdi/sscheck/matcher/specs2/package.scala
|
Scala
|
apache-2.0
| 3,939 |
package analysis
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.regression.LinearRegressionModel
import org.apache.spark.mllib.regression.LinearRegressionWithSGD
import org.apache.spark.mllib.stat.{MultivariateStatisticalSummary, Statistics}
object Regression {
def main(args: Array[String]) {
if (args.length < 4) {
args.foreach { println }
println("Usage: <clusterFilePath> <joinedDataFilePath> <outputPath> <numIterations>")
System.exit(0)
}
val conf = new SparkConf().setAppName("Cluster Regression")
//val conf = new SparkConf().setAppName("Cluster Regression").setMaster("local[*]")
val sc = new SparkContext(conf)
//load the joined data and cluster coordinates
val clusterData = sc.objectFile(args(0), 1);
val joinedData = sc.textFile(args(1))
val extractedData = joinedData.map { line =>
val lineArr = line.split(',')
//create a labeled point where the first field is the dependent variable and the
//rest of the array are independent variables
LabeledPoint(lineArr(0).toDouble, Vectors.dense(lineArr.tail.map(_.toDouble)))}.cache()
//create linear model
val numIters = args(3).toInt
val model = LinearRegressionWithSGD.train(extractedData, numIters, 0.00000001)
val valuesAndPreds = extractedData.map { point =>
val pred = model.predict(point.features)
(point.label, pred)
}.persist()
val coorCoeff = Statistics.corr(valuesAndPreds.keys, valuesAndPreds.values)
//save
model.save(sc, args(2))
}
}
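// Hedged illustration (not part of the original source): the parsing above assumes
// joined-data lines of the form "label,feature1,feature2,...". The sample line
// below is made up for this sketch.
object RegressionParsingSketch extends App {
  val arr = "4.5,1.0,2.0,3.0".split(',')
  // First field becomes the label, remaining fields become the feature vector.
  val point = LabeledPoint(arr(0).toDouble, Vectors.dense(arr.tail.map(_.toDouble)))
  println(point) // (4.5,[1.0,2.0,3.0])
}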
|
ramannanda9/restaurant-hotspots
|
Analytics/Regression.scala
|
Scala
|
gpl-3.0
| 1,773 |
package com.moon
/**
* Distance metric interface.
* A metric space.
* Created by Paul on 2016/12/20.
*/
trait MetricSpace[T] {
def distance(a:T,b:T):Double
}
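// Hedged illustration (not part of the original source): a minimal MetricSpace
// instance using absolute difference over Int. A real BK-tree over strings would
// typically use an edit distance such as Levenshtein instead.
object IntMetricSpace extends MetricSpace[Int] {
  override def distance(a: Int, b: Int): Double = math.abs(a - b).toDouble
}
// Example: IntMetricSpace.distance(3, 10) == 7.0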
|
linpingchuan/misc
|
scala/BK-tree/src/main/scala-2.11/com/moon/MetricSpace.scala
|
Scala
|
gpl-2.0
| 164 |
package com.despegar.soffheap.perftest
import com.despegar.soffheap.map.SoffHeapMapBuilder
import java.lang.management.ManagementFactory
import com.despegar.soffheap.SoffHeap
import com.despegar.soffheap.SnapshotValue
import scala.collection.mutable.ListBuffer
object LotOfObjects extends App {
val arrays = System.getProperty("arrays").toInt
val elements = System.getProperty("elements").toInt
val snapshot = new SoffHeapMapBuilder[String,Array[SnapshotValue]]().withMaximumHeapElements(10).build()
def nonHeapMemoryUsage = ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage()
def heapMemoryUsage = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage()
val heapBefore = heapMemoryUsage.getUsed()
val overheadPerObject = 24 + 16
(1 to arrays) foreach {
index =>
val l: ListBuffer[SnapshotValue] = ListBuffer.empty
(1 to elements) foreach { i => l += SnapshotValue(s"value$i", i) }
val elementSize = SoffHeap.sizeOf(classOf[SnapshotValue])
println(s"el objeto en la heap ocupa ${elements * elementSize / 1024 / 1024} MB")
val obj = l.toArray
snapshot.put(s"key$index", obj)
println(snapshot.get(s"key$index").get)
}
while (true) {}
}
|
despegar/soffheap
|
src/test/java/com/despegar/soffheap/perftest/LotOfObjects.scala
|
Scala
|
bsd-2-clause
| 1,249 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.builders
import minitest.TestSuite
import monix.execution.Cancelable
import monix.execution.schedulers.TestScheduler
import monix.reactive.Observable
import monix.reactive.OverflowStrategy.Unbounded
import monix.execution.exceptions.DummyException
import scala.util.Success
object CreateObservableSuite extends TestSuite[TestScheduler] {
def setup() = TestScheduler()
def tearDown(s: TestScheduler): Unit = {
assert(s.state.tasks.isEmpty, "Scheduler should be left with no pending tasks")
}
test("should work") { implicit s =>
val o = Observable.create[Int](Unbounded) { out =>
out.onNext(1)
out.onNext(2)
out.onNext(3)
out.onNext(4)
out.onComplete()
Cancelable.empty
}
val sum = o.sum.runAsyncGetFirst
s.tick()
assertEquals(sum.value.get, Success(Some(10)))
}
test("should protect against user error") { implicit s =>
val ex = DummyException("dummy")
val o = Observable.create[Int](Unbounded) { out =>
throw ex
}
val sum = o.sum.runAsyncGetFirst
s.tick()
assertEquals(sum.value, None)
assertEquals(s.state.lastReportedError, ex)
}
}
|
monifu/monifu
|
monix-reactive/shared/src/test/scala/monix/reactive/internal/builders/CreateObservableSuite.scala
|
Scala
|
apache-2.0
| 1,856 |
import scala.collection.immutable.HashSet
object FilterStructuralSharing extends App {
val mm = new org.github.jamm.MemoryMeter()
val a = HashSet(0 until 1000:_*)
val b = a.filter(_ => true)
require(a eq b)
println(mm.measureDeep(a))
println(mm.measureDeep(b))
println(mm.measureDeep((a,b)))
}
|
rklaehn/scalamuc_20150707
|
scala211/src/main/scala/FilterStructuralSharing.scala
|
Scala
|
apache-2.0
| 309 |
package net.rfc1149.harassme
import android.content.{BroadcastReceiver, ContentResolver, Context, Intent}
import android.media.AudioManager._
import android.provider.CallLog.Calls
import android.telephony.TelephonyManager._
import org.scaloid.common.{SIntent, audioManager}
class HarassMeListener extends BroadcastReceiver {
import net.rfc1149.harassme.HarassMeListener._
override def onReceive(context: Context, intent: Intent) = {
implicit val ctx = context
val extras = intent.getExtras
extras.getString(EXTRA_STATE) match {
case EXTRA_STATE_RINGING =>
// Believe it or not, java.lang.String#isEmpty is not available for API < 9.
val number = Option(extras.getString(EXTRA_INCOMING_NUMBER)).filterNot(_.length == 0)
if (number.exists(shouldBeSignaled) && isSilent)
context.startService(SIntent[RingerService])
case _ =>
context.stopService(SIntent[RingerService])
}
}
}
object HarassMeListener {
private def isSilent(implicit context: Context) =
audioManager(context).getRingerMode != RINGER_MODE_NORMAL || audioManager(context).getStreamVolume(STREAM_RING) == 0
// Return true if the maximum number of calls has been reached with the current one.
private def shouldBeSignaled(number: String)(implicit context: Context): Boolean = {
val prefs = new Prefs(context)
prefs.serviceActivated &&
missedCalls(context, number, System.currentTimeMillis - prefs.minutesCount * 60000) >= prefs.callCount - 1
}
private def missedCalls(context: Context, number: String, since: Long): Int = {
val resolver = context.getContentResolver
val lastAnswered = lastAnsweredCallFrom(resolver, number)
missedCallsSince(resolver, number, since.max(lastAnswered getOrElse 0))
}
private[this] def lastAnsweredCallFrom(resolver: ContentResolver, number: String): Option[Long] =
for {
c <- Option(resolver.query(Calls.CONTENT_URI,
Array(Calls.DATE),
s"${Calls.NUMBER} = ? AND ${Calls.TYPE} = ${Calls.INCOMING_TYPE}",
Array(number),
s"${Calls.DATE} DESC"))
if c.moveToFirst()
} yield c.getLong(c.getColumnIndex(Calls.DATE))
private[this] def missedCallsSince(resolver: ContentResolver, number: String, since: Long): Int =
resolver.query(Calls.CONTENT_URI,
Array(Calls.DATE),
s"${Calls.NUMBER} = ? AND ${Calls.TYPE} = ${Calls.MISSED_TYPE} AND ${Calls.DATE} > ?",
Array(number, since.toString),
null).getCount
}
|
samueltardieu/harassme
|
src/main/scala/net/rfc1149/harassme/HarassMeListener.scala
|
Scala
|
gpl-3.0
| 2,499 |
package org.openurp.edu.eams.teach.service.impl
import org.openurp.edu.base.Student
import org.openurp.edu.eams.teach.service.StudentSource
class StaticStudentSource extends StudentSource {
var students: Set[Student] = _
}
|
openurp/edu-eams-webapp
|
core/src/main/scala/org/openurp/edu/eams/teach/service/impl/StaticStudentSource.scala
|
Scala
|
gpl-3.0
| 230 |
package org.cloudio.morpheus.test
import org.morpheus.Morpheus._
import org.morpheus.Morpher._
import org.morpheus._
import org.cloudio.morpheus.test.samples._
import org.junit.Assert._
import org.junit.Test
import org.morpheus.test.illTyped
/**
* Created by zslajchrt on 01/03/15.
*/
class EssentialCasesTests {
// @Test
// def testEntityOnlyImplicitCreation() {
//
// val morph: EntityA = compose[EntityA].make
//
// assertEquals(0, morph.id)
// assertEquals(1, morph.methodX(1))
//
// }
@Test
def testEntityOnlyExplicitCreation() {
implicit val ent = EntityA(100)
val morph: EntityA = glean[EntityA].make
assertEquals(100, morph.id)
assertEquals(101, morph.methodX(1))
}
// @Test
// def testOneWrapperOnEntity() {
// val model = compose[EntityA with EntityAValidator]
// val morph: EntityA with EntityAValidator = model.make
//
// // todo: identify name clashes in fragment variables, no two fragments can have an equally named non-private variable
// // todo: check that a fragment wrapper directly extends the fragment. This limitation may be removed in the future
// // todo: check that a fragment does not call any method from the context during the initialization
//
// assertEquals(0, morph.id)
// assertEquals(0, morph.counterValidatorX)
// assertEquals(1, morph.methodX(1))
// assertEquals(1, morph.counterValidatorX)
//
// }
@Test
def testOverridingTemplateMethodOnEntity() {
// todo: Warn that entity wrapper methods are not called when invoked from within the entity. Entity wrappers provide just a limited functionality.
// todo: In general, using entities is considered as a legacy workaround. Incorporating a legacy entity can be done in two ways:
// todo: 1) Wrapping the entity to a fragment trait. Since the entity is isolated from the other fragments, there is no limitation on the resulting composite.
// todo: 2) Using the entity as a 'lame' fragment. Such a fragment has limited functionality in terms of overriding its methods by fragment wrappers.
// todo: Specify exactly what is supported and what is not in terms of legacy entities
// val model = compose[EntityA with EntityAHelper]
// val morph: EntityA = model.morph
//
// val r: Int = morph.methodR(1)
// println(r)
}
// @Test
// def testMoreWrappersOnEntity() {
// val model = compose[EntityA with EntityAValidator with EntityALogger]
// val morph: EntityA with EntityAValidator with EntityALogger = model.make
//
// assertEquals(0, morph.id)
// assertEquals(0, morph.counterValidatorX)
// assertEquals(0, morph.counterLoggerX)
// assertEquals(1, morph.methodX(1))
// assertEquals(1, morph.counterValidatorX)
// assertEquals(1, morph.counterLoggerX)
// assertEquals(1, morph.logLevel)
//
// }
// @Test
// def testOneOptionalWrapperOnEntity() {
//
// def assertLoggerStatus(expCounter: Int, morph: EntityA with EntityALogger): Unit = {
// assertEquals(0, morph.id)
// assertEquals(expCounter - 1, morph.counterLoggerX)
// assertEquals(1, morph.methodX(1))
// assertEquals(expCounter, morph.counterLoggerX)
// assertEquals(1, morph.logLevel)
// }
//
// // /? operator is ON by default
// val model = singleton[EntityA with /?[EntityALogger]]
// import model._
//
// model.make match {
// case morph: EntityA with EntityALogger =>
// assertLoggerStatus(1, morph)
// case _ => fail()
// }
//
// // introducing the morph strategy
//
// var logFlag = false // logger is OFF
//
// implicit val morphStrategy = activator(
// ?[EntityALogger] { _ => logFlag}
// )
//
// // the morpher rules out the logger from the resulting composite
//
// model.morph match {
// case morph: EntityA with EntityALogger =>
// fail()
// case _ =>
// }
//
// logFlag = true // logger is ON
//
// // the morpher includes the logger from the resulting composite
//
// model.morph match {
// case morph: EntityA with EntityALogger =>
// assertLoggerStatus(2, morph)
// case _ => fail()
// }
//
// }
// @Test
// def testMoreOptionalWrappersOnEntity() {
//
// // /? operator is ON by default
// val model = singleton[EntityA with /?[EntityAValidator] with /?[EntityALogger]]
// import model._
//
// val validator = fragments.select[FragmentHolder[EntityAValidator]].proxy
// val logger = fragments.select[FragmentHolder[EntityALogger]].proxy
//
// def assertCounters(counterValidatorX: Int, counterValidatorY: Int, counterLoggerX: Int, counterLoggerZ: Int) = {
// assertEquals(counterValidatorX, validator.counterValidatorX)
// assertEquals(counterValidatorY, validator.counterValidatorY)
// assertEquals(counterLoggerX, logger.counterLoggerX)
// assertEquals(counterLoggerZ, logger.counterLoggerZ)
// }
//
// model.make match {
// case morph: EntityA with EntityAValidator with EntityALogger =>
// assertCounters(0, 0, 0, 0)
// morph.methodX(0)
// assertCounters(1, 0, 1, 0)
// morph.methodY(0)
// assertCounters(1, 1, 1, 0)
// morph.methodZ(0)
// assertCounters(1, 1, 1, 1)
//
// case _ => fail()
// }
//
// var validateFlag = false // validator is OFF
// var logFlag = false // logger is OFF
//
// implicit val morphStrategy = activator(
// ?[EntityAValidator] { _ => validateFlag} orElse
// ?[EntityALogger] { _ => logFlag}
// )
//
// model.morph match {
// case morph: EntityA with EntityAValidator with EntityALogger =>
// fail()
// case morph: EntityA with EntityAValidator =>
// fail()
// case morph: EntityA with EntityALogger =>
// fail()
// case morph: EntityA =>
// // no change in counters
// morph.methodX(0)
// assertCounters(1, 1, 1, 1)
// morph.methodY(0)
// assertCounters(1, 1, 1, 1)
// morph.methodZ(0)
// assertCounters(1, 1, 1, 1)
// // OK
// case _ =>
// fail()
// }
//
// validateFlag = true
// logFlag = true
//
// model.morph match {
// case morph: EntityA with EntityAValidator with EntityALogger =>
// morph.methodX(0)
// assertCounters(2, 1, 2, 1)
// morph.methodY(0)
// assertCounters(2, 2, 2, 1)
// morph.methodZ(0)
// assertCounters(2, 2, 2, 2)
// case _ =>
// fail()
// }
//
// validateFlag = true
// logFlag = false
//
// model.morph match {
// case morph: EntityA with EntityAValidator with EntityALogger =>
// fail()
// case morph: EntityA with EntityAValidator =>
// morph.methodX(0)
// assertCounters(3, 2, 2, 2)
// morph.methodY(0)
// assertCounters(3, 3, 2, 2)
// morph.methodZ(0)
// assertCounters(3, 3, 2, 2) // no change in the 4-th counter from the logger, which is OFF
// case morph: EntityA with EntityALogger =>
// fail()
// case morph: EntityA =>
// fail()
// case _ =>
// fail()
// }
//
// validateFlag = false
// logFlag = true
//
// model.morph match {
// case morph: EntityA with EntityAValidator with EntityALogger =>
// fail()
// case morph: EntityA with EntityAValidator =>
// fail()
// case morph: EntityA with EntityALogger =>
// morph.methodX(0)
// assertCounters(3, 3, 3, 2)
// morph.methodY(0)
// assertCounters(3, 3, 3, 2) // no change in the 3-rd counter from the validator, which is OFF
// morph.methodZ(0)
// assertCounters(3, 3, 3, 3)
// case morph: EntityA =>
// fail()
// case _ =>
// fail()
// }
// }
@Test
def testOneWrapperOnFragment(): Unit = {
// val cr = new ClassReader("org/cloudio/morpheus/test/PingLogger$class")
// ClassPrinter.printClass(cr)
implicit val pongConfig = PongConfig.cfg
val pingPong: Ping with PingLogger with Pong = compose[Ping with PingLogger with Pong].make
pingPong.ping(0)
assertEquals(11, pingPong.pingLoggerCounter)
}
@Test
def testMoreWrappersOnFragment(): Unit = {
implicit val pongConfig = PongConfig.cfg
val pingPong: Ping with PingValidator with PingLogger with Pong = compose[Ping with PingValidator with PingLogger with Pong].make
pingPong.ping(0)
assertEquals(11, pingPong.pingLoggerCounter)
assertEquals(11, pingPong.pingValidatorCounter)
}
@Test
def testMoreOptionalWrappersOnFragment() {
// /? operator is ON by default
implicit val pongConfig = PongConfig.cfg
val model = singleton[Ping with /?[PingValidator] with /?[PingLogger] with Pong]
import model._
val ping = fragments.select[FragmentHolder[Ping]].proxy
val validator = fragments.select[FragmentHolder[PingValidator]].proxy
val logger = fragments.select[FragmentHolder[PingLogger]].proxy
def assertCounters(counterPingX: Int, counterPingY: Int, counterPingZ: Int, counterValidatorX: Int, counterValidatorY: Int, counterLoggerX: Int, counterLoggerZ: Int) = {
assertEquals(counterPingX, ping.methodXCounterInPing)
assertEquals(counterPingY, ping.methodYCounterInPing)
assertEquals(counterPingZ, ping.methodZCounterInPing)
assertEquals(counterValidatorX, validator.methodXCounterInValidator)
assertEquals(counterValidatorY, validator.methodYCounterInValidator)
assertEquals(counterLoggerX, logger.methodXCounterInLogger)
assertEquals(counterLoggerZ, logger.methodZCounterInLogger)
}
model.make match {
case morph: Ping with PingValidator with PingLogger with Pong =>
assertCounters(0, 0, 0, 0, 0, 0, 0)
morph.methodX(0)
assertCounters(1, 0, 0, 1, 0, 1, 0)
morph.methodY(0)
assertCounters(1, 1, 0, 1, 1, 1, 0)
morph.methodZ(0)
assertCounters(1, 1, 1, 1, 1, 1, 1)
case _ => fail()
}
var validateFlag = false // validator is OFF
var logFlag = false // logger is OFF
implicit val morphStrategy = activator(
?[PingValidator] { _ => validateFlag} orElse
?[PingLogger] { _ => logFlag}
)
model.morph match {
case morph: Ping with PingValidator with PingLogger with Pong =>
fail()
case morph: Ping with PingValidator with Pong =>
fail()
case morph: Ping with PingLogger with Pong =>
fail()
case morph: Ping with Pong =>
// no change in counters
morph.methodX(0)
assertCounters(2, 1, 1, 1, 1, 1, 1)
morph.methodY(0)
assertCounters(2, 2, 1, 1, 1, 1, 1)
morph.methodZ(0)
assertCounters(2, 2, 2, 1, 1, 1, 1)
// OK
case _ =>
fail()
}
validateFlag = true
logFlag = true
model.morph match {
case morph: Ping with PingValidator with PingLogger with Pong =>
morph.methodX(0)
assertCounters(3, 2, 2, 2, 1, 2, 1)
morph.methodY(0)
assertCounters(3, 3, 2, 2, 2, 2, 1)
morph.methodZ(0)
assertCounters(3, 3, 3, 2, 2, 2, 2)
case _ =>
fail()
}
validateFlag = true
logFlag = false
model.morph match {
case morph: Ping with PingValidator with PingLogger with Pong =>
fail()
case morph: Ping with PingValidator with Pong =>
morph.methodX(0)
assertCounters(4, 3, 3, 3, 2, 2, 2)
morph.methodY(0)
assertCounters(4, 4, 3, 3, 3, 2, 2)
morph.methodZ(0)
assertCounters(4, 4, 4, 3, 3, 2, 2) // no change in the 4-th counter from the logger, which is OFF
case morph: Ping with PingLogger with Pong =>
fail()
case morph: Ping with Pong =>
fail()
case _ =>
fail()
}
validateFlag = false
logFlag = true
model.morph match {
case morph: Ping with PingValidator with PingLogger with Pong =>
fail()
case morph: Ping with PingValidator with Pong =>
fail()
case morph: Ping with PingLogger with Pong =>
morph.methodX(0)
assertCounters(5, 4, 4, 3, 3, 3, 2)
morph.methodY(0)
assertCounters(5, 5, 4, 3, 3, 3, 2) // no change in the 3-rd counter from the validator, which is OFF
morph.methodZ(0)
assertCounters(5, 5, 5, 3, 3, 3, 3)
case _ =>
fail()
}
}
// @Test
// def testMutableProxy() {
// val model = singleton[EntityA with \\?[EntityALogger]]
// import model._
//
// // retrieve the fragment instance of the logger
// val logger = fragments.select[FragmentHolder[EntityALogger]].proxy
//
// var logFlag = false // logger is OFF
//
// implicit val morphStrategy = activator(
// ?[EntityALogger] { _ => logFlag}
// )
//
// val proxy = model.morph_~
//
// assertEquals(0, logger.counterLoggerX)
// proxy.methodX(1)
// assertEquals(0, logger.counterLoggerX) // no change, the logger is OFF
//
// logFlag = true
// proxy.remorph()
//
// proxy.methodX(2)
// assertEquals(1, logger.counterLoggerX) // the counter incremented, the logger is ON
//
// logFlag = false
// proxy.remorph()
//
// proxy.methodX(3)
// assertEquals(1, logger.counterLoggerX) // no change, the logger is OFF
//
// }
//
// @Test
// def testAlternativeFragmentsDependentOnEntity(): Unit = {
//
// implicit val ent = external(EntityA(100))
// val model = singleton[EntityA with (EntityAJSONPrinter or EntityACSVPrinter)]
// import model._
//
// var printFmt = 'json
//
// implicit val morphStrategy = activator(
// ?[EntityAJSONPrinter] { _ => printFmt == 'json} orElse
// ?[EntityACSVPrinter] { _ => printFmt == 'csv}
// )
//
// val morph = model.morph_~
//
// var out = morph.print()
// assertEquals("{'id': 100}", out)
//
// printFmt = 'csv
// morph.remorph()
//
// out = morph.print()
// assertEquals("100", out)
// }
//
// @Test
// def testSharingFragments() {
// //val model = compose[EntityA with \\?[EntityAValidator] with \\?[EntityALogger]]
// val model1 = {
// singleton[EntityA with EntityAValidator with EntityALogger]
// }
//
// val outerLogger = model1.fragments.select[FragmentHolder[EntityALogger]]
//
// val model2 = {
// implicit val logger = external[EntityALogger](outerLogger.proxy)
// singleton[EntityA with EntityAValidator with EntityALogger]
// }
//
// val proxy1: EntityA with EntityAValidator with EntityALogger = model1.make
// val proxy2: EntityA with EntityAValidator with EntityALogger = model2.make
//
// proxy1.methodX(1)
// assertEquals(1, proxy1.counterLoggerX)
// assertEquals(1, proxy2.counterLoggerX)
// assertEquals(1, proxy1.counterValidatorX)
// assertEquals(0, proxy2.counterValidatorX)
// proxy2.methodX(2)
// assertEquals(2, proxy1.counterLoggerX)
// assertEquals(2, proxy2.counterLoggerX)
// assertEquals(1, proxy1.counterValidatorX)
// assertEquals(1, proxy2.counterValidatorX)
// }
@Test
def testMutuallyDependentFragments() {
// todo: a negative test for unsatisfied dependencies of PingDimA or PongDimA
implicit val pongConfig = single[Pong, PongConfig](new PongConfig {
val maxReturns: Int = 10
})
val model = singleton[Ping with Pong]
import model._
val ping = fragments.select[FragmentHolder[Ping]].proxy
val pong = fragments.select[FragmentHolder[Pong]].proxy
val pingPong: Ping with Pong = model.make
val res = pingPong.ping(0)
assertEquals(10, res)
assertEquals(11, ping.pingCounter)
assertEquals(11, pong.pongCounter)
}
@Test
def testVolatilePolymorphism(): Unit = {
implicit val mutConfig = single[MutableFragment, MutableFragmentConfig](MutableFragmentConfig())
val animal: Animal = compose[Animal with MutableFragment].make
val c = animal.carnivore
val likableFood = animal.craveFor
//assertTrue(likableFood.delegate.isInstanceOf[Apple])
assertEquals("Apple", likableFood.toString)
// make the food preference change
animal.carnivore = true
//assertTrue(likableFood.delegate.isInstanceOf[Fish])
assertEquals("Fish", likableFood.toString)
// make the food preference change again
animal.carnivore = false
//assertTrue(likableFood.delegate.isInstanceOf[Apple])
assertEquals("Apple", likableFood.toString)
}
@Test
def testVolatilePolymorphismCollection(): Unit = {
def movingAnimal(a: MutableMorphMirror[_]): Option[MovingAnimal] = inspect(a) {
case m: MovingAnimal => Some(m)
case _ => None
}
val herd: Herd = compose[Herd].make
val a1 = herd.add()
val a2 = herd.add()
for (a <- herd.members()) {
assertTrue(movingAnimal(a).isDefined)
assertTrue(a.delegate.isInstanceOf[MovingAnimal])
a.kill()
assertFalse(movingAnimal(a).isDefined)
assertFalse(a.delegate.isInstanceOf[MovingAnimal])
}
herd.members()(0).craveFor.delegate.isInstanceOf[Apple]
inspect(herd.members()(0).craveFor) {
case a: Apple => Some(a)
case _ => None
}
// todo
}
@Test
def testCompose(): Unit = {
val model = compose_?[Ping]
val ping1 = model.fragments.select[FragmentHolder[Ping]].proxy
val ping2 = model.fragments.select[FragmentHolder[Ping]].proxy
assertNotSame(ping1, ping2)
}
@Test
def testSingleton(): Unit = {
val model = singleton_?[Ping]
val ping1 = model.fragments.select[FragmentHolder[Ping]].proxy
val ping2 = model.fragments.select[FragmentHolder[Ping]].proxy
assertSame(ping1, ping2)
}
@Test
def testComposeWithSingletonFragment(): Unit = {
implicit val pongCfg = single[Pong, PongConfig](PongConfig)
val model = compose[Ping with Pong]
val ping1 = model.fragments.select[FragmentHolder[Ping]].proxy
val ping2 = model.fragments.select[FragmentHolder[Ping]].proxy
assertNotSame(ping1, ping2)
val pong1 = model.fragments.select[FragmentHolder[Pong]].proxy
val pong2 = model.fragments.select[FragmentHolder[Pong]].proxy
assertSame(pong1, pong2)
}
@Test
def testSingletonWithNonSingletonFragment(): Unit = {
implicit val pongCfg = frag[Pong, PongConfig](PongConfig)
val model = singleton[Ping with Pong]
val ping1 = model.fragments.select[FragmentHolder[Ping]].proxy
val ping2 = model.fragments.select[FragmentHolder[Ping]].proxy
assertSame(ping1, ping2)
val pong1 = model.fragments.select[FragmentHolder[Pong]].proxy
val pong2 = model.fragments.select[FragmentHolder[Pong]].proxy
assertNotSame(pong1, pong2)
}
@Test
def testSingletonWithNonSingletonFragmentReInit(): Unit = {
var maxReturnsVar = 1
implicit val pongCfg = frag[Pong, PongConfig](new PongConfig {
override val maxReturns: Int = maxReturnsVar
})
val model = singleton[Ping with Pong]
assertEquals(1, model.~.maxReturns)
maxReturnsVar = 2
model.~.remorph
assertEquals(2, model.~.maxReturns)
}
@Test
def testComplexStructure(): Unit = {
val editor = AlbumEditor()
editor.loadPhotos(List(1l, 2l, 3l))
val photos = editor.photos
println(photos)
for (photo <- photos) {
photo.resize(2, 2)
if (photo.width < photo.height)
photo.rotate(Math.PI / 2)
}
println(photos)
for (photo <- photos) {
inspect(photo) {
case bp: BigPhoto =>
println(s"Before iconized: $bp")
bp.iconize()
println(s"After iconized: $bp")
case _ =>
}
}
for (photo <- photos) {
inspect(photo) {
case icon: IconPhoto =>
println(s"Icon: $icon")
case _ =>
}
}
}
@Test
def testSelect(): Unit = {
val composite = compose[Jin or Jang]
import composite._
implicit val strategy = AlternatingMorphingStrategy(left, right)
val mutPx = morph_~
select[Jin](mutPx) match {
case Some(jin) =>
assertTrue(jin.isInstanceOf[Jin])
// OK
case None =>
fail()
}
select[Jang](mutPx) match {
case Some(jangEnt) =>
fail()
case None =>
// OK
}
// Switch the strategy to 'right' and re-morph the composite by notifying the proxy
strategy.switch(1)
mutPx.remorph()
select[Jang](mutPx) match {
case Some(jang) =>
assertTrue(jang.isInstanceOf[Jang])
// OK
case None =>
fail()
}
select[Jin](mutPx) match {
case Some(jinEnt) =>
fail()
case None =>
// OK
}
//must not compile
// select[Ping](mutPx) match {
// case Some(_) =>
// case None =>
// }
}
@Test
def testSelectHiddenFragment(): Unit = {
val kernel = compose[D1 with F]
val morph = kernel.!
val morphNarrowed = asMorphOf[F](morph)
// selecting the visible fragment F
select[F](morphNarrowed) match {
case None => fail()
case Some(f) => // OK
}
// selecting the invisible fragment D1
select[D1](morphNarrowed) match {
case None => fail()
case Some(d1) => // OK
}
}
@Test
def testSelectWithEntity(): Unit = {
implicit val cfg1 = external[EntityA](new EntityA(1))
val composite = compose[EntityA with (Jin or Jang)]
import composite._
implicit val strategy = AlternatingMorphingStrategy(left, right)
val mutPx = morph_~
select[EntityA with Jin](mutPx) match {
case Some(jinEnt) =>
assertTrue(jinEnt.isInstanceOf[EntityA with Jin])
// OK
case None =>
fail()
}
select[EntityA with Jang](mutPx) match {
case Some(jangEnt) =>
fail()
case None =>
// OK
}
// Switch the strategy to 'right' and re-morph the composite by notifying the proxy
strategy.switch(1)
mutPx.remorph()
select[EntityA with Jang](mutPx) match {
case Some(jangEnt) =>
assertTrue(jangEnt.isInstanceOf[EntityA with Jang])
// OK
case None =>
fail()
}
select[EntityA with Jin](mutPx) match {
case Some(jinEnt) =>
fail()
case None =>
// OK
}
//must not compile
// select[Ping](mutPx) match {
// case Some(_) =>
// case None =>
// }
}
@Test
def testDimensionWrapper(): Unit = {
val composite = compose[EntityB with EntityBRenderer with XMLRenderer]
import composite._
val px = make
assertEquals("<0/>", px.render)
assertEquals(1, px.xmlRendererCounter)
}
@Test
def testDimensionWrapperCrossSuperCall(): Unit = {
val composite = compose[EntityB with EntityBRenderer with XMLRenderer]
import composite._
val px = make
// super.methodX is called from XMLRenderer.methodU
px.methodU(1)
assertEquals(1, px.methodUCounterInXMLRenderer)
assertEquals(1, px.methodXCounterInRenderer)
assertEquals(0, px.methodUCounterInRenderer)
}
@Test
def testFragmentWrapperCrossSuperCall(): Unit = {
val composite = compose[EntityB with EntityBRenderer with EntityBRendererDecorator]
import composite._
val px = make
// super.methodU is called from EntityBRendererDecorator.methodV
px.methodV(1)
assertEquals(1, px.methodVCounterInDecorator)
assertEquals(1, px.methodUCounterInRenderer)
assertEquals(0, px.methodVCounterInRenderer)
}
@Test
def testDimensionAndFragmentWrapperCrossSuperCall(): Unit = {
val composite = compose[EntityB with EntityBRenderer with XMLRenderer with EntityBRendererDecorator]
import composite._
val px = make
// super.methodU is called from EntityBRendererDecorator.methodV
// super.methodX is called from XMLRenderer.methodU
px.methodV(1)
assertEquals(1, px.methodVCounterInDecorator)
assertEquals(1, px.methodUCounterInXMLRenderer)
assertEquals(1, px.methodXCounterInRenderer)
}
@Test
def testOptionalDimensionWrapper(): Unit = {
val composite = compose[EntityB with EntityBRenderer with /?[XMLRenderer]]
import composite._
implicit val strategy = AlternatingMorphingStrategy(left, right)
val px = morph_~
assertEquals("<0/>", px.render)
// Switch off the XMLRenderer
strategy.switch(1)
px.remorph()
assertEquals("0", px.render)
//println(px.rendererLoggerCounter)
}
@Test
def testDimensionAndFragmentWrapper(): Unit = {
val composite = compose[EntityB with EntityBRenderer with XMLRenderer with EntityBRendererDecorator]
import composite._
val px = make
assertEquals("{<0/>}", px.render)
assertEquals(1, px.xmlRendererCounter)
assertEquals(1, px.entityBRendererDecoratorCounter)
}
@Test
def testFragmentAndDimensionWrapper(): Unit = {
val composite = compose[EntityB with EntityBRenderer with EntityBRendererDecorator with XMLRenderer]
import composite._
val px = make
assertEquals("<{0}/>", px.render)
assertEquals(1, px.xmlRendererCounter)
assertEquals(1, px.entityBRendererDecoratorCounter)
}
@Test
def testOptionalDimensionAndFragmentWrappersOnFragment() {
// /? operator is ON by default
val model = singleton[EntityB with EntityBRenderer with /?[XMLRenderer] with /?[EntityBRendererDecorator]]
import model._
val fragment = fragments.select[FragmentHolder[EntityBRenderer]].proxy
val dimensionWrapper = fragments.select[FragmentHolder[XMLRenderer]].proxy
val fragmentWrapper = fragments.select[FragmentHolder[EntityBRendererDecorator]].proxy
def assertCounters(fragmentCounterX: Int, fragmentCounterY: Int, fragmentCounterZ: Int, dimensionWrapperCounterX: Int, dimensionWrapperCounterY: Int, fragmentWrapperCounterX: Int, fragmentWrapperCounterZ: Int) = {
assertEquals(fragmentCounterX, fragment.methodXCounterInRenderer)
assertEquals(fragmentCounterY, fragment.methodYCounterInRenderer)
assertEquals(fragmentCounterZ, fragment.methodZCounterInRenderer)
assertEquals(dimensionWrapperCounterX, dimensionWrapper.methodXCounterInXMLRenderer)
assertEquals(dimensionWrapperCounterY, dimensionWrapper.methodYCounterInXMLRenderer)
assertEquals(fragmentWrapperCounterX, fragmentWrapper.methodXCounterInDecorator)
assertEquals(fragmentWrapperCounterZ, fragmentWrapper.methodZCounterInDecorator)
}
model.make match {
case morph: EntityB with EntityBRenderer with XMLRenderer with EntityBRendererDecorator =>
assertCounters(0, 0, 0, 0, 0, 0, 0)
morph.methodX(0)
assertCounters(1, 0, 0, 1, 0, 1, 0)
morph.methodY(0)
assertCounters(1, 1, 0, 1, 1, 1, 0)
morph.methodZ(0)
assertCounters(1, 1, 1, 1, 1, 1, 1)
case _ => fail()
}
var dimWrapperFlag = false
var fragWrapperFlag = false
implicit val morphStrategy = activator(
?[XMLRenderer] { _ => dimWrapperFlag} orElse
?[EntityBRendererDecorator] { _ => fragWrapperFlag}
)
model.morph match {
case morph: EntityB with EntityBRenderer with XMLRenderer with EntityBRendererDecorator =>
fail()
case morph: EntityB with EntityBRenderer with XMLRenderer =>
fail()
case morph: EntityB with EntityBRenderer with EntityBRendererDecorator =>
fail()
case morph: EntityB with EntityBRenderer =>
// no change in counters
morph.methodX(0)
assertCounters(2, 1, 1, 1, 1, 1, 1)
morph.methodY(0)
assertCounters(2, 2, 1, 1, 1, 1, 1)
morph.methodZ(0)
assertCounters(2, 2, 2, 1, 1, 1, 1)
// OK
case _ =>
fail()
}
dimWrapperFlag = true
fragWrapperFlag = true
model.morph match {
case morph: EntityB with EntityBRenderer with XMLRenderer with EntityBRendererDecorator =>
morph.methodX(0)
assertCounters(3, 2, 2, 2, 1, 2, 1)
morph.methodY(0)
assertCounters(3, 3, 2, 2, 2, 2, 1)
morph.methodZ(0)
assertCounters(3, 3, 3, 2, 2, 2, 2)
case _ =>
fail()
}
dimWrapperFlag = true
fragWrapperFlag = false
model.morph match {
case morph: EntityB with EntityBRenderer with XMLRenderer with EntityBRendererDecorator =>
fail()
case morph: EntityB with EntityBRenderer with XMLRenderer =>
morph.methodX(0)
assertCounters(4, 3, 3, 3, 2, 2, 2)
morph.methodY(0)
assertCounters(4, 4, 3, 3, 3, 2, 2)
morph.methodZ(0)
assertCounters(4, 4, 4, 3, 3, 2, 2) // no change in the 4-th counter from the dimensionWrapper, which is OFF
case _ =>
fail()
}
dimWrapperFlag = false
fragWrapperFlag = true
model.morph match {
case morph: EntityB with EntityBRenderer with XMLRenderer with EntityBRendererDecorator =>
fail()
case morph: EntityB with EntityBRenderer with EntityBRendererDecorator =>
morph.methodX(0)
assertCounters(5, 4, 4, 3, 3, 3, 2)
morph.methodY(0)
assertCounters(5, 5, 4, 3, 3, 3, 2) // no change in the 3-rd counter from the validator, which is OFF
morph.methodZ(0)
assertCounters(5, 5, 5, 3, 3, 3, 3)
case _ =>
fail()
}
}
@Test
def testRatingStrategy(): Unit = {
val composite = compose[EntityB with EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]]
//val composite = compose[EntityB with /?[XMLRenderer]]
import composite._
//var m = morph_~(lookupAlt(isFragment[XMLRenderer], isFragment[EntityBRendererDecorator]))
var m = morph_~(RatingStrategy(defaultStrategy, hasFragment[XMLRenderer](IncRating), hasFragment[EntityBRendererDecorator](IncRating)))
select[XMLRenderer with EntityBRendererDecorator](m) match {
case None =>
fail()
case Some(f) =>
// OK
}
//m = morph_~(lookupAlt(isFragment[XMLRenderer], isNotFragment[EntityBRendererDecorator]))
m = morph_~(RatingStrategy(defaultStrategy, hasFragment[XMLRenderer](IncRating), hasFragment[EntityBRendererDecorator](DecRating)))
select[EntityBRendererDecorator](m) match {
case None =>
// OK
case Some(f) =>
fail()
}
select[XMLRenderer with EntityBRendererDecorator](m) match {
case None =>
// OK
case Some(f) =>
fail()
}
select[XMLRenderer](m) match {
case None =>
fail()
case Some(f) =>
// OK
}
//m = morph_~(lookupAlt(isNotFragment[XMLRenderer], isNotFragment[EntityBRendererDecorator]))
m = morph_~(RatingStrategy(defaultStrategy, hasFragment[XMLRenderer](DecRating), hasFragment[EntityBRendererDecorator](DecRating)))
select[EntityBRendererDecorator](m) match {
case None =>
// OK
case Some(f) =>
fail()
}
select[XMLRenderer with EntityBRendererDecorator](m) match {
case None =>
// OK
case Some(f) =>
fail()
}
select[XMLRenderer](m) match {
case None =>
// OK
case Some(f) =>
fail()
}
select[EntityB with EntityBRenderer](m) match {
case None =>
fail()
case Some(f) =>
// OK
}
}
@Test
def testInclusiveReference(): Unit = {
val compositeNoAlts = compose[EntityB with EntityBRenderer with EntityBRendererDecorator with XMLRenderer]
assertTrue(A.inclRefTest(compositeNoAlts))
val compositeWithAlts = compose[EntityB with EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]]
assertTrue(A.inclRefTest(compositeWithAlts))
// this should not compile since EntityBRenderer is missing
val incompatibleComp = compose[EntityB]
illTyped(
"""
A.inclRefTest(incompatibleComp)
""")
val incompleteComp = compose_?[EntityBRenderer with EntityBRendererDecorator with XMLRenderer]
// // this should not compile since the reference requires the deps check
// val depsCheckCompRef: &[EntityBRenderer with EntityBRendererDecorator with XMLRenderer] = incompleteComp
// // this should compile since the reference does not require the deps check
// val nodepsCheckCompRef: &?[EntityBRenderer with EntityBRendererDecorator with XMLRenderer] = incompleteComp
}
// @Test
// def testExclusiveReference(): Unit = {
// val c = compose[(A with B) or D1]
// val cr: &[A or D] = c
// val m = *(cr).~
// m.remorph(RatingStrategy(m.strategy, hasFragment[A](IncRating)))
// assertTrue(select[A](m).isDefined)
// m.remorph(RatingStrategy(m.strategy, hasFragment[A](DecRating), hasFragment[D](IncRating)))
// assertFalse(select[A](m).isDefined)
// assertTrue(select[D](m).isDefined)
//
// // this should not compile
// // val c2 = compose[(A with B) or D1]
// // val cr2: &[A or B] = c2
//
// // val compositeNoAlts = compose[EntityB with EntityBRenderer with EntityBRendererDecorator with XMLRenderer]
// // // this should not compile since the missing optional fragments EntityBRendererDecorator and XMLRenderer
// // A.exclRefTest(compositeNoAlts)
//
// val compositeWithCompleteAlts = compose[EntityB with EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]]
// assertTrue(A.exclRefTest(compositeWithCompleteAlts))
//
// // val compositeWithMutuallyExclAlts = compose[EntityB with EntityBRenderer with (EntityBRendererDecorator or XMLRenderer)]
// // //this should not compile since it fails during the inclusive model validation (which precedes the exclusivity check) the source model cannot yield [EntityB with EntityBRenderer with EntityBRendererDecorator with XMLRenderer] alternative
// // assertTrue(A.exclRefTest(compositeWithMutuallyExclAlts))
//
// // val compositeWithAlts = compose[EntityB with EntityBRenderer with EntityBRendererDecorator with /?[XMLRenderer]]
// // //this should not compile since the missing optional fragment EntityBRendererDecorator
// // assertTrue(A.exclRefTest(compositeWithAlts))
//
// //
// // // this should not compile since the missing EntityBRenderer
// // val incompatibleComp = compose[EntityB]
// // assertTrue(A.inclRefTest(incompatibleComp))
//
// val incompleteComp = compose_?[EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]]
//
// // this should not compile since the reference requires the deps check
// //val depsCheckCompRef: &[EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]] = incompleteComp
//
// // this should compile since the reference does not require the deps check
// val nodepsCheckCompRef: &?[EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]] = incompleteComp
//
// }
@Test
def testExistentialReference(): Unit = {
// this composite has the minimum components to be accepted by the existential reference
val comp = compose[EntityB with EntityBRenderer]
assertTrue(A.existRefTest(comp))
// This should not compile since there is no source alternative for any target one
illTyped(
"""
val comp2 = compose[EntityB]
assertTrue(A.existRefTest(comp2))
""")
}
@Test
def testDefaultStrategy(): Unit = {
val model = parse[EntityB with EntityBRenderer with /?[XMLRenderer]](true)
val defStrat = AlternatingMorphingStrategy(model.left, model.right)
val composite = build(model, false, FactoryProvider, defStrat, Total) // the equivalent to "compose"
import composite._
// We do not call morph_~ since we control the structure by the default strategy set to the composite instance
val px = make_~
assertEquals("<0/>", px.render)
// Switch off the XMLRenderer
defStrat.switch(1)
px.remorph()
assertEquals("0", px.render)
//println(px.rendererLoggerCounter)
}
// @Test
// def testConcat(): Unit = {
// val compositeNoAlts = compose[EntityB with EntityBRenderer with EntityBRendererDecorator with XMLRenderer]
// assertTrue(A.concatTestIncl(compositeNoAlts))
//
// val compositeWithAlts = compose[EntityB with EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]]
// assertTrue(A.concatTestExcl(compositeWithAlts))
// }
def testComplexDependencyCheck(): Unit = {
// 1. Ping and Pong depend on each other
// 2. EntityBRenderer depends on EntityB, however they are both optional
implicit val pongConfig = PongConfig.cfg
compose[Ping with Pong with (Unit or (EntityB with EntityBRenderer))]
// compose[Unit] //todo
// should not compile
illTyped(
"""
compose[Ping with (Unit or (EntityB with EntityBRenderer))]
""")
// should not compile
illTyped(
"""
compose[Ping with Pong with /?[EntityB] with /?[EntityBRenderer]]
""")
}
@Test
def testUsingRefToSpecializeComposite(): Unit = {
val compositeWithAlts = compose[EntityB with EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]]
val specCompRef: &[EntityB with XMLRenderer] = compositeWithAlts
// The LUB of the specialized composite is the same as its model type, since the composite type has only one alternative.
// Also, the specialized type needn't be complete: no deps check is made, because the source composite instance
// is assumed to be complete.
val deref = *(specCompRef)
val specComp: EntityB with XMLRenderer = deref.make
val specCompMut: EntityB with XMLRenderer = *(specCompRef).make_~
// just try to invoke some methods on the proxies
specComp.methodX(1)
specCompMut.methodX(1)
}
/**
* This is a more concise version (using the asMorphOf macro) of testUsingRefToSpecializeComposite
*/
@Test
def testAsCompositeOf(): Unit = {
val compositeWithAlts = compose[EntityB with EntityBRenderer with /?[EntityBRendererDecorator] with /?[XMLRenderer]]
val specComp = asMorphOf[EntityB with XMLRenderer](compositeWithAlts)
// just try to invoke some methods on the proxies
specComp.methodX(1)
val specCompMut = asMorphOf_~[EntityB with XMLRenderer](compositeWithAlts)
// just try to invoke some methods on the proxies
specCompMut.methodX(1)
specCompMut.remorph
// Should not compile because of the unknown fragment PingLogger
illTyped(
"""
asMorphOf[EntityB with PingLogger](compositeWithAlts)
""")
// Should not compile because the LUB is not the same as the composite type (there are two alternatives)
illTyped(
"""
asMorphOf[EntityB with /?[XMLRenderer]](compositeWithAlts)
""")
}
@Test
def testPlaceholder(): Unit = {
val model: MorphModel[$[EntityA]] = parse[$[EntityA]](false)
model.rootNode match {
case fn@FragmentNode(_, true) =>
model.fragmentDescriptor(fn) match {
case None =>
fail()
case Some(fd) =>
import scala.reflect.runtime.universe._
assertTrue(fd.fragTag.tpe =:= implicitly[WeakTypeTag[EntityA]].tpe)
}
// OK
case _ =>
fail()
}
}
@Test
def testAltMappingsPseudoCode(): Unit = {
type Comp = EntityB with /?[EntityBRenderer]
val inst = compose[Comp]
val instRef: &[Comp] = inst
// Target alternative Corresponding original alternatives
// (0, 1) -> (0, 1)
// (0) -> (0), (0, 1)
instRef.altMappings.newAltToOrigAlt.get(List(0, 1)) match {
case None =>
fail()
case Some(origAlts) =>
assertEquals(List(
OrigAlt(List(0, 1), List(OriginalInstanceSource(FragmentNode(0, false)), OriginalInstanceSource(FragmentNode(1, false))))),
origAlts)
}
instRef.altMappings.newAltToOrigAlt.get(List(0)) match {
case None =>
fail()
case Some(origAlts) =>
assertEquals(List(
OrigAlt(List(0),List(OriginalInstanceSource(FragmentNode(0,false)))),
OrigAlt(List(0, 1),List(OriginalInstanceSource(FragmentNode(0,false)), OriginalInstanceSource(FragmentNode(1,false))))),
origAlts)
}
// Original alternative Alternative's template
// (0, 1) -> (Original(0), Original(1))
// (0) -> (Original(0))
// instRef.altMappings.origAltToTemplateAlt.get(List(0, 1)) match {
// case None =>
// fail()
// case Some(origAlts) =>
// assertEquals(List(OriginalInstanceSource(FragmentNode(0, false)), OriginalInstanceSource(FragmentNode(1, false))), origAlts)
// }
//
// instRef.altMappings.origAltToTemplateAlt.get(List(0)) match {
// case None =>
// fail()
// case Some(origAlts) =>
// assertEquals(List(OriginalInstanceSource(FragmentNode(0, false))), origAlts)
// }
}
@Test
def testMirrorTrait(): Unit = {
type Comp = EntityB with /?[EntityBRenderer]
val inst = compose[Comp]
// MorphMirror
val m = inst.make
assertEquals(2, m.myAlternative.size)
assertSame(inst, m.kernel)
// MutableMorphMirror
val mm = inst.make_~
assertEquals(2, mm.myAlternative.size)
assertSame(inst, mm.kernel)
assertNotNull(mm.delegate)
mm.remorph // test it just by calling it
}
@Test
def testMutableFragment(): Unit = {
val model = parse[(StatefulX or StatefulY) with MutableFragment](true)
import model._
val statusMonitor = EventMonitor[String]("status")
val strategy = activator(
?[StatefulX] { _ => statusMonitor("x", true)} orElse
?[StatefulY] { _ => statusMonitor("y", false)}
)
implicit val mutableFragConfig = single[MutableFragment, MutableFragmentConfig](MutableFragmentConfig(statusMonitor))
val inst = compose(model, strategy)
val m = inst.~
println(m.isInstanceOf[MutableFragment])
val isListening = m.startListening({
case CompositeEvent("status", _, _) => true
})
select[StatefulX](m) match {
case Some(s) => //OK
case None => fail()
}
m.switch()
select[StatefulY](m) match {
case Some(s) => //OK
case None => fail()
}
m.switch()
select[StatefulX](m) match {
case Some(s) => //OK
case None => fail()
}
m.stopListening()
m.switch()
// no change since we stopped listening
select[StatefulX](m) match {
case Some(s) => //OK
case None => fail()
}
}
}
|
zslajchrt/morpheus-tests
|
src/test/scala/org/cloudio/morpheus/test/EssentialCasesTests.scala
|
Scala
|
apache-2.0
| 42,423 |
package de.choffmeister.microserviceutils.test.first
object ErrorFirst {
def apply(): Throwable = new RuntimeException("Error")
}
|
choffmeister/microservice-utils
|
microservice-utils/src/test/scala/de/choffmeister/microserviceutils/test/first/ErrorFirst.scala
|
Scala
|
mit
| 133 |
/**
* Created by cravefm on 9/14/15.
*/
package object models {
object JsonFormats {
import play.api.libs.json.Json
    // Generates Writes and Reads for Balance thanks to Json Macros
implicit val balanceFormat = Json.format[Balance]
}
}
|
setrar/rbchackaton
|
backend/app/models/package.scala
|
Scala
|
apache-2.0
| 262 |
package org.rebeam.tree.demo
import japgolly.scalajs.react.extra.router.StaticDsl._
import japgolly.scalajs.react.extra.router._
import org.rebeam.tree.view.Navigation
import org.rebeam.tree.view.pages._
import org.rebeam.tree.demo.DemoData._
import org.rebeam.tree.sync._
import japgolly.scalajs.react.vdom.html_<^._
object DemoRoutes {
sealed trait Page
case object HomePage extends Page
case object AddressPage extends Page
case object RefPage extends Page
case object RefFailurePage extends Page
sealed trait TodoPage extends Page {
def back: TodoPage
def toList(listId: Id[TodoList]) = TodoProjectListPage(listId)
}
sealed trait PageWithTodoProjectList extends TodoPage {
def listId: Id[TodoList]
def toItem(todoId: Id[Todo]) = TodoProjectListItemPage(listId, todoId)
override def back: TodoPage = TodoProjectPage
}
sealed trait PageWithTodoProjectListItem extends PageWithTodoProjectList {
def todoId: Id[Todo]
override def back: TodoPage = TodoProjectListPage(listId)
}
case object TodoProjectPage extends TodoPage {
override def back: TodoPage = TodoProjectPage
}
case object TodoProjectCachePage extends TodoPage {
override def back: TodoPage = TodoProjectCachePage
}
case class TodoProjectListPage(listId: Id[TodoList]) extends PageWithTodoProjectList
case class TodoProjectListItemPage(listId: Id[TodoList], todoId: Id[Todo]) extends PageWithTodoProjectListItem
implicit val todoTransitions = new PagesToTransition[TodoPage] {
override def apply(from: TodoPage, to: TodoPage) = {
if (from == to.back) PagesTransition.Right else PagesTransition.Left
}
}
implicit val refTransitions = new PagesToTransition[RefPage.type] {
override def apply(from: RefPage.type, to: RefPage.type) = {
PagesTransition.Right
}
}
implicit val refFailureTransitions = new PagesToTransition[RefFailurePage.type] {
override def apply(from: RefFailurePage.type, to: RefFailurePage.type) = {
PagesTransition.Right
}
}
val routerConfig = RouterConfigDsl[Page].buildConfig { dsl =>
import dsl._
//Provide a renderer for a view factory using Pages.
def dynRenderP[P <: Page](g: Pages[P, P] => VdomElement): P => Renderer =
p => Renderer(r => g(Pages(p, r.narrow[P])))
def id[A] = new RouteB[Id[A]](Id.regex.regex, 1, g => Id.fromString[A](g(0)), Id.toString(_))
def caseObject[A](s: String, a: A) = RouteB.literal(s).xmap(_ => a)(_ => ())
val refRoute = caseObject("#ref", RefPage)
val refFailureRoute = caseObject("#reffailure", RefFailurePage)
val todoProjectRoute = caseObject("#todo", TodoProjectPage)
val todoProjectListRoute = ("#todo/list" / id[TodoList]).caseClass[TodoProjectListPage]
val todoProjectListItemRoute = ("#todo/list" / id[TodoList] / "item" / id[Todo]).caseClass[TodoProjectListItemPage]
val todoProjectCacheRoute = caseObject("#todocache", TodoProjectCachePage)
(trimSlashes
| staticRoute(root, HomePage) ~> render(DemoViews.homeView())
| staticRoute("#address", AddressPage) ~> render(DemoViews.addressView)
| dynamicRouteCT(refRoute) ~> dynRenderP[RefPage.type](RefViews.refViewFactory(_): VdomElement)
| dynamicRouteCT(refFailureRoute) ~> dynRenderP[RefFailurePage.type](RefFailureViews.refViewFactory(_): VdomElement)
| dynamicRouteCT(todoProjectRoute) ~> dynRenderP[TodoPage](TodoPagesViews.todoProjectViewFactory(_): VdomElement)
| dynamicRouteCT(todoProjectListRoute) ~> dynRenderP[TodoPage](TodoPagesViews.todoProjectViewFactory(_): VdomElement)
| dynamicRouteCT(todoProjectListItemRoute) ~> dynRenderP[TodoPage](TodoPagesViews.todoProjectViewFactory(_): VdomElement)
| dynamicRouteCT(todoProjectCacheRoute) ~> dynRenderP[TodoPage](TodoPagesViews.todoProjectCacheViewFactory(_): VdomElement)
)
.notFound(redirectToPage(HomePage)(Redirect.Replace))
.renderWith(layout _ )
.verify(HomePage, AddressPage)//, TodoProjectPage)
}
val navs = List (
"Home" -> HomePage,
"Todo List" -> TodoProjectPage,
"Address" -> AddressPage
)
val navigation = Navigation.apply[Page]
def layout(ctl: RouterCtl[Page], r: Resolution[Page]) = {
val np = Navigation.Props(ctl, r, r.page, navs)
navigation(np)
}
val baseUrl = BaseUrl.fromWindowOrigin_/
def router = Router(baseUrl, routerConfig)
}
|
trepidacious/tree-material-ui
|
js/src/main/scala/org/rebeam/tree/demo/DemoRoutes.scala
|
Scala
|
gpl-3.0
| 4,434 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3
object Ref {
def apply[T <: AnyRef](v: T) = new Ref[T](v)
}
final class Ref[+T <: AnyRef](val value: T) {
if (value == null)
throw new InternalException("Attempt to instantiate Ref(null)")
def toIdString = Integer.toHexString(java.lang.System.identityHashCode(value))
override def toString = s"Ref@$toIdString($value)"
override def hashCode = java.lang.System.identityHashCode(value)
override def equals(that: Any) = that match {
case other: Ref[_] => value eq other.value
case _ => false
}
}
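// Hypothetical usage sketch (not part of the original file): illustrates that Ref equality is
// identity-based, not structural. The object and value names below are assumptions.
object RefIdentitySketch {
  def demo(): Unit = {
    val a = new StringBuilder("x")
    val b = new StringBuilder("x")  // equal content, but a distinct instance
    assert(Ref(a) == Ref(a))        // same underlying instance => equal
    assert(Ref(a) != Ref(b))        // different instances => not equal, regardless of content
  }
}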
|
HuangLS/neo4j
|
community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/Ref.scala
|
Scala
|
apache-2.0
| 1,381 |
package com.ticketmaster.api.commerce
object domain {
case class EventOffers(limits: Limits)
case class Limits(max: Int)
}
|
ticketmaster-api/sdk-scala
|
commerce/src/main/scala/com/ticketmaster/api/commerce/domain.scala
|
Scala
|
mit
| 131 |
package net.litola
import play.PlayAssetsCompiler
import sbt.Keys._
import sbt._
object SassPlugin extends AutoPlugin with PlayAssetsCompiler {
override def requires = sbt.plugins.JvmPlugin
override def trigger = allRequirements
object autoImport {
val sassEntryPoints = settingKey[PathFinder]("Paths to Sass files to be compiled")
val sassOptions = settingKey[Seq[String]]("Command line options for the sass command")
val sassWatcher = AssetsCompiler("sass",
{ file => (file ** "*.sass") +++ (file ** "*.scss") },
sassEntryPoints,
{ (name, min) =>
name
.replace(".sass", if (min) ".min.css" else ".css")
.replace(".scss", if (min) ".min.css" else ".css")
},
{ (file, options) => SassCompiler.compile(file, options) },
sassOptions
)
lazy val baseSassSettings: Seq[Def.Setting[_]] = Seq(
sassEntryPoints <<= (sourceDirectory in Compile)(base => ((base / "assets" ** "*.sass") +++ (base / "assets" ** "*.scss") --- base / "assets" ** "_*")),
sassOptions := Seq.empty[String],
resourceGenerators in Compile <+= sassWatcher
)
}
import autoImport._
override val projectSettings = baseSassSettings
}
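// Hypothetical usage sketch (not part of the original file): because this is an AutoPlugin with
// `allRequirements`, it activates automatically once the plugin is on the build classpath; the
// settings exposed via autoImport are the customization points. The build.sbt value below is an
// assumption for illustration.
//
//   // build.sbt
//   sassOptions := Seq("--style", "compressed") // pass extra flags to the sass executable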
|
jlitola/play-sass
|
src/main/scala/net/litola/SassPlugin.scala
|
Scala
|
mit
| 1,202 |
package gorillas.collection.immutable
import gorillas.collection.generic.KeyTransformation
import collection.{ SortedMap, IndexedSeqLike, GenTraversableOnce }
final class SortedArrayNavigableMap[K, V](protected[this] val sortedKeys: Array[K],
protected[this] val sortedValues: Array[V])(implicit val ordering: Ordering[K],
protected[this] val key2int: KeyTransformation[K],
protected[this] val keyManifest: ClassManifest[K],
protected[this] val valueManifest: ClassManifest[V])
extends NavigableMap[K, V] with SortedArrayMap[K, V] {
final def iterator: Iterator[(K, V)] = flatEntries
// -- Traversable/Iterable
override def foreach[U](f: ((K, V)) => U) {
var i = 0
while (i < sizeInt) {
while (i + 1 < sizeInt && sortedKeys(i) == sortedKeys(i + 1)) // Skip the duplicate keys
i += 1
f(sortedKeys(i) -> sortedValues(i))
i += 1
}
}
override def ++[V1 >: V: ClassManifest](xs: GenTraversableOnce[(K, V1)]): NavigableMap[K, V1] = {
val builder = newBuilder[V1]
if (xs.isInstanceOf[IndexedSeqLike[_, _]])
builder.sizeHint(xs.size + sortedKeys.length)
builder ++= (sortedKeys, sortedValues, 0, sortedKeys.length)
builder ++= xs
builder result ()
}
final def -(key: K) = {
if (!contains(key))
this
else {
val builder = newBuilder[V]
builder.sizeHint(sizeInt - 1)
var i = 0
while (i < sizeInt) {
val currentKey = sortedKeys(i)
if (currentKey != key) {
builder += ((currentKey, sortedValues(i)))
}
i += 1
}
builder result ()
}
}
/**
* @param from bottom key limit (inclusive.) None indicates no bound.
* @param until top key (exclusive.) None indicates no limit.
* @return a new navigable map with the given range.
*/
def rangeImpl(from: Option[K], until: Option[K]) = {
val lowerIndex = from match {
case None => 0
case Some(lower) =>
getClosestIndex(lower)
}
val higherIndex = until match {
case None => sizeInt
case Some(higher) =>
if (ordering.lt(highestKey, higher))
sizeInt
else {
var index = getClosestIndex(higher)
while (index >= 0 && ordering.gteq(sortedKeys(index), higher))
index -= 1
index + 1
}
}
if (lowerIndex == 0 && higherIndex == sizeInt)
this
else if (higherIndex < lowerIndex)
empty
else {
val builder = newBuilder[V]
builder ++= (sortedKeys.slice(lowerIndex, higherIndex), sortedValues.slice(lowerIndex, higherIndex))
builder result ()
}
}
// ------- Navigable Methods ------- //
/**
* Create NavigableMaps if V1's ClassManifest is available
* @param kv new key value pair
* @tparam V1 new value type
* @return a new map with the element added
*/
final def +[V1 >: V: ClassManifest](kv: (K, V1)): NavigableMap[K, V1] = {
var insertionIndex = getClosestIndex(kv._1)
while (insertionIndex < sizeInt && ordering.equiv(kv._1, sortedKeys(insertionIndex)))
insertionIndex += 1
val arrayInstanceOfV1 = sortedValues.asInstanceOf[Array[V1]]
val builder = newBuilder[V1]
builder.sizeHint(sizeInt + 1)
builder ++= (sortedKeys, arrayInstanceOfV1, 0, insertionIndex) += kv ++= (sortedKeys, arrayInstanceOfV1, insertionIndex, sizeInt - insertionIndex)
builder result ()
}
def +[V1 >: V](kv: (K, V1)): SortedMap[K, V1] = {
val builder = SortedMap.newBuilder[K, V1]
builder.sizeHint(sizeInt + 1)
builder ++= iterator += kv
builder result ()
}
  final def get(key: K): Option[V] = { // "final" might seem redundant here, but it is not (checked with javap): it allows extra optimization at the cost of inheritance.
val hintIdx = hintIndex(key2int.transform(key))
if (hintIdx < 0 || hintIdx >= sizeInt) // Out of range
None
else
binarySearch(key, hints(hintIdx), hints(hintIdx + 1)) // It turns out that inline parameters generate less bytecode
}
final override def contains(key: K) = get(key).isDefined
// ------- SortedMap and Map methods ------- //
override def size = sizeInt - duplicates
override def firstKey: K = lowestKey
override def lastKey: K = highestKey
}
|
rmleon/GorillasCollection
|
maps/src/main/scala/gorillas/collection/immutable/SortedArrayNavigableMap.scala
|
Scala
|
bsd-3-clause
| 4,279 |
/*
* Copyright 2015 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.cluster
import org.scalatest.{Matchers, FlatSpec}
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import akka.actor._
import akka.routing.ActorSelectionRoutee
import akka.util.ByteString
import org.squbs.cluster.rebalance.{DataCenterAwareRebalanceLogic, CorrelateRoundRobinRoutingLogic, DefaultCorrelation}
class DataCenterAwarenessSpec extends FlatSpec with Matchers with MockitoSugar {
val myAddress = Address("akka.tcp", "pubsub", "10.100.194.253", 8080)
val correlates = Seq(Address("akka.tcp", "pubsub", "10.100.65.147", 8080),
Address("akka.tcp", "pubsub", "10.100.98.134", 8080))
val distances = Seq(Address("akka.tcp", "pubsub", "10.210.45.119", 8080),
Address("akka.tcp", "pubsub", "10.210.79.201", 8080))
"DefaultCorrelation" should "extract ipv4 subnet domain" in {
val mockAddress = Address("akka.tcp", "pubsub", "10.100.194.253", 8080)
    DefaultCorrelation().common(mockAddress) should equal("pubsub@10.100.194")
}
"CorrelateRoundRobinRoutingLogic" should "prefer routees that correlate with itself" in {
val routees = (correlates ++ distances).map(address => {
val mockActorSelection = mock[ActorSelection]
when(mockActorSelection.pathString).thenReturn(address.toString)
ActorSelectionRoutee(mockActorSelection)
}).toIndexedSeq
val logic = CorrelateRoundRobinRoutingLogic(myAddress)
logic.select("whatever", routees) match {
case ActorSelectionRoutee(selection) =>
selection.pathString should equal("akka.tcp://[email protected]:8080")
}
logic.select("whatever", routees) match {
case ActorSelectionRoutee(selection) =>
selection.pathString should equal("akka.tcp://[email protected]:8080")
}
logic.select("whatever", routees) match {
case ActorSelectionRoutee(selection) =>
selection.pathString should equal("akka.tcp://[email protected]:8080")
}
}
"DefaultDataCenterAwareRebalanceLogic" should "rebalance with correlations in considerations" in {
val partitionKey = ByteString("some partition")
val partitionsToMembers = Map(partitionKey -> Set.empty[Address])
def size(partitionKey:ByteString) = 2
var compensation = DataCenterAwareRebalanceLogic().compensate(partitionsToMembers, correlates ++ distances, size)
compensation.getOrElse(partitionKey, Set.empty) should equal(Set(correlates.head, distances.head))
val morePartition = ByteString("another partition")
compensation = DataCenterAwareRebalanceLogic().
compensate(compensation.updated(morePartition, Set.empty), correlates ++ distances, size)
compensation.getOrElse(partitionKey, Set.empty) should equal(Set(correlates.head, distances.head))
compensation.getOrElse(morePartition, Set.empty) should equal(Set(correlates.head, distances.head))
val balanced = DataCenterAwareRebalanceLogic().rebalance(compensation, (correlates ++ distances).toSet)
balanced.getOrElse(partitionKey, Set.empty) shouldNot equal(balanced.getOrElse(morePartition, Set.empty))
}
"DefaultDataCenterAwareRebalanceLogic" should "rebalance after a DC failure recovery" in {
val partitionKey = ByteString("some partition")
val partitionsToMembers = Map(partitionKey -> Set.empty[Address])
def size(partitionKey:ByteString) = 2
var compensation = DataCenterAwareRebalanceLogic().compensate(partitionsToMembers, correlates ++ distances, size)
compensation.getOrElse(partitionKey, Set.empty) should equal(Set(correlates.head, distances.head))
val balanced = DataCenterAwareRebalanceLogic().rebalance(compensation, (correlates ++ distances).toSet)
balanced.getOrElse(partitionKey, Set.empty) should have size 2
//unfortunately correlates are gone?!
compensation = DataCenterAwareRebalanceLogic().
compensate(partitionsToMembers.updated(partitionKey, Set(distances.head)), distances, size)
compensation.getOrElse(partitionKey, Set.empty) should equal(distances.toSet)
val rebalanced = DataCenterAwareRebalanceLogic().rebalance(compensation, distances.toSet)
rebalanced.getOrElse(partitionKey, Set.empty) should equal(distances.toSet)
val recovered = DataCenterAwareRebalanceLogic().rebalance(compensation, (correlates ++ distances).toSet)
recovered.getOrElse(partitionKey, Set.empty) should have size 2
recovered.getOrElse(partitionKey, Set.empty) shouldNot equal(distances.toSet)
correlates.contains(recovered.getOrElse(partitionKey, Set.empty).diff(distances.toSet).head) should equal(true)
}
}
|
keshin/squbs
|
squbs-zkcluster/src/test/scala/org/squbs/cluster/DataCenterAwarenessSpec.scala
|
Scala
|
apache-2.0
| 5,154 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.agg.batch
import org.apache.flink.streaming.api.operators.OneInputStreamOperator
import org.apache.flink.table.data.binary.BinaryRowData
import org.apache.flink.table.data.{JoinedRowData, RowData}
import org.apache.flink.table.functions.AggregateFunction
import org.apache.flink.table.planner.codegen.OperatorCodeGenerator.generateCollect
import org.apache.flink.table.planner.codegen.{CodeGenUtils, CodeGeneratorContext, ProjectionCodeGenerator}
import org.apache.flink.table.planner.plan.utils.AggregateInfoList
import org.apache.flink.table.runtime.generated.GeneratedOperator
import org.apache.flink.table.runtime.operators.TableStreamOperator
import org.apache.flink.table.types.logical.RowType
import org.apache.calcite.tools.RelBuilder
/**
* Sort aggregation code generator to deal with all aggregate functions with keys.
 * It requires the input to be sorted by the grouping keys.
*/
object SortAggCodeGenerator {
private[flink] def genWithKeys(
ctx: CodeGeneratorContext,
builder: RelBuilder,
aggInfoList: AggregateInfoList,
inputType: RowType,
outputType: RowType,
grouping: Array[Int],
auxGrouping: Array[Int],
isMerge: Boolean,
isFinal: Boolean)
: GeneratedOperator[OneInputStreamOperator[RowData, RowData]] = {
// prepare for aggregation
val aggInfos = aggInfoList.aggInfos
aggInfos
.map(_.function)
.filter(_.isInstanceOf[AggregateFunction[_, _]])
.map(ctx.addReusableFunction(_))
val functionIdentifiers = AggCodeGenHelper.getFunctionIdentifiers(aggInfos)
val aggBufferNames = AggCodeGenHelper.getAggBufferNames(auxGrouping, aggInfos)
val aggBufferTypes = AggCodeGenHelper.getAggBufferTypes(inputType, auxGrouping, aggInfos)
val inputTerm = CodeGenUtils.DEFAULT_INPUT1_TERM
val lastKeyTerm = "lastKey"
val currentKeyTerm = "currentKey"
val currentKeyWriterTerm = "currentKeyWriter"
val groupKeyRowType = AggCodeGenHelper.projectRowType(inputType, grouping)
val keyProjectionCode = ProjectionCodeGenerator.generateProjectionExpression(
ctx,
inputType,
groupKeyRowType,
grouping,
inputTerm = inputTerm,
outRecordTerm = currentKeyTerm,
outRecordWriterTerm = currentKeyWriterTerm).code
val keyNotEquals = AggCodeGenHelper.genGroupKeyChangedCheckCode(currentKeyTerm, lastKeyTerm)
val (initAggBufferCode, doAggregateCode, aggOutputExpr) = AggCodeGenHelper.genSortAggCodes(
isMerge,
isFinal,
ctx,
builder,
grouping,
auxGrouping,
aggInfos,
functionIdentifiers,
inputTerm,
inputType,
aggBufferNames,
aggBufferTypes,
outputType)
val joinedRow = "joinedRow"
ctx.addReusableOutputRecord(outputType, classOf[JoinedRowData], joinedRow)
val binaryRow = classOf[BinaryRowData].getName
ctx.addReusableMember(s"$binaryRow $lastKeyTerm = null;")
val processCode =
s"""
|hasInput = true;
|${ctx.reuseInputUnboxingCode(inputTerm)}
|
|// project key from input
|$keyProjectionCode
|if ($lastKeyTerm == null) {
| $lastKeyTerm = $currentKeyTerm.copy();
|
| // init agg buffer
| $initAggBufferCode
|} else if ($keyNotEquals) {
|
| // write output
| ${aggOutputExpr.code}
|
| ${generateCollect(s"$joinedRow.replace($lastKeyTerm, ${aggOutputExpr.resultTerm})")}
|
| $lastKeyTerm = $currentKeyTerm.copy();
|
| // init agg buffer
| $initAggBufferCode
|}
|
         |// aggregate the current row
|$doAggregateCode
|""".stripMargin.trim
val endInputCode =
s"""
|if (hasInput) {
| // write last output
| ${aggOutputExpr.code}
| ${generateCollect(s"$joinedRow.replace($lastKeyTerm, ${aggOutputExpr.resultTerm})")}
|}
""".stripMargin
val className = if (isFinal) "SortAggregateWithKeys" else "LocalSortAggregateWithKeys"
AggCodeGenHelper.generateOperator(
ctx,
className,
classOf[TableStreamOperator[RowData]].getCanonicalName,
processCode,
endInputCode,
inputType)
}
}
|
tzulitai/flink
|
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/batch/SortAggCodeGenerator.scala
|
Scala
|
apache-2.0
| 5,116 |
package net.koofr.driveby.resources
case class DirCreate(name: String, description: String)
object DirCreate {
import spray.json.DefaultJsonProtocol._
implicit val format = jsonFormat2(DirCreate.apply)
}
|
koofr/driveby
|
src/main/scala/net/koofr/driveby/resources/DirCreate.scala
|
Scala
|
mit
| 213 |
/*
 *  CmdLineSupport.scala
* (Mellite)
*
* Copyright (c) 2012-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.mellite.impl.objview
import de.sciss.proc.TimeRef
import de.sciss.synth.Curve
import org.rogach.scallop
import scala.util.Try
object CmdLineSupport {
private val curveNameMap: Map[String, Curve] = Map(
"step" -> Curve.step,
"lin" -> Curve.linear,
"linear" -> Curve.linear,
"exp" -> Curve.exponential,
"exponential" -> Curve.exponential,
"sin" -> Curve.sine,
"sine" -> Curve.sine,
"welch" -> Curve.welch,
"sqr" -> Curve.squared,
"squared" -> Curve.squared,
"cub" -> Curve.cubed,
"cubed" -> Curve.cubed
)
// private val durSpan = Span.From(0L)
// private val fmtDurTime = new TimeField.TimeFormat (durSpan, sampleRate = TimeRef.SampleRate)
// private val fmtDurFrames = new TimeField.FramesFormat(durSpan, sampleRate = TimeRef.SampleRate,
// viewSampleRate = TimeRef.SampleRate)
// private val fmtDurMilli = new TimeField.MilliFormat (durSpan, sampleRate = TimeRef.SampleRate)
def parseDuration(s: String): Option[Long] =
Try {
if (s.endsWith("ms")) {
val sec = s.substring(0, s.length - 2).trim.toDouble * 0.001
(sec * TimeRef.SampleRate + 0.5).toLong
} else if (s.contains(".") || s.contains(":")) {
val arr = s.split(':')
val sec0 = arr(arr.length - 1).toDouble
val min0 = if (arr.length <= 1) 0 else {
arr(arr.length - 2).toInt
}
val hour = if (arr.length <= 2) 0 else {
arr(arr.length - 3).toInt
}
val min1 = hour * 60 + min0
val sec1 = min1 * 60.0
val sec2 = sec0 + sec1
(sec2 * TimeRef.SampleRate + 0.5).toLong
} else { // frames
s.toLong
}
} .toOption
final case class Frames(value: Long)
implicit val ReadDuration: scallop.ValueConverter[Frames] = scallop.singleArgConverter { s =>
parseDuration(s) match {
case Some(n) => Frames(n)
case None => throw new IllegalArgumentException(s"Not a valid time format: $s")
}
}
implicit val ReadCurve: scallop.ValueConverter[Curve] = scallop.singleArgConverter { s =>
curveNameMap.getOrElse(s.toLowerCase, {
val p = s.toFloat
Curve.parametric(p)
})
}
}
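// Hypothetical usage sketch (not part of the original file): illustrates the three duration
// notations accepted by parseDuration: a millisecond suffix, a [hh:]mm:ss.fraction clock value,
// and a plain frame count at TimeRef.SampleRate. The object name is an assumption.
object CmdLineSupportSketch {
  import CmdLineSupport.parseDuration
  val fromMillis: Option[Long] = parseDuration("500ms")  // 0.5 s converted to sample frames
  val fromClock : Option[Long] = parseDuration("1:30.0") // 1 minute 30 seconds
  val fromFrames: Option[Long] = parseDuration("44100")  // taken verbatim as a frame count
}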
|
Sciss/Mellite
|
app/src/main/scala/de/sciss/mellite/impl/objview/CmdLineSupport.scala
|
Scala
|
agpl-3.0
| 2,576 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.cassandra
import com.datastax.driver.core._
/**
* Define a Scala object hierarchy
* for input text parsing
*/
object TextBlockHierarchy {
sealed trait BlockType
object ParameterBlock extends BlockType
object StatementBlock extends BlockType
object DescribeBlock extends BlockType
object CommentBlock extends BlockType
abstract class AnyBlock(val blockType: BlockType) {
def get[U <: AnyBlock]: U = {
this.asInstanceOf[U]
}
}
case class Comment(text:String) extends AnyBlock(CommentBlock)
sealed trait ParameterType
object ConsistencyParam extends ParameterType
object SerialConsistencyParam extends ParameterType
object TimestampParam extends ParameterType
object RetryPolicyParam extends ParameterType
object FetchSizeParam extends ParameterType
abstract class QueryParameters(val paramType: ParameterType) extends AnyBlock(ParameterBlock) {
def getParam[U <: QueryParameters]: U = {
this.asInstanceOf[U]
}
}
case class Consistency(value: ConsistencyLevel) extends QueryParameters(ConsistencyParam)
case class SerialConsistency(value: ConsistencyLevel) extends QueryParameters(SerialConsistencyParam)
case class Timestamp(value: Long) extends QueryParameters(TimestampParam)
case class FetchSize(value: Int) extends QueryParameters(FetchSizeParam)
abstract class RetryPolicy extends QueryParameters(RetryPolicyParam)
object DefaultRetryPolicy extends RetryPolicy
object DowngradingRetryPolicy extends RetryPolicy
object FallThroughRetryPolicy extends RetryPolicy
object LoggingDefaultRetryPolicy extends RetryPolicy
object LoggingDowngradingRetryPolicy extends RetryPolicy
object LoggingFallThroughRetryPolicy extends RetryPolicy
sealed trait StatementType
object PrepareStatementType extends StatementType
object RemovePrepareStatementType extends StatementType
object BoundStatementType extends StatementType
object SimpleStatementType extends StatementType
object BatchStatementType extends StatementType
object DescribeClusterStatementType extends StatementType
object DescribeAllKeyspacesStatementType extends StatementType
object DescribeKeyspaceStatementType extends StatementType
object DescribeAllTablesStatementType extends StatementType
object DescribeTableStatementType extends StatementType
object DescribeTypeStatementType extends StatementType
object HelpStatementType extends StatementType
abstract class QueryStatement(val statementType: StatementType) extends AnyBlock(StatementBlock) {
def getStatement[U<: QueryStatement]: U = {
this.asInstanceOf[U]
}
}
case class SimpleStm(text:String) extends QueryStatement(SimpleStatementType)
case class PrepareStm(name: String, query:String) extends QueryStatement(PrepareStatementType)
case class RemovePrepareStm(name:String) extends QueryStatement(RemovePrepareStatementType)
case class BoundStm(name: String, values:String) extends QueryStatement(BoundStatementType)
case class BatchStm(batchType: BatchStatement.Type, statements: List[QueryStatement])
extends QueryStatement(BatchStatementType)
sealed trait DescribeCommandStatement {
val statement: String
}
class DescribeClusterCmd(override val statement: String = "DESCRIBE CLUSTER;")
extends QueryStatement(DescribeClusterStatementType) with DescribeCommandStatement
class DescribeKeyspacesCmd(override val statement: String = "DESCRIBE KEYSPACES;")
extends QueryStatement(DescribeAllKeyspacesStatementType) with DescribeCommandStatement
class DescribeTablesCmd(override val statement: String = "DESCRIBE TABLES;")
extends QueryStatement(DescribeAllTablesStatementType) with DescribeCommandStatement
case class DescribeKeyspaceCmd(keyspace: String) extends QueryStatement(DescribeKeyspaceStatementType)
with DescribeCommandStatement {
override val statement: String = s"DESCRIBE KEYSPACE $keyspace;"
}
case class DescribeTableCmd(keyspace:Option[String],table: String) extends QueryStatement(DescribeTableStatementType)
with DescribeCommandStatement {
override val statement: String = keyspace match {
case Some(ks) => s"DESCRIBE TABLE $ks.$table;"
case None => s"DESCRIBE TABLE $table;"
}
}
case class DescribeUDTCmd(keyspace:Option[String],udtName: String) extends QueryStatement(DescribeTypeStatementType)
with DescribeCommandStatement {
override val statement: String = keyspace match {
case Some(ks) => s"DESCRIBE TYPE $ks.$udtName;"
case None => s"DESCRIBE TYPE $udtName;"
}
}
class HelpCmd extends QueryStatement(HelpStatementType)
}
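// Hypothetical usage sketch (not part of the original file): a parsed block can be narrowed via
// the tag hierarchy above and then inspected through its typed members. The object and method
// names are assumptions for illustration.
object TextBlockHierarchySketch {
  import TextBlockHierarchy._
  def describe(block: AnyBlock): String = block match {
    case Comment(text)      => s"comment: $text"
    case p: QueryParameters => s"query parameter of kind ${p.paramType}"
    case s: QueryStatement  => s"statement of kind ${s.statementType}"
    case other              => s"block of kind ${other.blockType}"
  }
}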
|
HeartSaVioR/incubator-zeppelin
|
cassandra/src/main/scala/org/apache/zeppelin/cassandra/TextBlockHierarchy.scala
|
Scala
|
apache-2.0
| 5,475 |
package lolchat
import cats.data.EitherT
import scala.concurrent.{ExecutionContext, Future}
package object data {
type AsyncResult[A] = EitherT[Future, Error, A]
type ExeCtx = ExecutionContext
}
|
Thangiee/League-of-Legend-Chat-Lib-Scala
|
core/src/main/scala/lolchat/data/package.scala
|
Scala
|
mit
| 202 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
*/
package org.openapitools.server.model
case class PipelineRunNodeedges(
id: Option[String],
`class`: Option[String]
)
|
cliffano/swaggy-jenkins
|
clients/scalatra/generated/src/main/scala/org/openapitools/server/model/PipelineRunNodeedges.scala
|
Scala
|
mit
| 438 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.examples
import java.nio.ByteBuffer
import java.util.SortedMap
import scala.collection.JavaConversions._
import org.apache.cassandra.db.IColumn
import org.apache.cassandra.hadoop.ColumnFamilyOutputFormat
import org.apache.cassandra.hadoop.ConfigHelper
import org.apache.cassandra.hadoop.ColumnFamilyInputFormat
import org.apache.cassandra.thrift._
import org.apache.cassandra.utils.ByteBufferUtil
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.SparkContext._
/*
* This example demonstrates using Spark with Cassandra with the New Hadoop API and Cassandra
* support for Hadoop.
*
* To run this example, run this file with the following command params -
* <cassandra_node> <cassandra_port>
*
* So if you want to run this on localhost this will be,
* localhost 9160
*
* The example makes some assumptions:
* 1. You have already created a keyspace called casDemo and it has a column family named Words
 * 2. The Words column family has a column named "para" which contains the test content.
*
* You can create the content by running the following script at the bottom of this file with
* cassandra-cli.
*
*/
object CassandraTest {
def main(args: Array[String]) {
val sparkConf = new SparkConf().setAppName("casDemo")
// Get a SparkContext
val sc = new SparkContext(sparkConf)
// Build the job configuration with ConfigHelper provided by Cassandra
val job = new Job()
job.setInputFormatClass(classOf[ColumnFamilyInputFormat])
val host: String = args(1)
val port: String = args(2)
ConfigHelper.setInputInitialAddress(job.getConfiguration(), host)
ConfigHelper.setInputRpcPort(job.getConfiguration(), port)
ConfigHelper.setOutputInitialAddress(job.getConfiguration(), host)
ConfigHelper.setOutputRpcPort(job.getConfiguration(), port)
ConfigHelper.setInputColumnFamily(job.getConfiguration(), "casDemo", "Words")
ConfigHelper.setOutputColumnFamily(job.getConfiguration(), "casDemo", "WordCount")
val predicate = new SlicePredicate()
val sliceRange = new SliceRange()
sliceRange.setStart(Array.empty[Byte])
sliceRange.setFinish(Array.empty[Byte])
predicate.setSlice_range(sliceRange)
ConfigHelper.setInputSlicePredicate(job.getConfiguration(), predicate)
ConfigHelper.setInputPartitioner(job.getConfiguration(), "Murmur3Partitioner")
ConfigHelper.setOutputPartitioner(job.getConfiguration(), "Murmur3Partitioner")
// Make a new Hadoop RDD
val casRdd = sc.newAPIHadoopRDD(
job.getConfiguration(),
classOf[ColumnFamilyInputFormat],
classOf[ByteBuffer],
classOf[SortedMap[ByteBuffer, IColumn]])
// Let us first get all the paragraphs from the retrieved rows
val paraRdd = casRdd.map {
case (key, value) => {
ByteBufferUtil.string(value.get(ByteBufferUtil.bytes("para")).value())
}
}
// Lets get the word count in paras
val counts = paraRdd.flatMap(p => p.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
counts.collect().foreach {
case (word, count) => println(word + ":" + count)
}
counts.map {
case (word, count) => {
val colWord = new org.apache.cassandra.thrift.Column()
colWord.setName(ByteBufferUtil.bytes("word"))
colWord.setValue(ByteBufferUtil.bytes(word))
colWord.setTimestamp(System.currentTimeMillis)
val colCount = new org.apache.cassandra.thrift.Column()
colCount.setName(ByteBufferUtil.bytes("wcount"))
colCount.setValue(ByteBufferUtil.bytes(count.toLong))
colCount.setTimestamp(System.currentTimeMillis)
val outputkey = ByteBufferUtil.bytes(word + "-COUNT-" + System.currentTimeMillis)
val mutations: java.util.List[Mutation] = new Mutation() :: new Mutation() :: Nil
mutations.get(0).setColumn_or_supercolumn(new ColumnOrSuperColumn())
mutations.get(0).column_or_supercolumn.setColumn(colWord)
mutations.get(1).setColumn_or_supercolumn(new ColumnOrSuperColumn())
mutations.get(1).column_or_supercolumn.setColumn(colCount)
(outputkey, mutations)
}
}.saveAsNewAPIHadoopFile("casDemo", classOf[ByteBuffer], classOf[List[Mutation]],
classOf[ColumnFamilyOutputFormat], job.getConfiguration)
sc.stop()
}
}
/*
create keyspace casDemo;
use casDemo;
create column family WordCount with comparator = UTF8Type;
update column family WordCount with column_metadata =
[{column_name: word, validation_class: UTF8Type},
{column_name: wcount, validation_class: LongType}];
create column family Words with comparator = UTF8Type;
update column family Words with column_metadata =
[{column_name: book, validation_class: UTF8Type},
{column_name: para, validation_class: UTF8Type}];
assume Words keys as utf8;
set Words['3musk001']['book'] = 'The Three Musketeers';
set Words['3musk001']['para'] = 'On the first Monday of the month of April, 1625, the market
town of Meung, in which the author of ROMANCE OF THE ROSE was born, appeared to
be in as perfect a state of revolution as if the Huguenots had just made
a second La Rochelle of it. Many citizens, seeing the women flying
toward the High Street, leaving their children crying at the open doors,
hastened to don the cuirass, and supporting their somewhat uncertain
courage with a musket or a partisan, directed their steps toward the
hostelry of the Jolly Miller, before which was gathered, increasing
every minute, a compact group, vociferous and full of curiosity.';
set Words['3musk002']['book'] = 'The Three Musketeers';
set Words['3musk002']['para'] = 'In those times panics were common, and few days passed without
some city or other registering in its archives an event of this kind. There were
nobles, who made war against each other; there was the king, who made
war against the cardinal; there was Spain, which made war against the
king. Then, in addition to these concealed or public, secret or open
wars, there were robbers, mendicants, Huguenots, wolves, and scoundrels,
who made war upon everybody. The citizens always took up arms readily
against thieves, wolves or scoundrels, often against nobles or
Huguenots, sometimes against the king, but never against cardinal or
Spain. It resulted, then, from this habit that on the said first Monday
of April, 1625, the citizens, on hearing the clamor, and seeing neither
the red-and-yellow standard nor the livery of the Duc de Richelieu,
rushed toward the hostel of the Jolly Miller. When arrived there, the
cause of the hubbub was apparent to all';
set Words['3musk003']['book'] = 'The Three Musketeers';
set Words['3musk003']['para'] = 'You ought, I say, then, to husband the means you have, however
large the sum may be; but you ought also to endeavor to perfect yourself in
the exercises becoming a gentleman. I will write a letter today to the
Director of the Royal Academy, and tomorrow he will admit you without
any expense to yourself. Do not refuse this little service. Our
best-born and richest gentlemen sometimes solicit it without being able
to obtain it. You will learn horsemanship, swordsmanship in all its
branches, and dancing. You will make some desirable acquaintances; and
from time to time you can call upon me, just to tell me how you are
getting on, and to say whether I can be of further service to you.';
set Words['thelostworld001']['book'] = 'The Lost World';
set Words['thelostworld001']['para'] = 'She sat with that proud, delicate profile of hers outlined
against the red curtain. How beautiful she was! And yet how aloof! We had been
friends, quite good friends; but never could I get beyond the same
comradeship which I might have established with one of my
fellow-reporters upon the Gazette,--perfectly frank, perfectly kindly,
and perfectly unsexual. My instincts are all against a woman being too
frank and at her ease with me. It is no compliment to a man. Where
the real sex feeling begins, timidity and distrust are its companions,
heritage from old wicked days when love and violence went often hand in
hand. The bent head, the averted eye, the faltering voice, the wincing
figure--these, and not the unshrinking gaze and frank reply, are the
true signals of passion. Even in my short life I had learned as much
as that--or had inherited it in that race memory which we call instinct.';
set Words['thelostworld002']['book'] = 'The Lost World';
set Words['thelostworld002']['para'] = 'I always liked McArdle, the crabbed, old, round-backed,
red-headed news editor, and I rather hoped that he liked me. Of course, Beaumont was
the real boss; but he lived in the rarefied atmosphere of some Olympian
height from which he could distinguish nothing smaller than an
international crisis or a split in the Cabinet. Sometimes we saw him
passing in lonely majesty to his inner sanctum, with his eyes staring
vaguely and his mind hovering over the Balkans or the Persian Gulf. He
was above and beyond us. But McArdle was his first lieutenant, and it
was he that we knew. The old man nodded as I entered the room, and he
pushed his spectacles far up on his bald forehead.';
*/
|
hengyicai/OnlineAggregationUCAS
|
examples/src/main/scala/org/apache/spark/examples/CassandraTest.scala
|
Scala
|
apache-2.0
| 10,089 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.collection
import java.io.Closeable
import org.geotools.data.FeatureReader
import org.geotools.data.simple.{SimpleFeatureCollection, SimpleFeatureIterator}
import org.locationtech.geomesa.utils.collection.CloseableIterator.CloseableIteratorImpl
import org.opengis.feature.Feature
import org.opengis.feature.`type`.FeatureType
import org.opengis.feature.simple.SimpleFeature
import scala.collection.Iterator
// By 'self-closing', we mean that the iterator will automatically call close once it is completely exhausted.
trait SelfClosingIterator[+A] extends CloseableIterator[A] {
abstract override def hasNext: Boolean = {
val res = super.hasNext
if (!res) {
close()
}
res
}
}
object SelfClosingIterator {
def apply[A](iter: Iterator[A], closeIter: => Unit = {}): SelfClosingIterator[A] =
new CloseableIteratorImpl(iter, closeIter) with SelfClosingIterator[A]
def apply[A](iter: Iterator[A] with Closeable): SelfClosingIterator[A] = apply(iter, iter.close())
def apply[A](iter: CloseableIterator[A]): SelfClosingIterator[A] = apply(iter, iter.close())
def apply[A <: Feature, B <: FeatureType](fr: FeatureReader[B, A]): SelfClosingIterator[A] =
apply(CloseableIterator(fr))
def apply(iter: SimpleFeatureIterator): SelfClosingIterator[SimpleFeature] = apply(CloseableIterator(iter))
def apply(c: SimpleFeatureCollection): SelfClosingIterator[SimpleFeature] = apply(c.features)
}
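// Hypothetical usage sketch (not part of the original file): wraps a plain iterator with a
// cleanup action; the action runs automatically once the last element has been consumed.
// The object name and the demo values are assumptions.
object SelfClosingIteratorSketch {
  def demo(): Int = {
    var closed = false
    val iter = SelfClosingIterator(Iterator(1, 2, 3), { closed = true })
    val sum = iter.sum // draining the iterator triggers the close action
    require(closed, "close action should have run after exhaustion")
    sum
  }
}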
|
aheyne/geomesa
|
geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/collection/SelfClosingIterator.scala
|
Scala
|
apache-2.0
| 1,938 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.expressions
import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.sql.Column
import org.apache.spark.sql.catalyst.ScalaReflection
import org.apache.spark.sql.catalyst.expressions.ScalaUDF
import org.apache.spark.sql.types.DataType
/**
* A user-defined function. To create one, use the `udf` functions in `functions`.
*
* As an example:
* {{{
* // Define a UDF that returns true or false based on some numeric score.
* val predict = udf((score: Double) => score > 0.5)
*
* // Projects a column that adds a prediction column based on the score column.
* df.select( predict(df("score")) )
* }}}
*
* @since 1.3.0
*/
@InterfaceStability.Stable
case class UserDefinedFunction protected[sql] (
f: AnyRef,
dataType: DataType,
inputTypes: Option[Seq[DataType]]) {
private var _nameOption: Option[String] = None
private var _nullable: Boolean = true
private var _deterministic: Boolean = true
// This is a `var` instead of in the constructor for backward compatibility of this case class.
// TODO: revisit this case class in Spark 3.0, and narrow down the public surface.
private[sql] var nullableTypes: Option[Seq[Boolean]] = None
/**
* Returns true when the UDF can return a nullable value.
*
* @since 2.3.0
*/
def nullable: Boolean = _nullable
/**
* Returns true iff the UDF is deterministic, i.e. the UDF produces the same output given the same
* input.
*
* @since 2.3.0
*/
def deterministic: Boolean = _deterministic
/**
* Returns an expression that invokes the UDF, using the given arguments.
*
* @since 1.3.0
*/
@scala.annotation.varargs
def apply(exprs: Column*): Column = {
// TODO: make sure this class is only instantiated through `SparkUserDefinedFunction.create()`
// and `nullableTypes` is always set.
if (nullableTypes.isEmpty) {
nullableTypes = Some(ScalaReflection.getParameterTypeNullability(f))
}
if (inputTypes.isDefined) {
assert(inputTypes.get.length == nullableTypes.get.length)
}
Column(ScalaUDF(
f,
dataType,
exprs.map(_.expr),
nullableTypes.get,
inputTypes.getOrElse(Nil),
udfName = _nameOption,
nullable = _nullable,
udfDeterministic = _deterministic))
}
private def copyAll(): UserDefinedFunction = {
val udf = copy()
udf._nameOption = _nameOption
udf._nullable = _nullable
udf._deterministic = _deterministic
udf.nullableTypes = nullableTypes
udf
}
/**
* Updates UserDefinedFunction with a given name.
*
* @since 2.3.0
*/
def withName(name: String): UserDefinedFunction = {
val udf = copyAll()
udf._nameOption = Option(name)
udf
}
/**
* Updates UserDefinedFunction to non-nullable.
*
* @since 2.3.0
*/
def asNonNullable(): UserDefinedFunction = {
if (!nullable) {
this
} else {
val udf = copyAll()
udf._nullable = false
udf
}
}
/**
* Updates UserDefinedFunction to nondeterministic.
*
* @since 2.3.0
*/
def asNondeterministic(): UserDefinedFunction = {
if (!_deterministic) {
this
} else {
val udf = copyAll()
udf._deterministic = false
udf
}
}
}
// We have to use a name different from `UserDefinedFunction` here, to avoid breaking the binary
// compatibility of the auto-generated UserDefinedFunction object.
private[sql] object SparkUserDefinedFunction {
def create(
f: AnyRef,
dataType: DataType,
inputSchemas: Seq[Option[ScalaReflection.Schema]]): UserDefinedFunction = {
val inputTypes = if (inputSchemas.contains(None)) {
None
} else {
Some(inputSchemas.map(_.get.dataType))
}
val udf = new UserDefinedFunction(f, dataType, inputTypes)
udf.nullableTypes = Some(inputSchemas.map(_.map(_.nullable).getOrElse(true)))
udf
}
}
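// Hypothetical usage sketch (not part of the original file): the modifiers above return fresh
// copies, so they can be chained when declaring a UDF. The names and the predicate below are
// assumptions for illustration.
object UserDefinedFunctionSketch {
  import org.apache.spark.sql.functions.udf
  // Give a scoring UDF a stable name, declare it non-nullable, and opt out of the optimizations
  // that assume deterministic results.
  val predict: UserDefinedFunction = udf((score: Double) => score > 0.5)
    .withName("predict")
    .asNonNullable()
    .asNondeterministic()
}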
|
ahnqirage/spark
|
sql/core/src/main/scala/org/apache/spark/sql/expressions/UserDefinedFunction.scala
|
Scala
|
apache-2.0
| 4,728 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package base
import com.intellij.openapi.util.TextRange
import com.intellij.psi.tree.IElementType
import com.intellij.psi.{PsiAnnotationOwner, PsiElement, PsiLanguageInjectionHost, PsiLiteral}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes._
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.types.ScType
/**
* @author Alexander Podkhalyuzin
* Date: 22.02.2008
*/
trait ScLiteral extends ScExpression with PsiLiteral with PsiLanguageInjectionHost {
/**
   * This method works only for the null literal (to avoid possibly dangerous usage)
   * @param tp the type that should be returned by the method getTypeWithoutImplicits
*/
def setTypeForNullWithoutImplicits(tp: Option[ScType])
def getTypeForNullWithoutImplicits: Option[ScType]
def isString: Boolean
def isMultiLineString: Boolean
def getAnnotationOwner(annotationOwnerLookUp: ScLiteral => Option[PsiAnnotationOwner with PsiElement]): Option[PsiAnnotationOwner]
def isSymbol: Boolean
def isChar: Boolean
def contentRange: TextRange
}
object ScLiteral {
def unapply(literal: ScLiteral) = Some(literal.getValue)
}
class ScLiteralValueExtractor[T](literalTypes: IElementType*)(f: AnyRef => T) {
private val types = literalTypes.toSet
def unapply(literal: ScLiteral): Option[T] = {
val literalType = literal.getFirstChild.getNode.getElementType
if (types.contains(literalType)) Some(f(literal.getValue)) else None
}
}
object ScIntLiteral extends ScLiteralValueExtractor(tINTEGER)(_.asInstanceOf[java.lang.Integer].intValue)
object ScFloatLiteral extends ScLiteralValueExtractor(tFLOAT)(_.asInstanceOf[java.lang.Float].floatValue)
object ScCharLiteral extends ScLiteralValueExtractor(tCHAR)(_.asInstanceOf[java.lang.Character].charValue)
object ScBooleanLiteral extends ScLiteralValueExtractor(kTRUE, kFALSE)(_.asInstanceOf[java.lang.Boolean].booleanValue)
object ScStringLiteral extends ScLiteralValueExtractor(tSTRING, tWRONG_STRING, tMULTILINE_STRING)(_.asInstanceOf[String])
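// Hypothetical usage sketch (not part of the original file): the extractor objects above allow a
// literal PSI element to be matched directly on its token type. The object name is an assumption.
object ScLiteralMatchSketch {
  def describe(literal: ScLiteral): String = literal match {
    case ScIntLiteral(i)     => s"integer literal $i"
    case ScBooleanLiteral(b) => s"boolean literal $b"
    case ScStringLiteral(s)  => s"string literal: $s"
    case ScLiteral(value)    => s"other literal: $value"
  }
}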
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/api/base/ScLiteral.scala
|
Scala
|
apache-2.0
| 2,102 |
import java.util.UUID
import play.api.data.FormError
import play.api.data.format.{Formats, Formatter}
import play.api.mvc._
/**
* @author [email protected]
*/
package object controllers {
implicit class RequestWithPreviousURI(req: RequestHeader) {
def previousURI(implicit req: Request[AnyContent]): Option[String] = {
req.body.asFormUrlEncoded.flatMap(_.get("previous_uri").flatMap(_.headOption))
}
}
def RedirectToPreviousURI(implicit req: Request[AnyContent]): Option[Result] = {
req.previousURI.map(Results.Redirect(_))
}
implicit def uuidFormat: Formatter[UUID] = new Formatter[UUID] {
def bind(key: String, data: Map[String, String]) = {
Formats.stringFormat.bind(key, data).right.flatMap { s =>
scala.util.control.Exception.allCatch[UUID]
.either(UUID.fromString(s))
.left.map(e => Seq(FormError(key, "error.uuid", Nil)))
}
}
def unbind(key: String, value: UUID) = Map(key -> value.toString)
}
}
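// Hypothetical usage sketch (not part of the original file): with the implicit Formatter[UUID]
// above in scope, a UUID can be bound directly in a Play form mapping. The form and field names
// are assumptions for illustration.
object UuidFormBindingSketch {
  import controllers._
  import play.api.data.Form
  import play.api.data.Forms._
  // Binds a request parameter straight to java.util.UUID via the implicit formatter above.
  val pickUser: Form[UUID] = Form(single("user_id" -> of[UUID]))
}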
|
lizepeng/app.io
|
app/controllers/package.scala
|
Scala
|
apache-2.0
| 995 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.kernel.protocol.v5.kernel
import akka.actor.{ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit, TestProbe}
import org.apache.toree.kernel.protocol.v5.{MessageType, SocketType}
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSpecLike, Matchers}
import test.utils.TestProbeProxyActor
import test.utils.MaxAkkaTestTimeout
class ActorLoaderSpec extends TestKit(
ActorSystem(
"ActorLoaderSpecSystem",
None,
Some(org.apache.toree.Main.getClass.getClassLoader)
))
with ImplicitSender with FunSpecLike with Matchers with MockitoSugar {
describe("ActorLoader"){
describe("#load( MessageType )"){
it("should load an ActorSelection that has been loaded into the system"){
val testProbe: TestProbe = TestProbe()
system.actorOf(Props(classOf[TestProbeProxyActor], testProbe),
MessageType.Outgoing.ClearOutput.toString)
val actorLoader: ActorLoader = SimpleActorLoader(system)
actorLoader.load(MessageType.Outgoing.ClearOutput) ! "<Test Message>"
testProbe.expectMsg("<Test Message>")
}
it("should expect no message when there is no actor"){
val testProbe: TestProbe = TestProbe()
val actorLoader: ActorLoader = SimpleActorLoader(system)
actorLoader.load(MessageType.Outgoing.CompleteReply) ! "<Test Message>"
testProbe.expectNoMsg(MaxAkkaTestTimeout)
        // This tests whether the messages go to the actor's inbox or to the dead letter inbox
system.actorOf(Props(classOf[TestProbeProxyActor], testProbe),
MessageType.Outgoing.CompleteReply.toString)
testProbe.expectNoMsg(MaxAkkaTestTimeout)
}
}
describe("#load( SocketType )"){
it("should load an ActorSelection that has been loaded into the system"){
val testProbe: TestProbe = TestProbe()
system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), SocketType.Shell.toString)
val actorLoader: ActorLoader = SimpleActorLoader(system)
actorLoader.load(SocketType.Shell) ! "<Test Message>"
testProbe.expectMsg("<Test Message>")
}
it("should expect no message when there is no actor"){
val testProbe: TestProbe = TestProbe()
val actorLoader: ActorLoader = SimpleActorLoader(system)
actorLoader.load(SocketType.IOPub) ! "<Test Message>"
testProbe.expectNoMsg(MaxAkkaTestTimeout)
        // This tests whether the messages go to the actor's inbox or to the dead letter inbox
system.actorOf(Props(classOf[TestProbeProxyActor], testProbe), SocketType.IOPub.toString)
testProbe.expectNoMsg(MaxAkkaTestTimeout)
}
}
}
}
|
hmost1/incubator-toree
|
kernel/src/test/scala/org/apache/toree/kernel/protocol/v5/kernel/ActorLoaderSpec.scala
|
Scala
|
apache-2.0
| 3,523 |
package com.rocketfuel.sdbc.postgresql
import com.rocketfuel.sdbc.base.jdbc.statement.{DateParameter, ParameterValue}
import java.time.LocalTime
import java.time.format.DateTimeFormatter
import org.postgresql.util.PGobject
/**
* This gives us better precision than the JDBC time type.
*
* @param localTime
*/
private class PGLocalTime(
var localTime: Option[LocalTime]
) extends PGobject() {
def this() {
this(None)
}
setType("time")
override def getValue: String = {
localTime.map(DateTimeFormatter.ISO_LOCAL_TIME.format).orNull
}
override def setValue(value: String): Unit = {
this.localTime = for {
reallyValue <- Option(value)
} yield {
PGLocalTime.parse(reallyValue)
}
}
}
private object PGLocalTime {
def apply(value: String): PGLocalTime = {
val t = new PGLocalTime()
t.setValue(value)
t
}
implicit def apply(l: LocalTime): PGLocalTime = {
new PGLocalTime(Some(l))
}
def parse(value: String): LocalTime = {
val parsed = DateTimeFormatter.ISO_LOCAL_TIME.parse(value)
LocalTime.from(parsed)
}
}
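// Hypothetical usage sketch (not part of the original file): round-trips a LocalTime through the
// PGobject text representation, which keeps sub-millisecond precision unlike java.sql.Time.
// The object and value names are assumptions.
private object PGLocalTimeSketch {
  val original: LocalTime = LocalTime.of(13, 37, 21, 123456000)  // 123.456 ms past the second
  val asPg: PGLocalTime = PGLocalTime(original)
  val roundTripped: LocalTime = PGLocalTime.parse(asPg.getValue) // parses the ISO-8601 text back
}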
|
rocketfuel/sdbc
|
postgresql/src/main/scala/com/rocketfuel/sdbc/postgresql/PGLocalTime.scala
|
Scala
|
bsd-3-clause
| 1,100 |
package org.locationtech.geomesa.core.util
import java.util.concurrent.TimeUnit
import com.typesafe.scalalogging.slf4j.Logging
import org.apache.accumulo.core.client.BatchWriterConfig
import scala.util.Try
object GeoMesaBatchWriterConfig extends Logging {
val WRITER_LATENCY_SECONDS = "geomesa.batchwriter.latency.seconds" // Measured in seconds
val WRITER_LATENCY_MILLIS = "geomesa.batchwriter.latency.millis" // Measured in millis
val WRITER_MEMORY = "geomesa.batchwriter.memory" // Measured in bytes
val WRITER_THREADS = "geomesa.batchwriter.maxthreads"
val WRITE_TIMEOUT = "geomesa.batchwriter.timeout.seconds" // Timeout measured in seconds. Likely unnecessary.
val DEFAULT_LATENCY = 10000l // 10 seconds
val DEFAULT_MAX_MEMORY = 1000000l // 1 megabyte
val DEFAULT_THREADS = 10
protected [util] def fetchProperty(prop: String): Option[Long] =
for {
p <- Option(System.getProperty(prop))
num <- Try(java.lang.Long.parseLong(p)).toOption
} yield num
protected [util] def buildBWC: BatchWriterConfig = {
val bwc = new BatchWriterConfig
fetchProperty(WRITER_LATENCY_SECONDS) match {
case Some(latency) =>
logger.trace(s"GeoMesaBatchWriter config: maxLatency set to $latency seconds.")
bwc.setMaxLatency(latency, TimeUnit.SECONDS)
case None =>
val milliLatency = fetchProperty(WRITER_LATENCY_MILLIS).getOrElse(DEFAULT_LATENCY)
logger.trace(s"GeoMesaBatchWriter config: maxLatency set to $milliLatency milliseconds.")
bwc.setMaxLatency(milliLatency, TimeUnit.MILLISECONDS)
}
    // TODO: Allow users to specify memory with syntax like 100M or 50k.
// https://geomesa.atlassian.net/browse/GEOMESA-735
val memory = fetchProperty(WRITER_MEMORY).getOrElse(DEFAULT_MAX_MEMORY)
logger.trace(s"GeoMesaBatchWriter config: maxMemory set to $memory bytes.")
bwc.setMaxMemory(memory)
val threads = fetchProperty(WRITER_THREADS).map(_.toInt).getOrElse(DEFAULT_THREADS)
logger.trace(s"GeoMesaBatchWriter config: maxWriteThreads set to $threads.")
bwc.setMaxWriteThreads(threads.toInt)
fetchProperty(WRITE_TIMEOUT).foreach { timeout =>
logger.trace(s"GeoMesaBatchWriter config: maxTimeout set to $timeout seconds.")
bwc.setTimeout(timeout, TimeUnit.SECONDS)
}
bwc
}
def apply(): BatchWriterConfig = buildBWC
}
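// Hypothetical usage sketch (not part of the original file): the writer is tuned entirely through
// JVM system properties, so callers set them before requesting a config. The property values
// below are assumptions for illustration.
object GeoMesaBatchWriterConfigSketch {
  def tunedConfig(): BatchWriterConfig = {
    System.setProperty(GeoMesaBatchWriterConfig.WRITER_LATENCY_MILLIS, "500") // flush every 500 ms
    System.setProperty(GeoMesaBatchWriterConfig.WRITER_MEMORY, "52428800")    // 50 MB buffer
    System.setProperty(GeoMesaBatchWriterConfig.WRITER_THREADS, "8")
    GeoMesaBatchWriterConfig()                                                // reads the properties above
  }
}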
|
mmatz-ccri/geomesa
|
geomesa-core/src/main/scala/org/locationtech/geomesa/core/util/GeoMesaBatchWriterConfig.scala
|
Scala
|
apache-2.0
| 2,416 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.staticStringTest
import org.jetbrains.plugins.scala.testingSupport.scalatest.ScalaTestTestCase
/**
* @author Roman.Shein
* @since 24.06.2015.
*/
trait FeatureSpecStaticStringTest extends ScalaTestTestCase {
val featureSpecClassName = "FeatureSpecStringTest"
val featureSpecFileName = featureSpecClassName + ".scala"
def addFeatureSpec() {
addFileToProject(featureSpecFileName,
"""
|import org.scalatest._
|
|class FeatureSpecStringTest extends FeatureSpec {
| val b = " B"
| val c = "C"
| feature("Feature" + " 1") {
| scenario("Scenario A") {
| }
| scenario("Scenario" + b) {
| }
| }
|
| feature(c) {
| scenario("Scenario C" + System.currentTimeMillis()) {
| }
| scenario("other") {}
| }
|
| feature("invalid") {
| scenario("Failed " + System.currentTimeMillis()) {}
| }
|}
""".stripMargin.trim()
)
}
def testFeatureSpecSumString() = {
addFeatureSpec()
assert(checkConfigAndSettings(createTestFromLocation(6, 7, featureSpecFileName), featureSpecClassName,
"Feature: Feature 1 Scenario: Scenario A"))
}
def testFeatureSpecValSumString() = {
addFeatureSpec()
assert(checkConfigAndSettings(createTestFromLocation(8, 7, featureSpecFileName), featureSpecClassName,
"Feature: Feature 1 Scenario: Scenario B"))
}
def testFeatureSpecValString() = {
addFeatureSpec()
assert(checkConfigAndSettings(createTestFromLocation(15, 7, featureSpecFileName), featureSpecClassName,
"Feature: C Scenario: other"))
}
def testFeatureSpecNonConst() = {
addFeatureSpec()
assert(checkConfigAndSettings(createTestFromLocation(13, 7, featureSpecFileName), featureSpecClassName,
"Feature: C Scenario: other"))
assert(checkConfigAndSettings(createTestFromLocation(19, 7, featureSpecFileName), featureSpecClassName))
}
}
|
double-y/translation-idea-plugin
|
test/org/jetbrains/plugins/scala/testingSupport/scalatest/staticStringTest/FeatureSpecStaticStringTest.scala
|
Scala
|
apache-2.0
| 2,064 |
package com.dwolla.cloudflare.domain.model.logpush
import java.time.Instant
import com.dwolla.cloudflare.domain.model.{LogpullOptions, LogpushDestination, LogpushId}
case class LogpushJob(
id: LogpushId,
enabled: Boolean,
name: Option[String],
logpullOptions: Option[LogpullOptions],
destinationConf: LogpushDestination,
lastComplete: Option[Instant],
lastError: Option[Instant],
errorMessage: Option[String]
)
case class CreateJob(
destinationConf: LogpushDestination,
ownershipChallenge: String,
name: Option[String],
enabled: Option[Boolean],
logpullOptions: Option[LogpullOptions]
)
|
Dwolla/scala-cloudflare
|
client/src/main/scala/com/dwolla/cloudflare/domain/model/logpush/LogpushJob.scala
|
Scala
|
mit
| 617 |
package module
import integration._
import skinny.SkinnyConfig
class IntegrationsModule extends scaldi.Module {
private val DEFAULT_SERVICE = "Null"
val service = SkinnyConfig.stringConfigValue("externalIntegration.service").map { configValue =>
configValue.capitalize
} getOrElse DEFAULT_SERVICE
import scala.reflect.runtime.universe
val runtimeMirror = universe.runtimeMirror(getClass.getClassLoader)
val module = runtimeMirror.staticModule(s"integration.${service}Integration")
val obj = runtimeMirror.reflectModule(module)
val integration = obj.instance
bind[ExternalServiceIntegration] to integration.asInstanceOf[ExternalServiceIntegration]
}
|
atware/sharedocs
|
src/main/scala/module/IntegrationsModule.scala
|
Scala
|
mit
| 676 |
package mesosphere.marathon
import java.util.concurrent.TimeoutException
import akka.actor.{ ActorRef, ActorSystem, Props }
import akka.event.EventStream
import akka.testkit._
import akka.util.Timeout
import mesosphere.marathon.MarathonSchedulerActor._
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.api.LeaderInfo
import mesosphere.marathon.event._
import mesosphere.marathon.health.HealthCheckManager
import mesosphere.marathon.io.storage.StorageProvider
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state._
import mesosphere.marathon.tasks.{ OfferReviver, TaskIdUtil, TaskQueue, TaskTracker }
import mesosphere.marathon.upgrade.{ DeploymentManager, DeploymentPlan, DeploymentStep, StopApplication }
import mesosphere.mesos.protos.Implicits._
import mesosphere.mesos.protos.TaskID
import mesosphere.util.state.FrameworkIdUtil
import org.apache.mesos.Protos.Status
import org.apache.mesos.SchedulerDriver
import org.mockito.Matchers.any
import org.mockito.Mockito._
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.scalatest.{ BeforeAndAfterAll, Matchers }
import scala.collection.immutable.{ Seq, Set }
import scala.collection.mutable
import scala.concurrent.Future
import scala.concurrent.duration._
class MarathonSchedulerActorTest extends TestKit(ActorSystem("System"))
with MarathonSpec
with BeforeAndAfterAll
with Matchers
with ImplicitSender {
var repo: AppRepository = _
var groupRepo: GroupRepository = _
var deploymentRepo: DeploymentRepository = _
var hcManager: HealthCheckManager = _
var tracker: TaskTracker = _
var queue: TaskQueue = _
var frameworkIdUtil: FrameworkIdUtil = _
var driver: SchedulerDriver = _
var holder: MarathonSchedulerDriverHolder = _
var taskIdUtil: TaskIdUtil = _
var storage: StorageProvider = _
var taskFailureEventRepository: TaskFailureRepository = _
var leaderInfo: LeaderInfo = _
var schedulerActions: ActorRef => SchedulerActions = _
var deploymentManagerProps: SchedulerActions => Props = _
var historyActorProps: Props = _
implicit val defaultTimeout: Timeout = 5.seconds
before {
driver = mock[SchedulerDriver]
holder = new MarathonSchedulerDriverHolder
holder.driver = Some(driver)
repo = mock[AppRepository]
groupRepo = mock[GroupRepository]
deploymentRepo = mock[DeploymentRepository]
hcManager = mock[HealthCheckManager]
tracker = mock[TaskTracker]
queue = spy(new TaskQueue(offerReviver = mock[OfferReviver], conf = MarathonTestHelper.defaultConfig()))
frameworkIdUtil = mock[FrameworkIdUtil]
taskIdUtil = new TaskIdUtil
storage = mock[StorageProvider]
taskFailureEventRepository = mock[TaskFailureRepository]
leaderInfo = mock[LeaderInfo]
deploymentManagerProps = schedulerActions => Props(new DeploymentManager(
repo,
tracker,
queue,
schedulerActions,
storage,
hcManager,
system.eventStream
))
historyActorProps = Props(new HistoryActor(system.eventStream, taskFailureEventRepository))
schedulerActions = ref => new SchedulerActions(
repo, groupRepo, hcManager, tracker, queue, new EventStream(), ref, mock[MarathonConf])(system.dispatcher)
when(deploymentRepo.store(any())).thenAnswer(new Answer[Future[DeploymentPlan]] {
override def answer(p1: InvocationOnMock): Future[DeploymentPlan] = {
Future.successful(p1.getArguments()(0).asInstanceOf[DeploymentPlan])
}
})
when(deploymentRepo.expunge(any())).thenReturn(Future.successful(Seq(true)))
when(deploymentRepo.all()).thenReturn(Future.successful(Nil))
when(repo.apps()).thenReturn(Future.successful(Nil))
when(groupRepo.rootGroup()).thenReturn(Future.successful(None))
}
def createActor() = {
system.actorOf(
MarathonSchedulerActor.props(
schedulerActions,
deploymentManagerProps,
historyActorProps,
repo,
deploymentRepo,
hcManager,
tracker,
queue,
holder,
leaderInfo,
system.eventStream
)
)
}
def stopActor(ref: ActorRef): Unit = {
watch(ref)
system.stop(ref)
expectTerminated(ref)
}
override def afterAll(): Unit = {
system.shutdown()
}
test("RecoversDeploymentsAndReconcilesHealthChecksOnStart") {
val app = AppDefinition(id = "test-app".toPath, instances = 1)
when(groupRepo.rootGroup()).thenReturn(Future.successful(Some(Group.apply(PathId.empty, apps = Set(app)))))
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
awaitAssert({
verify(hcManager).reconcileWith(app.id)
}, 5.seconds, 10.millis)
verify(deploymentRepo, times(1)).all()
}
finally {
stopActor(schedulerActor)
}
}
test("ReconcileTasks") {
val app = AppDefinition(id = "test-app".toPath, instances = 1)
val tasks = Set(MarathonTask.newBuilder().setId("task_a").build())
when(repo.allPathIds()).thenReturn(Future.successful(Seq(app.id)))
when(tracker.get(app.id)).thenReturn(Set.empty[MarathonTask])
when(tracker.list).thenReturn(
mutable.HashMap(
PathId("nope") -> new TaskTracker.App(
"nope".toPath,
tasks,
false)))
when(tracker.get("nope".toPath)).thenReturn(tasks)
when(repo.currentVersion(app.id)).thenReturn(Future.successful(Some(app)))
when(tracker.count(app.id)).thenReturn(0)
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! ReconcileTasks
expectMsg(5.seconds, TasksReconciled)
awaitAssert({
verify(tracker).shutdown("nope".toPath)
verify(driver).killTask(TaskID("task_a"))
}, 5.seconds, 10.millis)
}
finally {
stopActor(schedulerActor)
}
}
test("ScaleApps") {
val app = AppDefinition(id = "test-app".toPath, instances = 1)
val tasks = Set(MarathonTask.newBuilder().setId("task_a").build())
when(repo.allPathIds()).thenReturn(Future.successful(Seq(app.id)))
when(tracker.get(app.id)).thenReturn(Set.empty[MarathonTask])
when(tracker.list).thenReturn(
mutable.HashMap(
PathId("nope") -> new TaskTracker.App(
"nope".toPath,
tasks,
false)))
when(tracker.get("nope".toPath)).thenReturn(tasks)
when(repo.currentVersion(app.id)).thenReturn(Future.successful(Some(app)))
when(tracker.count(app.id)).thenReturn(0)
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! ScaleApps
awaitAssert({
verify(queue).add(app, 1)
}, 5.seconds, 10.millis)
}
finally {
stopActor(schedulerActor)
}
}
test("ScaleApp") {
val app = AppDefinition(id = "test-app".toPath, instances = 1)
when(repo.allIds()).thenReturn(Future.successful(Seq(app.id.toString)))
when(tracker.get(app.id)).thenReturn(Set.empty[MarathonTask])
when(repo.currentVersion(app.id)).thenReturn(Future.successful(Some(app)))
when(tracker.count(app.id)).thenReturn(0)
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! ScaleApp("test-app".toPath)
awaitAssert({
verify(queue).add(app, 1)
}, 5.seconds, 10.millis)
expectMsg(5.seconds, AppScaled(app.id))
}
finally {
stopActor(schedulerActor)
}
}
test("Kill tasks with scaling") {
val app = AppDefinition(id = "test-app".toPath, instances = 1)
val taskA = MarathonTask.newBuilder().setId("taskA_id").build()
when(repo.allIds()).thenReturn(Future.successful(Seq(app.id.toString)))
when(tracker.get(app.id)).thenReturn(Set[MarathonTask](taskA))
when(tracker.fetchTask(app.id, taskA.getId))
.thenReturn(Some(taskA))
.thenReturn(None)
when(repo.currentVersion(app.id))
.thenReturn(Future.successful(Some(app)))
.thenReturn(Future.successful(Some(app.copy(instances = 0))))
when(tracker.count(app.id)).thenReturn(0)
when(repo.store(any())).thenReturn(Future.successful(app))
val statusUpdateEvent = MesosStatusUpdateEvent("", taskA.getId, "TASK_FAILED", "", app.id, "", Nil, app.version.toString)
when(driver.killTask(TaskID(taskA.getId))).thenAnswer(new Answer[Status] {
def answer(invocation: InvocationOnMock): Status = {
system.eventStream.publish(statusUpdateEvent)
Status.DRIVER_RUNNING
}
})
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! KillTasks(app.id, Set(taskA.getId))
expectMsg(5.seconds, TasksKilled(app.id, Set(taskA.getId)))
val Some(taskFailureEvent) = TaskFailure.FromMesosStatusUpdateEvent(statusUpdateEvent)
awaitAssert {
verify(taskFailureEventRepository, times(1)).store(app.id, taskFailureEvent)
}
// KillTasks does no longer scale
verify(repo, times(0)).store(any[AppDefinition]())
}
finally {
stopActor(schedulerActor)
}
}
test("Kill tasks") {
val app = AppDefinition(id = "test-app".toPath, instances = 1)
val taskA = MarathonTask.newBuilder().setId("taskA_id").build()
when(repo.allIds()).thenReturn(Future.successful(Seq(app.id.toString)))
when(tracker.get(app.id)).thenReturn(Set[MarathonTask](taskA))
when(tracker.fetchTask(app.id, taskA.getId))
.thenReturn(Some(taskA))
.thenReturn(None)
when(repo.currentVersion(app.id))
.thenReturn(Future.successful(Some(app)))
.thenReturn(Future.successful(Some(app.copy(instances = 0))))
when(tracker.count(app.id)).thenReturn(0)
when(repo.store(any())).thenReturn(Future.successful(app))
val statusUpdateEvent = MesosStatusUpdateEvent("", taskA.getId, "TASK_KILLED", "", app.id, "", Nil, app.version.toString)
when(driver.killTask(TaskID(taskA.getId))).thenAnswer(new Answer[Status] {
def answer(invocation: InvocationOnMock): Status = {
system.eventStream.publish(statusUpdateEvent)
Status.DRIVER_RUNNING
}
})
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! KillTasks(app.id, Set(taskA.getId))
expectMsg(5.seconds, TasksKilled(app.id, Set(taskA.getId)))
awaitAssert(verify(queue).add(app, 1))
}
finally {
stopActor(schedulerActor)
}
}
test("Deployment") {
val probe = TestProbe()
val app = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 2, upgradeStrategy = UpgradeStrategy(0.5), version = Timestamp(0))
val origGroup = Group(PathId("/foo/bar"), Set(app))
val appNew = app.copy(cmd = Some("cmd new"), version = Timestamp(1000))
val targetGroup = Group(PathId("/foo/bar"), Set(appNew))
val plan = DeploymentPlan("foo", origGroup, targetGroup, Nil, Timestamp.now())
system.eventStream.subscribe(probe.ref, classOf[UpgradeEvent])
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! Deploy(plan)
expectMsg(DeploymentStarted(plan))
val answer = probe.expectMsgType[DeploymentSuccess]
answer.id should be(plan.id)
system.eventStream.unsubscribe(probe.ref)
}
finally {
stopActor(schedulerActor)
}
}
test("Deployment resets rate limiter for affected apps") {
val probe = TestProbe()
val app = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 2, upgradeStrategy = UpgradeStrategy(0.5), version = Timestamp(0))
val taskA = MarathonTask.newBuilder().setId("taskA_id").build()
val origGroup = Group(PathId("/foo/bar"), Set(app))
val targetGroup = Group(PathId("/foo/bar"), Set())
val plan = DeploymentPlan("foo", origGroup, targetGroup, List(DeploymentStep(List(StopApplication(app)))), Timestamp.now())
when(tracker.get(app.id)).thenReturn(Set(taskA))
when(repo.store(any())).thenReturn(Future.successful(app))
when(repo.expunge(app.id)).thenReturn(Future.successful(Seq(true)))
system.eventStream.subscribe(probe.ref, classOf[UpgradeEvent])
queue.rateLimiter.addDelay(app)
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! Deploy(plan)
expectMsg(DeploymentStarted(plan))
awaitAssert(verify(repo).store(app.copy(instances = 0)))
system.eventStream.unsubscribe(probe.ref)
}
finally {
stopActor(schedulerActor)
}
}
test("Stopping an app sets instance count to 0 before removing the app completely") {
val probe = TestProbe()
val app = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 2, upgradeStrategy = UpgradeStrategy(0.5), version = Timestamp(0))
val taskA = MarathonTask.newBuilder().setId("taskA_id").build()
val taskB = MarathonTask.newBuilder().setId("taskB_id").build()
val origGroup = Group(PathId("/foo/bar"), Set(app))
val targetGroup = Group(PathId("/foo/bar"), Set())
val plan = DeploymentPlan("foo", origGroup, targetGroup, List(DeploymentStep(List(StopApplication(app)))), Timestamp.now())
when(tracker.get(app.id)).thenReturn(Set(taskA))
when(repo.store(any())).thenReturn(Future.successful(app))
when(repo.expunge(app.id)).thenReturn(Future.successful(Seq(true)))
when(driver.killTask(TaskID(taskA.getId))).thenAnswer(new Answer[Status] {
def answer(invocation: InvocationOnMock): Status = {
system.eventStream.publish(MesosStatusUpdateEvent("", taskA.getId, "TASK_KILLED", "", app.id, "", Nil, app.version.toString))
Status.DRIVER_RUNNING
}
})
system.eventStream.subscribe(probe.ref, classOf[UpgradeEvent])
queue.rateLimiter.addDelay(app)
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! Deploy(plan)
expectMsg(DeploymentStarted(plan))
awaitCond(queue.rateLimiter.getDelay(app).isOverdue(), 200.millis)
system.eventStream.unsubscribe(probe.ref)
}
finally {
stopActor(schedulerActor)
}
}
test("Deployment fail to acquire lock") {
val app = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 2, upgradeStrategy = UpgradeStrategy(0.5), version = Timestamp(0))
val group = Group(PathId("/foo/bar"), Set(app))
val plan = DeploymentPlan(Group.empty, group)
when(repo.store(any())).thenReturn(Future.successful(app))
when(repo.currentVersion(app.id)).thenReturn(Future.successful(None))
when(tracker.get(app.id)).thenReturn(Set.empty[MarathonTask])
when(repo.expunge(app.id)).thenReturn(Future.successful(Nil))
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! Deploy(plan)
expectMsgType[DeploymentStarted]
schedulerActor ! Deploy(plan)
val answer = expectMsgType[CommandFailed]
answer.cmd should equal(Deploy(plan))
answer.reason.isInstanceOf[AppLockedException] should be(true)
}
finally {
stopActor(schedulerActor)
}
}
test("Restart deployments after failover") {
val app = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 2, upgradeStrategy = UpgradeStrategy(0.5), version = Timestamp(0))
val group = Group(PathId("/foo/bar"), Set(app))
val plan = DeploymentPlan(Group.empty, group)
deploymentRepo = mock[DeploymentRepository]
when(deploymentRepo.expunge(any())).thenReturn(Future.successful(Seq(true)))
when(deploymentRepo.all()).thenReturn(Future.successful(Seq(plan)))
when(deploymentRepo.store(plan)).thenReturn(Future.successful(plan))
val schedulerActor = system.actorOf(
MarathonSchedulerActor.props(
schedulerActions,
deploymentManagerProps,
historyActorProps,
repo,
deploymentRepo,
hcManager,
tracker,
queue,
holder,
leaderInfo,
system.eventStream
))
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! Deploy(plan)
// This indicates that the deployment is already running,
// which means it has successfully been restarted
val answer = expectMsgType[CommandFailed]
answer.cmd should equal(Deploy(plan))
answer.reason.isInstanceOf[AppLockedException] should be(true)
}
finally {
stopActor(schedulerActor)
}
}
test("Forced deployment") {
val app = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 2, upgradeStrategy = UpgradeStrategy(0.5), version = Timestamp(0))
val group = Group(PathId("/foo/bar"), Set(app))
val plan = DeploymentPlan(Group.empty, group)
when(repo.store(any())).thenReturn(Future.successful(app))
when(repo.currentVersion(app.id)).thenReturn(Future.successful(None))
when(tracker.get(app.id)).thenReturn(Set.empty[MarathonTask])
when(repo.expunge(app.id)).thenReturn(Future.successful(Nil))
val schedulerActor = createActor()
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! Deploy(plan)
expectMsgType[DeploymentStarted]
schedulerActor ! Deploy(plan, force = true)
val answer = expectMsgType[DeploymentStarted]
}
finally {
stopActor(schedulerActor)
}
}
test("Cancellation timeout") {
val app = AppDefinition(id = PathId("app1"), cmd = Some("cmd"), instances = 2, upgradeStrategy = UpgradeStrategy(0.5), version = Timestamp(0))
val group = Group(PathId("/foo/bar"), Set(app))
val plan = DeploymentPlan(Group.empty, group)
when(repo.store(any())).thenReturn(Future.successful(app))
when(repo.currentVersion(app.id)).thenReturn(Future.successful(None))
when(tracker.get(app.id)).thenReturn(Set.empty[MarathonTask])
when(repo.expunge(app.id)).thenReturn(Future.successful(Nil))
val schedulerActor = TestActorRef(
MarathonSchedulerActor.props(
schedulerActions,
deploymentManagerProps,
historyActorProps,
repo,
deploymentRepo,
hcManager,
tracker,
queue,
holder,
leaderInfo,
system.eventStream,
cancellationTimeout = 0.seconds
)
)
try {
schedulerActor ! LocalLeadershipEvent.ElectedAsLeader
schedulerActor ! Deploy(plan)
expectMsgType[DeploymentStarted]
schedulerActor ! Deploy(plan, force = true)
val answer = expectMsgType[CommandFailed]
answer.reason.isInstanceOf[TimeoutException] should be(true)
      answer.reason.getMessage should not be null
}
finally {
stopActor(schedulerActor)
}
}
}
|
spacejam/marathon
|
src/test/scala/mesosphere/marathon/MarathonSchedulerActorTest.scala
|
Scala
|
apache-2.0
| 18,982 |
package com.twitter.finagle.memcached.integration
import com.twitter.conversions.time._
import com.twitter.finagle.memcached.util.AtomicMap
import com.twitter.finagle._
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.memcached.protocol.ClientError
import com.twitter.finagle.memcached.{Client, Entry, Interpreter, InterpreterService, KetamaClientBuilder, PartitionedClient}
import com.twitter.finagle.service.FailureAccrualFactory
import com.twitter.finagle.stats.InMemoryStatsReceiver
import com.twitter.io.Buf
import com.twitter.util._
import com.twitter.util.registry.GlobalRegistry
import java.net.{InetAddress, InetSocketAddress}
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfter, FunSuite, Outcome}
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class MemcachedTest extends FunSuite with BeforeAndAfter {
val NumServers = 5
val NumConnections = 4
var servers: Seq[TestMemcachedServer] = Seq.empty
var client: Client = null
val TimeOut = 15.seconds
test("Clients and servers on different netty versions") {
val concurrencyLevel = 16
val slots = 500000
val slotsPerLru = slots / concurrencyLevel
val maps = (0 until concurrencyLevel).map { i =>
new SynchronizedLruMap[Buf, Entry](slotsPerLru)
}
val service = {
val interpreter = new Interpreter(new AtomicMap(maps))
new InterpreterService(interpreter)
}
val server = Memcached.server
val client = Memcached.client
val servers = Seq(
server.configured(Memcached.param.MemcachedImpl.Netty3),
server.configured(Memcached.param.MemcachedImpl.Netty4)
)
val clients = Seq(
client.configured(Memcached.param.MemcachedImpl.Netty3),
client.configured(Memcached.param.MemcachedImpl.Netty4)
)
for (server <- servers; client <- clients) {
val srv = server.serve(new InetSocketAddress(InetAddress.getLoopbackAddress, 0), service)
val clnt = client.newRichClient(
Name.bound(Address(srv.boundAddress.asInstanceOf[InetSocketAddress])), "client")
Await.result(clnt.delete("foo"), 5.seconds)
assert(Await.result(clnt.get("foo"), 5.seconds) == None)
Await.result(clnt.set("foo", Buf.Utf8("bar")), 5.seconds)
      assert(Await.result(clnt.get("foo"), 5.seconds).get == Buf.Utf8("bar"))
}
}
private val clientName = "test_client"
before {
val serversOpt = for (_ <- 1 to NumServers) yield TestMemcachedServer.start()
if (serversOpt.forall(_.isDefined)) {
servers = serversOpt.flatten
val n = Name.bound(servers.map { s => (Address(s.address)) }: _*)
client = Memcached.client.newRichClient(n, clientName)
}
}
after {
servers.foreach(_.stop())
}
override def withFixture(test: NoArgTest): Outcome = {
if (servers.length == NumServers) test() else {
info("Cannot start memcached. Skipping test...")
cancel()
}
}
test("set & get") {
Await.result(client.delete("foo"))
assert(Await.result(client.get("foo")) == None)
Await.result(client.set("foo", Buf.Utf8("bar")))
assert(Await.result(client.get("foo")).get == Buf.Utf8("bar"))
}
test("set & get data containing newlines") {
Await.result(client.delete("bob"))
assert(Await.result(client.get("bob")) == None)
Await.result(client.set("bob", Buf.Utf8("hello there \r\n nice to meet \r\n you")))
assert(Await.result(client.get("bob")).get ==
Buf.Utf8("hello there \r\n nice to meet \r\n you"), 3.seconds)
}
test("get") {
Await.result(client.set("foo", Buf.Utf8("bar")))
Await.result(client.set("baz", Buf.Utf8("boing")))
val result =
Await.result(
client.get(Seq("foo", "baz", "notthere"))
).map { case (key, Buf.Utf8(value)) =>
(key, value)
}
assert(result == Map("foo" -> "bar", "baz" -> "boing"))
}
if (Option(System.getProperty("USE_EXTERNAL_MEMCACHED")).isDefined) {
test("gets") {
// create a client that connects to only one server so we can predict CAS tokens
val client = Memcached.client.newRichClient(
Name.bound(Address(servers(0).address)), "client")
Await.result(client.set("foos", Buf.Utf8("xyz"))) // CAS: 1
Await.result(client.set("bazs", Buf.Utf8("xyz"))) // CAS: 2
Await.result(client.set("bazs", Buf.Utf8("zyx"))) // CAS: 3
Await.result(client.set("bars", Buf.Utf8("xyz"))) // CAS: 4
Await.result(client.set("bars", Buf.Utf8("zyx"))) // CAS: 5
Await.result(client.set("bars", Buf.Utf8("yxz"))) // CAS: 6
val result =
Await.result(
client.gets(Seq("foos", "bazs", "bars", "somethingelse"))
).map { case (key, (Buf.Utf8(value), Buf.Utf8(casUnique))) =>
(key, (value, casUnique))
}
val expected =
Map(
"foos" -> (("xyz", "1")), // the "cas unique" values are predictable from a fresh memcached
"bazs" -> (("zyx", "3")),
"bars" -> (("yxz", "6"))
)
assert(result == expected)
}
}
if (Option(System.getProperty("USE_EXTERNAL_MEMCACHED")).isDefined) {
test("cas") {
Await.result(client.set("x", Buf.Utf8("y")))
val Some((value, casUnique)) = Await.result(client.gets("x"))
assert(value == Buf.Utf8("y"))
assert(casUnique == Buf.Utf8("1"))
assert(!Await.result(client.checkAndSet("x", Buf.Utf8("z"), Buf.Utf8("2")).map(_.replaced)))
assert(Await.result(client.checkAndSet("x", Buf.Utf8("z"), casUnique).map(_.replaced)).booleanValue)
val res = Await.result(client.get("x"))
assert(res.isDefined)
assert(res.get == Buf.Utf8("z"))
}
}
test("append & prepend") {
Await.result(client.set("foo", Buf.Utf8("bar")))
Await.result(client.append("foo", Buf.Utf8("rab")))
assert(Await.result(client.get("foo")).get == Buf.Utf8("barrab"))
Await.result(client.prepend("foo", Buf.Utf8("rab")))
assert(Await.result(client.get("foo")).get == Buf.Utf8("rabbarrab"))
}
test("incr & decr") {
// As of memcached 1.4.8 (issue 221), empty values are no longer treated as integers
Await.result(client.set("foo", Buf.Utf8("0")))
assert(Await.result(client.incr("foo")) == Some(1L))
assert(Await.result(client.incr("foo", 2)) == Some(3L))
assert(Await.result(client.decr("foo")) == Some(2L))
Await.result(client.set("foo", Buf.Utf8("0")))
assert(Await.result(client.incr("foo")) == Some(1L))
val l = 1L << 50
assert(Await.result(client.incr("foo", l)) == Some(l + 1L))
assert(Await.result(client.decr("foo")) == Some(l))
assert(Await.result(client.decr("foo", l)) == Some(0L))
}
if (Option(System.getProperty("USE_EXTERNAL_MEMCACHED")).isDefined) {
test("stats") {
// We can't use a partitioned client to get stats, because we don't hash to a server based on
// a key. Instead, we create a ConnectedClient, which is connected to one server.
val service = Memcached.client.newService(Name.bound(Address(servers(0).address)), "client")
val connectedClient = Client(service)
val stats = Await.result(connectedClient.stats())
assert(stats != null)
assert(!stats.isEmpty)
stats.foreach { stat =>
assert(stat.startsWith("STAT"))
}
}
}
test("send malformed keys") {
// test key validation trait
intercept[ClientError] { Await.result(client.get("fo o")) }
intercept[ClientError] { Await.result(client.set("", Buf.Utf8("bar"))) }
intercept[ClientError] { Await.result(client.get(" foo")) }
intercept[ClientError] { Await.result(client.get("foo ")) }
intercept[ClientError] { Await.result(client.get(" foo")) }
val nullString: String = null
intercept[NullPointerException] { Await.result(client.get(nullString)) }
intercept[NullPointerException] { Await.result(client.set(nullString, Buf.Utf8("bar"))) }
intercept[ClientError] { Await.result(client.set(" ", Buf.Utf8("bar"))) }
assert(Await.result(client.set("\t", Buf.Utf8("bar")).liftToTry) == Return.Unit) // "\t" is a valid key
intercept[ClientError] { Await.result(client.set("\r", Buf.Utf8("bar"))) }
intercept[ClientError] { Await.result(client.set("\n", Buf.Utf8("bar"))) }
intercept[ClientError] { Await.result(client.set("\u0000", Buf.Utf8("bar"))) }
val veryLongKey = "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz"
intercept[ClientError] { Await.result(client.get(veryLongKey)) }
intercept[ClientError] { Await.result(client.set(veryLongKey, Buf.Utf8("bar"))) }
// test other keyed command validation
val nullSeq:Seq[String] = null
intercept[NullPointerException] { Await.result(client.get(nullSeq)) }
intercept[ClientError] { Await.result(client.append("bad key", Buf.Utf8("rab"))) }
intercept[ClientError] { Await.result(client.prepend("bad key", Buf.Utf8("rab"))) }
intercept[ClientError] { Await.result(client.replace("bad key", Buf.Utf8("bar"))) }
intercept[ClientError] { Await.result(client.add("bad key", Buf.Utf8("2"))) }
intercept[ClientError] { Await.result(client.checkAndSet("bad key", Buf.Utf8("z"), Buf.Utf8("2"))) }
intercept[ClientError] { Await.result(client.incr("bad key")) }
intercept[ClientError] { Await.result(client.decr("bad key")) }
intercept[ClientError] { Await.result(client.delete("bad key")) }
}
test("re-hash when a bad host is ejected") {
client = Memcached.client
.configured(FailureAccrualFactory.Param(1, () => 10.minutes))
.configured(Memcached.param.EjectFailedHost(true))
.newRichClient(Name.bound(servers.map { s => (Address(s.address)) }: _*), "test_client")
val partitionedClient = client.asInstanceOf[PartitionedClient]
// set values
Await.result(Future.collect(
(0 to 20).map { i =>
client.set(s"foo$i", Buf.Utf8(s"bar$i"))
}
), TimeOut)
// shutdown one memcache host
servers(0).stop()
// trigger ejection
for (i <- 0 to 20) {
Await.ready(client.get(s"foo$i"), TimeOut)
}
// other hosts alive
val clientSet =
(0 to 20).foldLeft(Set[Client]()){ case (s, i) =>
val c = partitionedClient.clientOf(s"foo$i")
s + c
}
assert(clientSet.size == NumServers - 1)
// previously set values have cache misses
var cacheMisses = 0
for (i <- 0 to 20) {
if (Await.result(client.get(s"foo$i"), TimeOut) == None) cacheMisses = cacheMisses + 1
}
assert(cacheMisses > 0)
}
test("GlobalRegistry pipelined client") {
val expectedKey = Seq("client", "memcached", clientName, "is_pipelining")
val isPipelining = GlobalRegistry.get.iterator.exists { e =>
e.key == expectedKey && e.value == "true"
}
assert(isPipelining)
}
test("GlobalRegistry non-pipelined client") {
val name = "not-pipelined"
val expectedKey = Seq("client", "memcached", name, "is_pipelining")
KetamaClientBuilder()
.clientBuilder(ClientBuilder()
.hosts(Seq(servers(0).address))
.name(name)
.codec(new com.twitter.finagle.memcached.protocol.text.Memcached())
.hostConnectionLimit(1))
.build()
val isPipelining = GlobalRegistry.get.iterator.exists { e =>
e.key == expectedKey && e.value == "false"
}
assert(isPipelining)
}
test("host comes back into ring after being ejected") {
import com.twitter.finagle.memcached.protocol._
class MockedMemcacheServer extends Service[Command, Response] {
def apply(command: Command) = command match {
case Get(key) => Future.value(Values(List(Value(Buf.Utf8("foo"), Buf.Utf8("bar")))))
case Set(_, _, _, _) => Future.value(Error(new Exception))
case x => Future.exception(new MatchError(x))
}
}
val cacheServer = Memcached.serve(
new InetSocketAddress(InetAddress.getLoopbackAddress, 0),
new MockedMemcacheServer)
val timer = new MockTimer
val statsReceiver = new InMemoryStatsReceiver
val client = Memcached.client
.configured(FailureAccrualFactory.Param(1, () => 10.minutes))
.configured(Memcached.param.EjectFailedHost(true))
.configured(param.Timer(timer))
.configured(param.Stats(statsReceiver))
.newRichClient(Name.bound(Address(cacheServer.boundAddress.asInstanceOf[InetSocketAddress])), "cacheClient")
Time.withCurrentTimeFrozen { timeControl =>
// Send a bad request
intercept[Exception] { Await.result(client.set("foo", Buf.Utf8("bar"))) }
// Node should have been ejected
assert(statsReceiver.counters.get(List("cacheClient", "ejections")) == Some(1))
// Node should have been marked dead, and still be dead after 5 minutes
timeControl.advance(5.minutes)
// Shard should be unavailable
intercept[ShardNotAvailableException] {
Await.result(client.get(s"foo"))
}
timeControl.advance(5.minutes)
timer.tick()
// 10 minutes (markDeadFor duration) have passed, so the request should go through
assert(statsReceiver.counters.get(List("cacheClient", "revivals")) == Some(1))
assert(Await.result(client.get(s"foo")).get == Buf.Utf8("bar"))
}
}
test("Add and remove nodes") {
val addrs = servers.map { s => (Address(s.address)) }
// Start with 3 backends
val mutableAddrs: ReadWriteVar[Addr] = new ReadWriteVar(Addr.Bound(addrs.toSet.drop(2)))
val sr = new InMemoryStatsReceiver
val myClient = Memcached.client
.withLoadBalancer.connectionsPerEndpoint(NumConnections)
.withStatsReceiver(sr)
.newRichClient(Name.Bound.singleton(mutableAddrs), "test_client")
assert(sr.counters(Seq("test_client", "redistributes")) == 1)
assert(sr.counters(Seq("test_client", "loadbalancer", "rebuilds")) == 3)
assert(sr.counters(Seq("test_client", "loadbalancer", "updates")) == 3)
assert(sr.counters(Seq("test_client", "loadbalancer", "adds")) == NumConnections * 3)
assert(sr.counters(Seq("test_client", "loadbalancer", "removes")) == 0)
// Add 2 nodes to the backends, for a total of 5 backends
mutableAddrs.update(Addr.Bound(addrs.toSet))
assert(sr.counters(Seq("test_client", "redistributes")) == 2)
// Need to rebuild each of the 5 nodes with `numConnections`
assert(sr.counters(Seq("test_client", "loadbalancer", "rebuilds")) == 5)
assert(sr.counters(Seq("test_client", "loadbalancer", "updates")) == 5)
assert(sr.counters(Seq("test_client", "loadbalancer", "adds")) == NumConnections * 5)
assert(sr.counters(Seq("test_client", "loadbalancer", "removes")) == 0)
// Remove 1 node from the backends, for a total of 4 backends
mutableAddrs.update(Addr.Bound(addrs.toSet.drop(1)))
assert(sr.counters(Seq("test_client", "redistributes")) == 3)
// Don't need to rebuild or update any existing nodes
assert(sr.counters(Seq("test_client", "loadbalancer", "rebuilds")) == 5)
assert(sr.counters(Seq("test_client", "loadbalancer", "updates")) == 5)
assert(sr.counters(Seq("test_client", "loadbalancer", "adds")) == NumConnections * 5)
assert(sr.counters(Seq("test_client", "leaves")) == 1)
// Node is removed, closing `numConnections` in the LoadBalancer
assert(sr.counters(Seq("test_client", "loadbalancer", "removes")) == NumConnections)
// Update the backends with the same list, for a total of 4 backends
mutableAddrs.update(Addr.Bound(addrs.toSet.drop(1)))
assert(sr.counters(Seq("test_client", "redistributes")) == 4)
// Ensure we don't do anything in the LoadBalancer because the set of nodes is the same
assert(sr.counters(Seq("test_client", "loadbalancer", "rebuilds")) == 5)
assert(sr.counters(Seq("test_client", "loadbalancer", "updates")) == 5)
assert(sr.counters(Seq("test_client", "loadbalancer", "adds")) == NumConnections * 5)
assert(sr.counters(Seq("test_client", "loadbalancer", "removes")) == NumConnections)
}
test("FailureAccrualFactoryException has remote address") {
val client = Memcached.client
.withLoadBalancer.connectionsPerEndpoint(1)
// 1 failure triggers FA; make sure FA stays in "dead" state after failure
.configured(FailureAccrualFactory.Param(1, 10.minutes))
.withEjectFailedHost(false)
.newTwemcacheClient(Name.bound(Address("localhost", 1234)), "client")
// Trigger transition to "Dead" state
intercept[Exception] {
Await.result(client.delete("foo"), 1.second)
}
// Client has not been ejected, so the same client gets a re-application of the connection,
// triggering the 'failureAccrualEx' in KetamaFailureAccrualFactory
val failureAccrualEx = intercept[HasRemoteInfo] {
Await.result(client.delete("foo"), 1.second)
}
assert(failureAccrualEx.getMessage.contains("Endpoint is marked dead by failureAccrual"))
assert(failureAccrualEx.getMessage.contains("Downstream Address: localhost/127.0.0.1:1234"))
}
}
|
spockz/finagle
|
finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/MemcachedTest.scala
|
Scala
|
apache-2.0
| 17,197 |
package cracker
@serializable
class CrackerTreeMessageRedPhase (val first : Option[CrackerTreeMessageIdentification], val second : Option[CrackerTreeMessageTree]) extends CrackerMessageSize
{
def getMessageSize = first.getOrElse(CrackerTreeMessageIdentification.empty).getMessageSize + second.getOrElse(CrackerTreeMessageTree.empty).getMessageSize
}
object CrackerTreeMessageRedPhase
{
def apply(first : CrackerTreeMessageIdentification) = new CrackerTreeMessageRedPhase(Option.apply(first), Option.empty)
def apply(second : CrackerTreeMessageTree) = new CrackerTreeMessageRedPhase(Option.empty, Option.apply(second))
}
|
hpclab/cracker
|
src/cracker/CrackerMessageRedPhase.scala
|
Scala
|
mit
| 625 |
//
// Logger.scala -- Scala object Logger
// Project OrcScala
//
// Created by jthywiss on Aug 21, 2010.
//
// Copyright (c) 2016 The University of Texas at Austin. All rights reserved.
//
// Use and redistribution of this file is governed by the license terms in
// the LICENSE file found in the project's top-level directory and also found at
// URL: http://orc.csres.utexas.edu/license.shtml .
//
package orc.compile
/** Logger for the orc.compile subsystem
*
* @author jthywiss
*/
object Logger extends orc.util.Logger("orc.compile")
|
orc-lang/orc
|
OrcScala/src/orc/compile/Logger.scala
|
Scala
|
bsd-3-clause
| 545 |
package edu.neu.coe.csye._7200.util
import scala.util.{Try,Failure}
/**
* Classes which provide chaining of Try operations where each operation is (typically) a function which takes
* the same parameter.
* There are two distinct forms:
* <ul><li>the Trial/Match classes where the function given is of the form V=>Try[T]</li>
* <li>the LiftTrial/LiftMatch classes where the function given is of the form V=>T</li>
* </ul>
* The more general class of each form is Trial (LiftTrial) which takes any function which operates on the given parameter.
* The other class is Match/LiftMatch which takes a PartialFunction of form Any=>Try[T] or Any=>T as appropriate.
*
* @author scalaprof
*/
/**
* Trial class which can be composed leftwards or rightwards with functions (including other Trial or Match object and even LiftTrial and LiftMatch objects)
*
* @param f the function which, when applied to the input parameter of type V, will yield a Try[T]
 * @tparam V the type of the input parameter
 * @tparam T the underlying type of the resulting Try
*/
case class Trial[V, T](f: V=>Try[T]) extends TrialBase[V,T](f)
object Trial {
// The following method creates a null trial which can be used at the start or end
// of a chain of functions
def none[V,T]: Trial[V,T] = Trial.apply(_ => Failure(new Exception("null trial")))
def lift[V,T](f: V=>T): Trial[V,T] = Trial(Lift(f))
}
/**
* Match class which can be composed leftwards or rightwards with functions (including other TrialBase object)
*
* @param f the partial function which, when applied to the input parameter of type Any, will yield a Try[T]
 * @tparam T the underlying type of the resulting Try
*/
case class Match[T](f: PartialFunction[Any,Try[T]]) extends TrialBase[Any,T](f)
/**
* LiftTrial class which can be composed leftwards or rightwards with functions (including other TrialBase object).
 * Note that anywhere you can write LiftTrial(f), you could, more simply, write Trial(Lift(f))
*
* @param f the function which, when applied to the input parameter of type V, will yield a T
 * @tparam V the type of the input parameter
 * @tparam T the underlying type of the resulting Try
*/
case class LiftTrial[V, T](f: V=>T) extends TrialBase[V,T](Lift(f))
/**
* LiftMatch class which can be composed leftwards or rightwards with functions (including other TrialBase object)
 * Note that anywhere you can write LiftMatch(f), you could, in theory, write Trial(Lift(f)) but in practice it can
 * be tricky so this definition remains for convenience.
*
* @param f the partial function which, when applied to the input parameter of type Any, will yield a T
 * @tparam T the underlying type of the resulting Try
*/
case class LiftMatch[T](f: PartialFunction[Any,T]) extends TrialBase[Any,T](Lift(f))
case class Lift[V, T](f: V=>T) extends (V=>Try[T]) {
def apply(v: V): Try[T] = Try(f(v))
}
/**
* Abstract base class for Trial, Match, LiftTrial and LiftMatch
*
* @param f the function which, when applied to the input parameter of type V, will yield a Try[T]
 * @tparam V the type of the input parameter
 * @tparam T the underlying type of the resulting Try
*/
abstract class TrialBase[V, T](f: V=>Try[T]) extends (V=>Try[T]) {
def apply(v: V): Try[T] = f(v)
private def orElse(f: V=>Try[T], g: V=>Try[T]): V=>Try[T] = {v => f(v) orElse g(v) }
def |: (g: V => Try[T]): Trial[V,T] = Trial(orElse(g,f))
def :| (g: V => Try[T]): Trial[V,T] = Trial(orElse(f,g))
def ^: (g: V => T): Trial[V,T] = Trial(orElse(Lift(g),f))
def :^ (g: V => T): Trial[V,T] = Trial(orElse(f,Lift(g)))
}
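// A minimal usage sketch (added as an illustration; not part of the original file). It shows the
// fallback-chaining described in the scaladoc above: each alternative parser is tried in turn
// until one succeeds. The parser functions and sample inputs are assumptions for illustration.
object TrialUsageExample extends App {
  val parseInt: Trial[String, Any] = Trial.lift[String, Any](_.toInt)
  val parseDouble: Trial[String, Any] = Trial.lift[String, Any](_.toDouble)
  val parseBoolean: Trial[String, Any] = Trial.lift[String, Any](_.toBoolean)
  // :| composes rightwards: try parseInt first, then parseDouble, then parseBoolean
  val parse = parseInt :| parseDouble :| parseBoolean
  println(parse("42"))   // Success(42)
  println(parse("3.14")) // Success(3.14)
  println(parse("true")) // Success(true)
  println(parse("oops")) // Failure (from the last alternative, toBoolean)
}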
|
rchillyard/Scalaprof
|
FunctionalProgramming/src/main/scala/edu/neu/coe/csye/_7200/util/Trial.scala
|
Scala
|
gpl-2.0
| 3,597 |
package com.socrata.balboa.server
import com.socrata.balboa.server.ResponseWithType.json
import com.typesafe.scalalogging.StrictLogging
import org.eclipse.jetty.http.HttpStatus.INTERNAL_SERVER_ERROR_500
import org.scalatra.json.JacksonJsonSupport
import org.scalatra.{InternalServerError, ScalatraServlet}
trait UnexpectedErrorFilter extends ScalatraServlet
with StrictLogging
with JacksonJsonSupport {
// Generic error handling so that all unexpected errors will result in logging
// a stack trace and returning a 500 status code.
error {
case e: Throwable =>
logger.error("Fatal error", e)
contentType = json
InternalServerError(Error(INTERNAL_SERVER_ERROR_500, "Server error: " + e.getMessage))
}
}
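// A hedged usage sketch (added as an illustration; the servlet and route below are assumptions,
// not part of the original project). Any Scalatra servlet that mixes in the trait above gets the
// catch-all handler, so an unexpected exception is logged and answered as a JSON 500.
class ExampleServlet extends ScalatraServlet with UnexpectedErrorFilter {
  protected implicit lazy val jsonFormats: org.json4s.Formats = org.json4s.DefaultFormats
  get("/boom") {
    throw new RuntimeException("boom") // handled by the error block in UnexpectedErrorFilter
  }
}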
|
socrata-platform/balboa
|
balboa-http/src/main/scala/com/socrata/balboa/server/UnexpectedErrorFilter.scala
|
Scala
|
apache-2.0
| 739 |
package db
import java.util.concurrent.Executors
import cats.effect.{Async, Blocker, ContextShift, IO}
import com.zaxxer.hikari.{HikariConfig, HikariDataSource}
import doobie.Transactor
import play.api.inject.ApplicationLifecycle
import scala.concurrent.{ExecutionContext, Future}
case class JdbcConfig(
driverClassName: String,
url: String,
user: String,
password: String,
maxConnectionPoolSize: Int = 10
)
object DatabaseConfig {
def simpleTransactor(config: JdbcConfig)(implicit ec: ExecutionContext) = {
implicit val cs: ContextShift[IO] = IO.contextShift(ec)
Transactor.fromDriverManager[IO](
config.driverClassName,
config.url,
config.user,
config.password
)
}
def transactorAndDataSource[F[_] : Async](config: JdbcConfig)(implicit cs: ContextShift[F]): (Transactor[F], HikariDataSource) = {
val connectEC = ExecutionContext.fromExecutor(Executors.newFixedThreadPool(10))
val transactEC = ExecutionContext.fromExecutor(Executors.newCachedThreadPool)
val hikariConfig = new HikariConfig()
hikariConfig.setDriverClassName("org.postgresql.Driver")
hikariConfig.setMaximumPoolSize(config.maxConnectionPoolSize)
hikariConfig.setJdbcUrl(config.url)
hikariConfig.setUsername(config.user)
hikariConfig.setPassword(config.password)
hikariConfig.addDataSourceProperty("socketTimeout", "45")
val dataSource = new HikariDataSource(hikariConfig)
(Transactor.fromDataSource.apply(dataSource, connectEC, Blocker.liftExecutionContext(transactEC)), dataSource)
}
def transactor[F[_] : Async](config: JdbcConfig)(implicit cs: ContextShift[F]): Transactor[F] = {
val (transactor, _) = transactorAndDataSource(config)
transactor
}
def transactor[F[_] : Async](config: JdbcConfig, applicationLifecycle: ApplicationLifecycle)(implicit cs: ContextShift[F]): Transactor[F] = {
// manually creating the transactor to avoid having it wrapped in a Resource. Resources don't play well with
// Play's way of handling lifecycle
val (transactor, dataSource) = transactorAndDataSource(config)
applicationLifecycle.addStopHook(() => Future.successful(dataSource.close()))
transactor
}
}
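// A hedged usage sketch (added as an illustration; the connection details below are placeholders,
// not values from the original project). It builds a JdbcConfig and obtains a doobie Transactor
// via the simple (non-pooled) factory above.
object DatabaseConfigUsageExample {
  import scala.concurrent.ExecutionContext
  def example(): Transactor[IO] = {
    implicit val ec: ExecutionContext = ExecutionContext.global
    val config = JdbcConfig(
      driverClassName = "org.postgresql.Driver",
      url = "jdbc:postgresql://localhost:5432/example_db", // placeholder URL
      user = "example_user",                               // placeholder credentials
      password = "example_password"
    )
    // For a pooled HikariCP-backed transactor tied to the Play lifecycle, use
    // DatabaseConfig.transactor(config, applicationLifecycle) instead.
    DatabaseConfig.simpleTransactor(config)
  }
}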
|
guardian/mobile-n10n
|
common/src/main/scala/db/DatabaseConfig.scala
|
Scala
|
apache-2.0
| 2,205 |
package ingraph.sandbox
import ingraph.compiler.test.{CompilerTest, CompilerTestConfig}
class JsaTest extends CompilerTest {
override val config = CompilerTestConfig(querySuitePath = Some("jsa")
, compileGPlanOnly = true
, skipGPlanResolve = false
, skipGPlanBeautify = false
, printQuery = false
, printCypher = true
, printGPlan = true
, printNPlan = false
, printFPlan = false
)
test("arithmetics_logarithmArgument.cypher") {
compileFromFile("arithmetics_logarithmArgument")
}
test("arithmetics_squareRootArgument.cypher") {
compileFromFile("arithmetics_squareRootArgument")
}
test("basicimport.cypher") {
compileFromFile("basicimport")
}
test("BlockStatement.cypher") {
compileFromFile("BlockStatement")
}
test("Boolean.cypher") {
compileFromFile("Boolean")
}
test("CallExpressionNoParam.cypher") {
compileFromFile("CallExpressionNoParam")
}
test("CallExpressionParam.cypher") {
compileFromFile("CallExpressionParam")
}
test("countcompilationunitnodes.cypher") {
compileFromFile("countcompilationunitnodes")
}
test("countnodes.cypher") {
compileFromFile("countnodes")
}
ignore("deletegraph.cypher") {
compileFromFile("deletegraph")
}
test("divisionByZero_simpleVariable.cypher") {
compileFromFile("divisionByZero_simpleVariable")
}
test("EqualsZero.cypher") {
compileFromFile("EqualsZero")
}
test("ExceptionThrown.cypher") {
compileFromFile("ExceptionThrown")
}
ignore("exportAlias_importAlias.cypher") {
compileFromFile("exportAlias_importAlias")
}
ignore("exportAlias_importDefault.cypher") {
compileFromFile("exportAlias_importDefault")
}
ignore("exportAlias_importName.cypher") {
compileFromFile("exportAlias_importName")
}
ignore("exportDeclaration_importAlias.cypher") {
compileFromFile("exportDeclaration_importAlias")
}
ignore("exportDeclaration_importName.cypher") {
compileFromFile("exportDeclaration_importName")
}
ignore("exportDefaultDeclaration_importAlias.cypher") {
compileFromFile("exportDefaultDeclaration_importAlias")
}
ignore("exportDefaultDeclaration_importDefault.cypher") {
compileFromFile("exportDefaultDeclaration_importDefault")
}
ignore("exportDefaultDeclaration_importName.cypher") {
compileFromFile("exportDefaultDeclaration_importName")
}
ignore("exportDefaultName_importAlias.cypher") {
compileFromFile("exportDefaultName_importAlias")
}
ignore("exportDefaultName_importDefault.cypher") {
compileFromFile("exportDefaultName_importDefault")
}
ignore("exportDefaultName_importName.cypher") {
compileFromFile("exportDefaultName_importName")
}
ignore("exportName_importAlias.cypher") {
compileFromFile("exportName_importAlias")
}
ignore("exportName_importName.cypher") {
compileFromFile("exportName_importName")
}
test("ExpressionStatement.cypher") {
compileFromFile("ExpressionStatement")
}
test("FunctionCallStatement.cypher") {
compileFromFile("FunctionCallStatement")
}
test("FunctionDeclaration.cypher") {
compileFromFile("FunctionDeclaration")
}
test("FunctionReturnStatement.cypher") {
compileFromFile("FunctionReturnStatement")
}
test("FunctionThrowStatement.cypher") {
compileFromFile("FunctionThrowStatement")
}
//FIXME: Incomplete compilation found: p = shortestPath((fun)-[*]->(`call`))
ignore("generatecalls.cypher") {
compileFromFile("generatecalls")
}
test("getlastcommithash.cypher") {
compileFromFile("getlastcommithash")
}
test("IfStatementAlternate.cypher") {
compileFromFile("IfStatementAlternate")
}
test("IfStatementNoAlternate.cypher") {
compileFromFile("IfStatementNoAlternate")
}
test("Infinity.cypher") {
compileFromFile("Infinity")
}
test("ListNoItem.cypher") {
compileFromFile("ListNoItem")
}
test("ListWithItem.cypher") {
compileFromFile("ListWithItem")
}
test("LiteralX.cypher") {
compileFromFile("LiteralX")
}
test("LogicalOr.cypher") {
compileFromFile("LogicalOr")
}
test("nonInitializedVariable.cypher") {
compileFromFile("nonInitializedVariable")
}
test("Null.cypher") {
compileFromFile("Null")
}
test("Numeric.cypher") {
compileFromFile("Numeric")
}
test("QualifierSystem.cypher") {
compileFromFile("QualifierSystem")
}
test("Read.cypher") {
compileFromFile("Read")
}
test("RegExp.cypher") {
compileFromFile("RegExp")
}
ignore("removecfg.cypher") {
compileFromFile("removecfg")
}
ignore("removefile.cypher") {
compileFromFile("removefile")
}
ignore("setcommithash.cypher") {
compileFromFile("setcommithash")
}
test("String.cypher") {
compileFromFile("String")
}
test("TypeSystem.cypher") {
compileFromFile("TypeSystem")
}
ignore("typing.cypher") {
compileFromFile("typing")
}
test("unreachableCode.cypher") {
compileFromFile("unreachableCode")
}
test("unusedExports_exportDeclaration.cypher") {
compileFromFile("unusedExports_exportDeclaration")
}
test("unusedExports_exportDefault.cypher") {
compileFromFile("unusedExports_exportDefault")
}
test("unusedExports_exportName_exportAlias.cypher") {
compileFromFile("unusedExports_exportName_exportAlias")
}
ignore("unusedfunctions.cypher") {
compileFromFile("unusedfunctions")
}
test("VariableDeclaration.cypher") {
compileFromFile("VariableDeclaration")
}
test("VariableDeclarationStatement.cypher") {
compileFromFile("VariableDeclarationStatement")
}
test("VariableDeclarator.cypher") {
compileFromFile("VariableDeclarator")
}
test("VariableInitialization.cypher") {
compileFromFile("VariableInitialization")
}
test("VariableReference.cypher") {
compileFromFile("VariableReference")
}
test("Write.cypher") {
compileFromFile("Write")
}
}
|
FTSRG/ingraph
|
compiler/src/test/scala/ingraph/sandbox/JsaTest.scala
|
Scala
|
epl-1.0
| 5,900 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.utils
import org.junit.Assume._
import org.junit.BeforeClass
import org.scalajs.testsuite.utils.Platform._
object Requires {
trait TypedArray {
@BeforeClass def needsTypedArrays(): Unit =
assumeTrue("Assumed typed arrays are supported", typedArrays)
}
trait StrictFloats {
    @BeforeClass def needsStrictFloats(): Unit =
assumeTrue("Assumed strict floats", hasStrictFloats)
}
}
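// A hedged usage sketch (added as an illustration; the test and method names are made up).
// JUnit runs @BeforeClass from a static context, so a suite opts into a requirement by
// extending the trait from its companion object.
object ExampleTypedArrayTest extends Requires.TypedArray
class ExampleTypedArrayTest {
  @org.junit.Test def usesTypedArrays(): Unit = ()
}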
|
scala-js/scala-js
|
test-suite/js/src/test/scala/org/scalajs/testsuite/utils/Requires.scala
|
Scala
|
apache-2.0
| 711 |
/**
* Copyright (c) 2014 Mark S. Kolich
* http://mark.koli.ch
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
import sbt._
import sbt.Keys._
object Dependencies {
// Internal dependencies.
private val kolichHttpClient4Closure = "com.kolich" % "kolich-httpclient4-closure" % "2.3" % "compile"
// External dependencies.
private val signpostCore = "oauth.signpost" % "signpost-core" % "1.2.1.2" % "compile"
private val signpostHttpClient4 = "oauth.signpost" % "signpost-commonshttp4" % "1.2.1.2" % "compile"
val deps = Seq(
kolichHttpClient4Closure,
signpostCore, signpostHttpClient4)
}
object Resolvers {
private val kolichRepo = "Kolich repo" at "http://markkolich.github.io/repo"
val depResolvers = Seq(kolichRepo)
}
object Twitter extends Build {
import Dependencies._
import Resolvers._
private val aName = "kolich-twitter"
private val aVer = "0.2"
private val aOrg = "com.kolich"
lazy val twitter: Project = Project(
aName,
new File("."),
settings = Defaults.defaultSettings ++ Seq(resolvers := depResolvers) ++ Seq(
version := aVer,
organization := aOrg,
scalaVersion := "2.10.1",
javacOptions ++= Seq("-Xlint", "-g"),
shellPrompt := { (state: State) => { "%s:%s> ".format(aName, aVer) } },
// True to export the packaged JAR instead of just the compiled .class files.
exportJars := true,
// Disable using the Scala version in output paths and artifacts.
// When running 'publish' or 'publish-local' SBT would append a
// _<scala-version> postfix on artifacts. This turns that postfix off.
crossPaths := false,
      // Keep the scala-lang library out of the generated POMs for this artifact.
autoScalaLibrary := false,
// Only add src/main/java and src/test/java as source folders in the project.
// Not a "Scala" project at this time.
unmanagedSourceDirectories in Compile <<= baseDirectory(new File(_, "src/main/java"))(Seq(_)),
unmanagedSourceDirectories in Test <<= baseDirectory(new File(_, "src/test/java"))(Seq(_)),
// Tell SBT to include our .java files when packaging up the source JAR.
unmanagedSourceDirectories in Compile in packageSrc <<= baseDirectory(new File(_, "src/main/java"))(Seq(_)),
// Override the SBT default "target" directory for compiled classes.
classDirectory in Compile <<= baseDirectory(new File(_, "target/classes")),
// Tweaks the name of the resulting JAR on a "publish" or "publish-local".
artifact in packageBin in Compile <<= (artifact in packageBin in Compile, version) apply ((artifact, ver) => {
val newName = artifact.name + "-" + ver
Artifact(newName, artifact.`type`, artifact.extension, artifact.classifier, artifact.configurations, artifact.url)
}),
// Tweaks the name of the resulting source JAR on a "publish" or "publish-local".
artifact in packageSrc in Compile <<= (artifact in packageSrc in Compile, version) apply ((artifact, ver) => {
val newName = artifact.name + "-" + ver
Artifact(newName, artifact.`type`, artifact.extension, artifact.classifier, artifact.configurations, artifact.url)
}),
// Tweaks the name of the resulting POM on a "publish" or "publish-local".
artifact in makePom <<= (artifact in makePom, version) apply ((artifact, ver) => {
val newName = artifact.name + "-" + ver
Artifact(newName, artifact.`type`, artifact.extension, artifact.classifier, artifact.configurations, artifact.url)
}),
// Do not bother trying to publish artifact docs (scaladoc, javadoc). Meh.
publishArtifact in packageDoc := false,
// Override the global name of the artifact.
artifactName <<= (name in (Compile, packageBin)) { projectName =>
(config: ScalaVersion, module: ModuleID, artifact: Artifact) =>
var newName = projectName
if (module.revision.nonEmpty) {
newName += "-" + module.revision
}
newName + "." + artifact.extension
},
// Override the default 'package' path used by SBT. Places the resulting
// JAR into a more meaningful location.
artifactPath in (Compile, packageBin) ~= { defaultPath =>
file("dist") / defaultPath.getName
},
// Override the default 'test:package' path used by SBT. Places the
// resulting JAR into a more meaningful location.
artifactPath in (Test, packageBin) ~= { defaultPath =>
file("dist") / "test" / defaultPath.getName
},
libraryDependencies ++= deps,
retrieveManaged := true)
)
}
|
markkolich/kolich-twitter
|
project/Build.scala
|
Scala
|
mit
| 5,681 |
package com.ponkotuy.http
import java.io._
import java.nio.charset.Charset
import java.util.concurrent.TimeUnit
import javax.net.ssl.SSLContext
import com.ponkotuy.build.BuildInfo
import com.ponkotuy.config.ClientConfig
import com.ponkotuy.data.{Auth, MyFleetAuth}
import com.ponkotuy.restype._
import com.ponkotuy.tool.TempFileTool
import com.ponkotuy.util.Log
import org.apache.http.client.config.RequestConfig
import org.apache.http.client.entity.UrlEncodedFormEntity
import org.apache.http.client.methods.{CloseableHttpResponse, HttpGet, HttpHead, HttpPost}
import org.apache.http.client.utils.HttpClientUtils
import org.apache.http.conn.ssl.SSLConnectionSocketFactory
import org.apache.http.entity.ContentType
import org.apache.http.entity.mime.MultipartEntityBuilder
import org.apache.http.entity.mime.content.FileBody
import org.apache.http.impl.client.HttpClientBuilder
import org.apache.http.message.BasicNameValuePair
import org.json4s._
import org.json4s.native.Serialization
import org.json4s.native.Serialization.write
import scala.collection.JavaConverters._
import scala.collection.mutable
/** Access To MyFleetGirls
*
* @author
* Date: 14/03/23.
*/
object MFGHttp extends Log {
val UTF8 = Charset.forName("UTF-8")
val userAgent = s"${BuildInfo.name} client ver:${BuildInfo.version} w/JVM: ${util.Properties.javaVersion}"
val sslContext: SSLContext = new MFGKeyStore().getSslContext
val config = RequestConfig.custom()
.setConnectTimeout(60*1000)
.setRedirectsEnabled(true)
.setStaleConnectionCheckEnabled(true)
.build()
val httpBuilder = HttpClientBuilder.create()
.setUserAgent(userAgent)
.setDefaultRequestConfig(config)
.setSSLSocketFactory(new SSLConnectionSocketFactory(sslContext))
.setSslcontext(sslContext)
.setConnectionTimeToLive(5 * 60 , TimeUnit.SECONDS)
.setMaxConnPerRoute(1)
.setRetryHandler(new RetryWithWait(10, 10000L))
ClientConfig.clientProxyHost.foreach(httpBuilder.setProxy)
val http = httpBuilder.build()
implicit val formats = Serialization.formats(NoTypeHints)
def get(uStr: String, ver: Int = 1): Option[String] = {
getOrig(ClientConfig.getUrl(2) + uStr)
}
def getOrig(url: String): Option[String] = {
val get = new HttpGet(url)
var res:CloseableHttpResponse = null
try {
res = http.execute(get)
Some(allRead(res.getEntity.getContent))
} catch {
case e: Throwable =>
error(e.getStackTrace.mkString("\n"))
None
} finally {
HttpClientUtils.closeQuietly(res)
}
}
def post(p: HttpPostable)(implicit auth: Option[Auth], auth2: Option[MyFleetAuth]): Int = {
p match {
case m: MasterPostable => masterPost(m.url, m.data, m.ver)
case n: NormalPostable => post(n.url, n.data, n.ver)
case f: FilePostable =>
TempFileTool.save(f.file, f.ext) { file =>
postFile(f.url, f.fileBodyKey, f.ver)(file)
}
}
}
def post(uStr: String, data: String, ver: Int = 1)(implicit auth: Option[Auth], auth2: Option[MyFleetAuth]): Int = {
if(auth.isEmpty) { info(s"Not Authorized: $uStr"); return 1 }
val url = ClientConfig.postUrl(ver) + uStr
val content = Map("auth" -> write(auth), "auth2" -> write(auth2), "data" -> data)
postOrig(url, content)
}
def postOrig(url: String, data: Map[String, String]): Int = {
val post = new HttpPost(url)
post.setEntity(createEntity(data))
var res:CloseableHttpResponse = null
try {
res = http.execute(post)
val status = res.getStatusLine.getStatusCode
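      // Follow 3xx responses manually by re-posting the same form data to the Location header.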
if(300 <= status && status < 400) {
val location = res.getFirstHeader("Location").getValue
postOrig(location, data)
}
alertResult(res)
} catch {
case e: Exception => error(e.getStackTrace.mkString("\n")); 1
} finally {
HttpClientUtils.closeQuietly(res)
}
}
def masterPost(uStr: String, data: String, ver: Int = 1)(implicit auth2: Option[MyFleetAuth]): Int = {
val post = new HttpPost(ClientConfig.postUrl(ver) + uStr)
val entity = createEntity(Map("auth2" -> write(auth2), "data" -> data))
post.setEntity(entity)
var res:CloseableHttpResponse = null
try {
res = http.execute(post)
alertResult(res)
} catch {
case e: Exception => error(e.getStackTrace.mkString("\n")); 1
} finally {
HttpClientUtils.closeQuietly(res)
}
}
private def createEntity(map: Map[String, String]): UrlEncodedFormEntity = {
val nvps = map.map { case (key, value) =>
new BasicNameValuePair(key, value)
}
new UrlEncodedFormEntity(nvps.asJava, UTF8)
}
def postFile(uStr: String, fileBodyKey: String, ver: Int = 1)(file: File)(
implicit auth: Option[Auth], auth2: Option[MyFleetAuth]): Int = {
if(auth.isEmpty) { info(s"Not Authorized: $uStr"); return 601 }
val post = new HttpPost(ClientConfig.postUrl(ver) + uStr)
val entity = MultipartEntityBuilder.create()
entity.setCharset(UTF8)
entity.addTextBody("auth", write(auth), ContentType.APPLICATION_JSON)
entity.addTextBody("auth2", write(auth2), ContentType.APPLICATION_JSON)
entity.addPart(fileBodyKey, new FileBody(file))
post.setEntity(entity.build())
var res:CloseableHttpResponse = null
try {
res = http.execute(post)
alertResult(res)
} catch {
      case e: Exception => error((e.getMessage +: e.getStackTrace.map(_.toString)).mkString("\n")); 600
} finally {
HttpClientUtils.closeQuietly(res)
}
}
private def alertResult(res: CloseableHttpResponse): Int = {
val stCode = res.getStatusLine.getStatusCode
val content = allRead(res.getEntity.getContent)
if(stCode >= 400) {
error(s"Error Response ${stCode}\n${res.getStatusLine}\n${content}")
}
stCode
}
def existsImage(key: String, version: Int): Boolean =
head(s"/image/ship_obf/$key/$version.jpg", ver = 2).getStatusLine.getStatusCode == 200
def existsSound(s: SoundUrlId): Boolean =
head(s"/sound/ship_obf/${s.shipKey}/${s.soundId}/${s.version}.mp3", ver = 2).getStatusLine.getStatusCode == 200
  def existsMap(area: Int, info: Int, version: Int): Boolean =
    head(s"/map/${area}/${info}/${version}.jpg", ver = 2).getStatusLine.getStatusCode == 200
private def head(uStr: String, ver: Int = 1) = {
val head = new HttpHead(ClientConfig.getUrl(ver) + uStr)
var res:CloseableHttpResponse = null
try {
res = http.execute(head)
} finally {
HttpClientUtils.closeQuietly(res)
}
res
}
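  // Drains the given stream fully into a String, decoding as UTF-8 in 1024-char chunks.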
def allRead(is: InputStream): String = {
val reader = new InputStreamReader(is, "UTF-8")
val builder = mutable.StringBuilder.newBuilder
val buf = new Array[Char](1024)
var num = reader.read(buf)
while(0 <= num) {
builder ++= buf.take(num)
num = reader.read(buf)
}
builder.result()
}
}
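// Illustrative usage sketch (not part of the original source): shows the basic call pattern
// against MFGHttp. The endpoint paths and JSON payload are hypothetical placeholders, and both
// auth values are left empty, so post() takes its "not authorized" branch and returns 1.
object MFGHttpUsageSketch {
  def example(): Unit = {
    implicit val auth: Option[Auth] = None
    implicit val auth2: Option[MyFleetAuth] = None
    val body: Option[String] = MFGHttp.get("/example") // response body, or None on any failure
    val code: Int = MFGHttp.post("/example", """{"value":1}""") // HTTP status code, or 1 without auth
    println(s"get -> $body, post -> $code")
  }
}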
|
nekoworkshop/MyFleetGirls
|
client/src/main/scala/com/ponkotuy/http/MFGHttp.scala
|
Scala
|
mit
| 6,861 |
/**Copyright 2012 University of Helsinki, Daria Antonova, Herkko Virolainen, Panu Klemola
*
*Licensed under the Apache License, Version 2.0 (the "License");
*you may not use this file except in compliance with the License.
*You may obtain a copy of the License at
*
*http://www.apache.org/licenses/LICENSE-2.0
*
*Unless required by applicable law or agreed to in writing, software
*distributed under the License is distributed on an "AS IS" BASIS,
*WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*See the License for the specific language governing permissions and
*limitations under the License.*/
package format
import play.api.libs.json.Json.toJson
import play.api.libs.json.{ JsValue, Format, JsObject }
import anorm.{ Pk, Id, NotAssigned }
import models.Model
import java.util.Date
object ModelFormat {
import format.PkFormat._
import format.DateFormat._
implicit object ModelFormat extends Format[Model] {
def reads(json: JsValue): Model = Model(
      (json \ "id").as[Pk[Long]],
      (json \ "name").as[String],
      (json \ "dateCreated").as[Date])
def writes(model: Model): JsObject = JsObject(Seq(
"id" -> toJson(model.id),
"name" -> toJson(model.name),
"dateCreated" -> toJson(model.dateCreated)))
}
}
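// Illustrative round-trip sketch (not part of the original source), assuming Model is a plain
// case class whose constructor mirrors the three fields handled above (id, name, dateCreated).
object ModelFormatRoundTrip {
  private val fmt = ModelFormat.ModelFormat // the implicit Format[Model] defined above
  def roundTrip(): Boolean = {
    val model = Model(Id(1L), "example", new Date())
    val json = fmt.writes(model) // JsObject carrying id, name and dateCreated
    fmt.reads(json) == model // the rebuilt Model should equal the original
  }
}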
|
Herkko/ElasticWorkflow
|
app/format/ModelFormat.scala
|
Scala
|
apache-2.0
| 1,283 |
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *\\
* @ @ *
* # # # # (c) 2017 CAB *
* # # # # # # *
* # # # # # # # # # # # # *
* # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # *
* # # # # # # # # # # # # # # # # # # *
* @ @ *
\\* * http://github.com/alexcab * * * * * * * * * * * * * * * * * * * * * * * * * */
package mathact.tools.math
import mathact.core.bricks.blocks.BlockContext
import mathact.core.bricks.linking.LinkThrough
import mathact.core.bricks.plumbing.wiring.obj.{ObjOnStart, ObjWiring}
import mathact.tools.Tool
/** Base class for boolean logic operators
* Created by CAB on 26.12.2016.
*/
abstract class BooleanLogic(context: BlockContext, name: String, imgPath: String)
extends Tool(context, name, imgPath) with ObjWiring with ObjOnStart
with LinkThrough[Boolean, Boolean]{
//Parameters
val randomDelayRange = 10
//Evaluation function
protected def eval(input: Seq[Boolean]): Boolean
//Variables
@volatile private var _default = false
@volatile private var values = Array[Boolean]()
//Outflow
private val outflow = new Outflow[Boolean] { def send(v: Boolean): Unit = pour(v) }
//Functions
private def buildInflow(): Inflow[Boolean] = new Inflow[Boolean]{
//Construction
val i = values.length
values = Array.fill(i + 1)(_default)
//Methods
protected def drain(v: Boolean) = {
if(values(i) != v){
values(i) = v
outflow.send(eval(values))}}}
//DSL
def default: Boolean = _default
def default_=(v: Boolean){ _default = v }
//On start
protected def onStart(): Unit = outflow.send(eval(values))
//Connection point
def in = Inlet(buildInflow())
val out = Outlet(outflow)}
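// Minimal sketch (not part of the original source) of a concrete operator built on BooleanLogic:
// an AND gate that emits true only while every connected input is true. The display name and
// image path passed to the constructor are hypothetical placeholders.
class AndGate(context: BlockContext) extends BooleanLogic(context, "and", "and.png"){
  protected def eval(input: Seq[Boolean]): Boolean = input.forall(identity)}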
|
AlexCAB/MathAct
|
mathact_tools/src/main/scala/mathact/tools/math/BooleanLogic.scala
|
Scala
|
mit
| 2,469 |