code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5–1M)
---|---|---|---|---|---
package de.sebastiankreutzer.chip8
import java.awt.BorderLayout
import java.awt.Dimension
import java.awt.GridLayout
import java.awt.event.ActionEvent
import java.awt.event.ActionListener
import java.awt.event.KeyAdapter
import java.awt.event.KeyEvent
import java.awt.event.KeyListener
import javax.swing.ButtonGroup
import javax.swing.JDialog
import javax.swing.JFileChooser
import javax.swing.JFrame
import javax.swing.JLabel
import javax.swing.JMenu
import javax.swing.JMenuBar
import javax.swing.JMenuItem
import javax.swing.JPanel
import javax.swing.JRadioButtonMenuItem
import javax.swing.JToggleButton
import javax.swing.KeyStroke
import javax.swing.WindowConstants
import javax.swing.JOptionPane
class UI extends JFrame with InputProcessor with KeyListener {
val keys = new Array[Boolean](1024)
val Size = new Dimension(800, 600)
val screen = new Screen()
setTitle(Main.Title + " - Chip8 Emulator")
setVisible(true)
setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
screen.setPreferredSize(Size)
screen.setMinimumSize(Size)
screen.setMaximumSize(Size)
add(screen)
pack()
setLocationRelativeTo(null)
val bar = new JMenuBar()
val fileMenu = new JMenu("File")
val loadRomItem = new JMenuItem("Load ROM")
loadRomItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
val fc = new JFileChooser(Main.configs.romDir)
val result = fc.showOpenDialog(UI.this)
if (result == JFileChooser.APPROVE_OPTION) {
val file = fc.getSelectedFile()
Main.loadRom(file)
}
}
})
fileMenu.add(loadRomItem)
bar.add(fileMenu)
val emulatorMenu = new JMenu("Emulator")
// val startItem = new JMenuItem("Start")
// startItem.addActionListener(new ActionListener() {
// override def actionPerformed(e: ActionEvent) {
// Main.startVM()
// }
// })
// emulatorMenu.add(startItem)
//
// val stopItem = new JMenuItem("Stop")
// stopItem.addActionListener(new ActionListener() {
// override def actionPerformed(e: ActionEvent) {
// Main.stopVM()
// }
// })
// emulatorMenu.add(stopItem)
emulatorMenu.addSeparator()
val pauseItem = new JMenuItem("Pause")
pauseItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
Main.pauseVM()
}
})
emulatorMenu.add(pauseItem)
val resumeItem = new JMenuItem("Resume")
resumeItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
Main.resumeVM()
}
})
emulatorMenu.add(resumeItem)
emulatorMenu.addSeparator()
val resetItem = new JMenuItem("Reset")
resetItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
Main.resetRom()
}
})
emulatorMenu.add(resetItem)
emulatorMenu.addSeparator()
bar.add(emulatorMenu)
val freqMenu = new JMenu("Frequency")
val freqGroup = new ButtonGroup()
val freqs = Array(10, 50, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000, 5000, 10000)
freqs.foreach(freq => {
val freqItem = new JMenuItem(freq + " Hz")
freqItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
Main.setVMFrequency(freq)
}
})
freqGroup.add(freqItem)
freqMenu.add(freqItem)
})
emulatorMenu.add(freqMenu)
val stateMenu = new JMenu("States")
val loadItem = new JMenuItem("Load")
loadItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
Main.loadState()
}
})
loadItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F1, 0))
stateMenu.add(loadItem)
val saveItem = new JMenuItem("Save")
saveItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
Main.saveState()
}
})
saveItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.VK_F2, 0))
stateMenu.add(saveItem)
val slotMenu = new JMenu("Select Slot")
val slotGroup = new ButtonGroup()
for (i <- 0 to 9) {
val slotItem = new JRadioButtonMenuItem("Slot " + i)
slotItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
Main.selectSlot(i)
}
})
slotItem.setAccelerator(KeyStroke.getKeyStroke(KeyEvent.getExtendedKeyCodeForChar(i + 48), 0))
slotGroup.add(slotItem)
slotMenu.add(slotItem)
}
stateMenu.add(slotMenu)
bar.add(stateMenu)
val optionMenu = new JMenu("Options")
val keyItem = new JMenuItem("Key Bindings")
val keyDialog = new JDialog(this, "Key Bindings")
val keyPanel = new JPanel()
val grid = new GridLayout(4, 4)
grid.setHgap(20)
grid.setVgap(10)
val keyButtonGroup = new ButtonGroup()
for (i <- 0 to 15) {
val buttonPanel = new JPanel(new BorderLayout())
val keyLabel = new JLabel("Button " + i)
buttonPanel.add(keyLabel, BorderLayout.WEST)
val keyButton = new JToggleButton(KeyEvent.getKeyText(Main.configs.getKeyBinding(i)))
keyButton.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
val keyListener = new KeyAdapter() {
override def keyPressed(e:KeyEvent) {
Main.configs.setKeyBinding(i, e.getKeyCode)
keyButton.setText(KeyEvent.getKeyText(e.getKeyCode))
keyButton.removeKeyListener(this)
}
}
keyButton.addKeyListener(keyListener)
}
})
keyButtonGroup.add(keyButton)
buttonPanel.add(keyButton, BorderLayout.EAST)
keyPanel.add(buttonPanel)
}
keyPanel.setLayout(grid)
keyDialog.setContentPane(keyPanel)
keyItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
keyDialog.setLocationRelativeTo(UI.this)
keyDialog.setVisible(true)
}
})
keyDialog.pack()
keyDialog.setDefaultCloseOperation(WindowConstants.HIDE_ON_CLOSE)
optionMenu.add(keyItem)
val viewMenu = new JMenu("Color Scheme")
val colorGroup = new ButtonGroup()
ColorScheme.All.foreach(cs => {
val colorItem = new JRadioButtonMenuItem(cs.name)
colorItem.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent) {
screen.setColorScheme(cs)
Main.configs.colorScheme = cs
}
})
colorGroup.add(colorItem)
viewMenu.add(colorItem)
})
optionMenu.add(viewMenu)
bar.add(optionMenu)
val helpMenu = new JMenu("Help")
val aboutItem = new JMenuItem("About")
aboutItem.addActionListener(new ActionListener() {
override def actionPerformed(e :ActionEvent) {
JOptionPane.showMessageDialog(UI.this, "<html><body>Emul8 v1.0<br>Chip8 Emulator by Sebastian Kreutzer (2014)<br><a href=\"http://sebastian-kreutzer.de\">http://sebastian-kreutzer.de/</a></body></html>")
}
})
helpMenu.add(aboutItem)
bar.add(helpMenu)
setJMenuBar(bar)
screen.addKeyListener(this)
screen.setFocusable(true)
screen.requestFocusInWindow()
def getScreen(): Screen = screen
override def isKeyDown(key: Int): Boolean = keys(Main.configs.getKeyBinding(key))
override def getPressedKey(): Int = {
var key = -1
for (i <- 0 to 15) {
if (keys(Main.configs.getKeyBinding(i)))
key = i
}
key
}
override def keyPressed(e: KeyEvent) = {
keys(e.getKeyCode()) = true
//println("key " + Main.configs.getReverseKeyBinding(e.getKeyCode()) + " down (code=" + e.getKeyCode() + ")")
}
override def keyReleased(e: KeyEvent) = {
keys(e.getKeyCode()) = false
}
override def keyTyped(e: KeyEvent) {}
}
| sebastiankreutzer/chip8 | src/main/scala/de/sebastiankreutzer/chip8/UI.scala | Scala | gpl-2.0 | 7,548 |
package cn.changhong.web.init
import java.net.InetSocketAddress
import java.util.concurrent.TimeUnit
import cn.changhong.web.router._
import com.twitter.finagle.builder.ServerBuilder
import com.twitter.finagle.http.{Request, RichHttp, Http}
import com.twitter.util.Duration
/**
* Created by yangguo on 14-12-8.
*/
object Start {
def main(args:Array[String]): Unit ={
GlobalConfigFactory.server_ip=args(0)
GlobalConfigFactory.server_port=args(1).toInt
val service = AccessLogFilterService andThen ExceptionFilterService andThen SpiderActionInspectorFilterService andThen TimeoutFilterService andThen ForeRouter
ServerBuilder()
.codec(RichHttp[Request](Http()))
.readTimeout(Duration(5,TimeUnit.SECONDS))
.bindTo(new InetSocketAddress(GlobalConfigFactory.server_ip,GlobalConfigFactory.server_port))
.name(args(2))
.build(service)
}
}
| guoyang2011/myfinagle | WebTemplate/src/main/scala/cn/changhong/web/init/Start.scala | Scala | apache-2.0 | 888 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input}
case class CP35(value: Option[Int]) extends CtBoxIdentifier(name = "Vehicle expenses") with CtOptionalInteger with Input
object CP35 {
def apply(int: Int): CP35 = CP35(Some(int))
}
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP35.scala | Scala | apache-2.0 | 898 |
package plob
import java.nio.file.attribute.BasicFileAttributes
import java.nio.file.SimpleFileVisitor
import java.nio.file.WatchEvent
import java.nio.file.FileSystems
import java.nio.file.FileVisitResult
import java.nio.file.Files
import java.nio.file.LinkOption
import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.StandardWatchEventKinds
import java.util.Calendar
import scala.annotation.tailrec
import scala.collection.JavaConversions.asScalaBuffer
import scala.collection.mutable.ListBuffer
import scala.sys.process.Process
import builders.pipe
import plob.builders.toFilter
import plob.builders.toPath
import java.nio.file.WatchService
import java.nio.file.WatchKey
import scala.sys.process.ProcessLogger
class ProcessLoggerAnnotedPath(who : String, change : Change, path : Path, outLevel : Level = Level.Info, errLevel : Level = Level.Warning) extends ProcessLogger {
private var _annotedPaths : List[AnnotedPath] = Nil
def annotedPaths = _annotedPaths.toList
def out(s: => String): Unit = { _annotedPaths = AnnotedPath(change, path, Set(Marker(who, s, outLevel))) :: _annotedPaths }
def err(s: => String): Unit = { _annotedPaths = AnnotedPath(change, path, Set(Marker(who, s, errLevel))) :: _annotedPaths }
def buffer[T](f: => T): T = f
// def close(): Unit = writer.close()
// def flush(): Unit = writer.flush()
}
sealed trait Position
object Position {
case class OffSet(v : Int) extends Position
case class LC(line : Int, column : Int) extends Position
case class Range(begin : Position, end : Position) extends Position
}
sealed trait Level
object Level {
case object Trace extends Level
case object Debug extends Level
case object Info extends Level
case object Warning extends Level
case object Error extends Level
case object Fatal extends Level
}
case class Marker(who : String, what : String, level : Level, where : Option[Position] = None, when : Option[Calendar] = None)
sealed trait Change
object Change {
case object Created extends Change
case object Modified extends Change
case object FakeModified extends Change
case object Deleted extends Change
case object Test extends Change
}
case class AnnotedPath(change : Change, path : Path, markers : Set[Marker] = Set.empty)
class AnnotedPathGenerator(rootDir : Path) {
private val _rootDir = rootDir.normalize()
def all : builders.AnnotedPathS = {
val back = new ListBuffer[AnnotedPath]()
Files.walkFileTree(_rootDir, new SimpleFileVisitor[Path]() {
override def visitFile(f : Path, attrs : BasicFileAttributes) : FileVisitResult = {
//println("...", f, f.getParent, f.getRoot())
back += AnnotedPath(Change.FakeModified, f)
FileVisitResult.CONTINUE
}
})
back // .toSeq
}
//def watcher :(builders : builders.Builder,)
//TODO manage StandardWatchEventKinds.OVERFLOW
private def toAnnotedPath(dir : Path, event : WatchEvent[_]) : AnnotedPath = {
val status = event.kind match {
case StandardWatchEventKinds.ENTRY_CREATE => Change.Created
case StandardWatchEventKinds.ENTRY_DELETE => Change.Deleted
case _ => Change.Modified
}
AnnotedPath(status, dir.resolve(event.context().asInstanceOf[Path]).normalize())
}
def runAllOnce(build : builders.Builder, resultsCallback : (builders.AnnotedPathS) => Unit) {
val apathsAfter = build(all)
resultsCallback(apathsAfter)
}
def watch(build : builders.Builder, resultsCallback : (builders.AnnotedPathS) => Unit) {
val watchService = _rootDir.getFileSystem().newWatchService()
var watchKeys = Map.empty[WatchKey,Path]
def register(dir : Path) = {
val wkey = dir.register(watchService, StandardWatchEventKinds.ENTRY_CREATE, StandardWatchEventKinds.ENTRY_MODIFY, StandardWatchEventKinds.ENTRY_DELETE)
println("++ ", (wkey -> dir))
watchKeys += (wkey -> dir)
}
// loop forever to watch directory
@tailrec
def waitEvent(watchService : WatchService) {
import scala.collection.JavaConversions._
println("waiting FS event ...")
val wkey0 = watchService.take() // this call is blocking until events are present
val wkeys = ListBuffer[WatchKey](wkey0)
println(">>> ", wkey0)
// TODO refactor
// grab all enqueued changes
var wkeyi : WatchKey = null
do {
wkeyi = watchService.poll()
if (wkeyi != null) wkeys += wkeyi
} while(wkeyi != null)
// poll for file system events on the WatchKeys
val apathsBefore = for {
wkey <- wkeys.distinct
dir <- List(watchKeys(wkey))
event <- wkey.pollEvents().toSeq
} yield {
println(">> ", dir, wkey)
toAnnotedPath(dir, event)
}
println("build trigger by", apathsBefore)
// watch newly created directory
for (apath <- apathsBefore) {
if (apath.change == Change.Created && Files.isDirectory(apath.path, LinkOption.NOFOLLOW_LINKS)) {
register(apath.path)
}
}
//TODO stop watching deleted directory (and subdirectory)
val apathsAfter = build(apathsBefore)
resultsCallback(apathsAfter)
// if the watched directed gets deleted, get out of run method
for (wkey <- wkeys) {
if (!wkey.reset()) {
//System.out.println("No longer valid");
wkey.cancel()
watchKeys -= (wkey)
}
}
if (watchKeys.isEmpty) {
watchService.close()
} else {
waitEvent(watchService)
}
}
// register dir and subdirectory
Files.walkFileTree(_rootDir, new SimpleFileVisitor[Path]() {
override def preVisitDirectory(dir : Path, attrs : BasicFileAttributes) : FileVisitResult = {
register(dir)
FileVisitResult.CONTINUE
}
})
waitEvent(watchService)
}
}
| davidB/plob | src/main/scala/plob/core.scala | Scala | unlicense | 5,863 |
package com.outr.stripe.balance
import com.outr.stripe.Money
case class Reversal(id: String,
`object`: String,
amount: Money,
balanceTransaction: String,
created: Long,
currency: String,
metadata: Map[String, String],
transfer: String)
| outr/scala-stripe | core/jvm/src/main/scala/com/outr/stripe/balance/Reversal.scala | Scala | mit | 378 |
package org.orbeon.dom
import org.orbeon.dom.io.{OutputFormat, XMLWriter}
import org.orbeon.dom.tree.{AbstractNode, WithParent}
import org.orbeon.io.{IOUtils, StringBuilderWriter}
import scala.jdk.CollectionConverters._
import java.{util => ju, lang => jl}
object Node {
implicit class NodeOps[N <: Node](private val n: N) extends AnyVal {
def serializeToString(format: OutputFormat = XMLWriter.DefaultFormat): String =
IOUtils.useAndClose(new StringBuilderWriter) { writer =>
new XMLWriter(writer, format).write(n)
writer.result
}
/**
* Go over the `Node` and its children and make sure that there are no two contiguous text nodes so as to ensure that
* XPath expressions run correctly. As per XPath 1.0 (http://www.w3.org/TR/xpath):
*
* "As much character data as possible is grouped into each text node: a text node never has an immediately
* following or preceding sibling that is a text node."
*/
def normalizeTextNodes: N = {
val nodesToDetach = new ju.ArrayList[Node]
n.accept(
new VisitorSupport {
override def visit(elem: Element): Unit = {
var previousNode: Node = null
var sb: jl.StringBuilder = null
for (currentNode <- elem.nodeIterator) {
if (previousNode ne null) {
previousNode match {
case previousNodeText: Text if currentNode.isInstanceOf[Text] =>
if (sb eq null)
sb = new jl.StringBuilder(previousNodeText.getText)
sb.append(currentNode.getText)
nodesToDetach.add(currentNode)
case _: Text =>
// Update node if needed
if (sb ne null)
previousNode.setText(sb.toString)
previousNode = currentNode
sb = null
case _ =>
previousNode = currentNode
sb = null
}
} else {
previousNode = currentNode
sb = null
}
}
if ((previousNode ne null) && (sb ne null))
previousNode.setText(sb.toString)
}
}
)
// Detach nodes only in the end so as to not confuse the acceptor above
for (currentNode <- nodesToDetach.asScala)
currentNode.detach()
n
}
}
def nodeTypeName(node: Node): String = node match {
case _: Element => "Element"
case _: Attribute => "Attribute"
case _: Text => "Text"
case _: Document => "Document"
case _: Comment => "Comment"
case _: ProcessingInstruction => "ProcessingInstruction"
case _: Namespace => "Namespace"
case _ => throw new IllegalStateException
}
}
trait Node extends Cloneable {
def getType: Int
def getParent: Element
def parentElemOpt: Option[Element] = Option(getParent)
def setParent(parent: Element): Unit
def getDocument: Document
def setDocument(document: Document): Unit
def documentOpt: Option[Document] = Option(getDocument)
def getName: String
def getText: String
def setText(text: String): Unit
def getStringValue: String
def detach(): Node
def accept(visitor: Visitor): Unit
// TODO: Move this to a separate object, like `Node.deepCopy()` and use pattern matching.
// Maybe check this: https://tpolecat.github.io/2015/04/29/f-bounds.html
def deepCopy: Node
def createCopy: Node
}
object Text {
def apply(text: String): Text = new Text(text ensuring (_ ne null))
}
class Text(var text: String) extends AbstractNode with WithParent {
def getType: Int = 3
override def getText: String = text
override def setText(text: String): Unit = this.text = text
def accept(visitor: Visitor): Unit = visitor.visit(this)
override def toString = s"""Text("$text")"""
}
object Comment {
def apply(text: String): Comment = new Comment(text ensuring (_ ne null))
}
class Comment(var text: String) extends AbstractNode with WithParent {
def getType: Int = 8
override def getText: String = text
override def setText(text: String): Unit = this.text = text
def accept(visitor: Visitor): Unit = visitor.visit(this)
override def toString = s"""Comment("$text")"""
}
object ProcessingInstruction {
def apply(target: String, data: String): ProcessingInstruction =
new ProcessingInstruction(target, data)
}
class ProcessingInstruction(var target: String, var text: String)
extends AbstractNode with WithParent {
def getType: Int = 7
override def getName: String = getTarget
def getTarget: String = target
def setTarget(target: String): Unit = this.target = target
override def getText: String = text
override def setText(text: String): Unit = this.text = text
def accept(visitor: Visitor): Unit = visitor.visit(this)
override def toString = s"""ProcessingInstruction("$target", "$text")"""
}
| orbeon/orbeon-forms | dom/src/main/scala/org/orbeon/dom/Node.scala | Scala | lgpl-2.1 | 5,096 |
package a71.对数
import a71.MathCount
object Runner {
def number2FromInt(num: Int): Number2 = {
lazy val genTailNext1: Number2Bottom => Number2 = num2 => {
def number2(n: Int, zero: => Number2): Number2 = if (n > 0) Number2S(number2(n - 1, zero)) else zero
lazy val n2Zero: Number2 = Number2T(tailNext = () => num2, genTailNext = genTailNext1)
number2(num, n2Zero)
}
lazy val genTailNext2: Number2Bottom => Number2Bottom = num2 => {
def number2(n: Int, zero: => Number2Bottom): Number2Bottom = if (n > 0) Number2SBottom(number2(n - 1, zero)) else zero
lazy val n2Zero: Number2Bottom = Number2TBottom(tailNext = () => num2, genTailNext = genTailNext2)
number2(num, n2Zero)
}
lazy val numZero: Number2Bottom = Number2Zero(() => genTailNext2(numZero))
genTailNext1(numZero)
}
def number1FromInt(num: Int): Number1 = if (num > 0) Number1S(number1FromInt(num - 1)) else Number1T
def count(number: Number4): Int = number match {
case Number4S(tail) => count(tail) + 1
case Number4T => 0
}
def main(arr: Array[String]): Unit = {
for {
i1 <- 0 to 600
i2 <- 2 to 20
} {
val result = number1FromInt(i1).method1(number2FromInt(i2 - 1))
val numResult = count(result)
assert(numResult == MathCount.log(底数 = i2, 真数 = i1))
}
}
}
| djx314/ubw | a71-行李箱密码锁/src/main/scala/a71/对数/Runner.scala | Scala | bsd-3-clause | 1,415 |
package me.heaton.shortestpath
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
class FloydSpec extends Specification with Mockito{
val graph = new Graph("AB5, BC4, CD8, DC8, DE6, AD5, CE2, EB3, AE7")
val floid = new Floyd(graph)
"the shortest path of A to C" should {
"be 9" in {
floid.shortest("A", "C") === 9
}
}
"the shortest path of B to B" should {
"be 9" in {
floid.shortest("B", "B") === 9
}
}
}
| heaton/hello-scala | src/test/scala/me/heaton/shortestpath/FloydSpec.scala | Scala | mit | 473 |
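Both expected values in FloydSpec above follow from the edge list in the graph string: the cheapest route from A to C is A -> B -> C at 5 + 4 = 9 (A -> D -> C costs 13), and the cheapest cycle back to B is B -> C -> E -> B at 4 + 2 + 3 = 9. Below is a minimal, self-contained sanity check of that arithmetic; it assumes nothing about the Graph or Floyd classes, which are not shown here.

```scala
object FloydSpecExpectations extends App {
  // Edge list copied from the spec's graph string; keys are (from, to) node pairs.
  val edges: Map[(Char, Char), Int] =
    "AB5, BC4, CD8, DC8, DE6, AD5, CE2, EB3, AE7"
      .split(",\\s*")
      .map(e => (e(0), e(1)) -> e.drop(2).toInt)
      .toMap

  // Total cost of walking a route given as a string of node names.
  def pathCost(route: String): Int =
    route.sliding(2).map(p => edges((p(0), p(1)))).sum

  assert(pathCost("ABC") == 9)  // shortest A -> C: 5 + 4
  assert(pathCost("BCEB") == 9) // shortest B -> B cycle: 4 + 2 + 3
  println("FloydSpec expectations verified by hand")
}
```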
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package security
import org.scalatestplus.play.PlaySpec
class EncryptionSpec extends PlaySpec {
val plainText = "testing123"
class Setup {
object EncryptionTest extends Encryption
}
"Inputting a string" should {
"encrypt the string" in new Setup {
val encrypted = EncryptionTest.sha512(plainText)
assert(encrypted != plainText)
}
"output a 128 character length string" in new Setup {
val encrypted = EncryptionTest.sha512(plainText)
encrypted.length mustBe 128
}
}
}
| chrisjwwalker/cjww-auth-service | test/security/EncryptionSpec.scala | Scala | apache-2.0 | 1,257 |
package cas.web.interface
import java.io.File
import akka.actor.{ActorSystem, Props}
import akka.io.IO
import akka.pattern.ask
import akka.util.Timeout
import cas.analysis.estimation._
import cas.service.AProducer
import cas.utils._
import cas.web.dealers.vk.VkApiDealer
import com.typesafe.config.ConfigFactory
import spray.can.Http
import scala.concurrent.duration._
import scala.util.Try
import org.elasticsearch.common.settings.Settings
import scala.xml._
import scala.xml.factory.XMLLoader
import scala.xml.parsing.NoBindingFactoryAdapter
import org.ccil.cowan.tagsoup.jaxp.SAXFactoryImpl
import Utils._
import cas.persistence.SubjectsGrader
import cas.utils.Files._
import cas.utils.UtilAliases.ErrorMsg
import cas.web.pages.ControlPage
import scala.concurrent.duration._
import scala.xml.{Node, XML}
import cats.data.Validated.{invalid, valid}
import cats.std.all._
import cats.syntax.cartesian._
object ImplicitRuntime {
implicit val system = ActorSystem("web-service")
implicit val timeout = 10.seconds
}
import spray.client.pipelining._
object Boot extends App {
import ImplicitRuntime._
import system.dispatcher
implicit val t = Timeout(timeout)
val interface = system.actorOf(Props[AInterfaceControl], "interface-control")
val config = ConfigFactory.load()
val addr = config.getString("cas.interface")
val port = config.getInt("cas.port")
println(s"Starting server on $addr:$port")
IO(Http) ? Http.Bind(interface, addr, port)
// transformData
/*val a = (valid[String, Int](1) combine
valid[String, Int](2) combine
valid[String, Int](3)) map {_ + _ + _}
println(a)*/
def transformData = {
val weights = Array(3.1642, -61.6405, -15.6417, -1.7404)
val classifier = new SubjectsClassificator(weights,
new StaticLoyaltyEstimator(StaticLoyaltyConfigs()),
new ContinuousInvRelEstimator(ContinuousInvRelEstimatorConfigs(ControlPage.searcher)),
new CorrectnessEstimator(CorrectnessConfigs()))
val grader: SubjectsGrader = new SubjectsGrader()
val path = Files.resources + "/cas/data/marked/marked_testing.json"
for {
// data <- grader.convertDumpToData(path)
estims <- grader.convertDumpToEstimations(path)
} yield {
println(estims.length)
}
}
}
| bk0606/CAS | src/main/scala/cas/web/interface/Boot.scala | Scala | mit | 2,284 |
package edu.berkeley.ce.sparkrocks
import breeze.linalg
import breeze.linalg.{DenseMatrix, DenseVector}
import org.apache.commons.lang3.builder.HashCodeBuilder
object Joint {
/**
* Find the distance of the joint plane from the input local origin.
*
* @param normalVec Normal vector to the joint plane
* @param localOrigin The local origin from which the distance is referenced. This should be in global coordinates.
* @param center The center of the joint plane. This should be in global coordinates.
* @return
*/
def findDistance(normalVec: Array[Double], localOrigin: Array[Double],
center: Array[Double]): Double = {
assert(normalVec.length == 3 && localOrigin.length == 3 && center.length == 3)
val w = DenseVector.zeros[Double](3)
w(0) = localOrigin(0) - center(0)
w(1) = localOrigin(1) - center(1)
w(2) = localOrigin(2) - center(2)
val n = DenseVector[Double](normalVec)
NumericUtils.roundToTolerance(-(n dot w) / linalg.norm(n))
}
/**
* Converts point from local to global coordinates
*
* @param point The point to transform
* @param localOrigin Local origin's global coordinates
* @param normal Normal to joint plane
* @param dip Dip direction of plane
* @return Tuple that contains x, y and z coordinates of point in global coordinates
*/
private def localPointToGlobal(point: Array[Double], localOrigin: Array[Double],
normal: Array[Double], dip: Double): Array[Double] = {
assert(point.length == 3 && localOrigin.length == 3 && normal.length == 3)
val Nplane = if (normal(2) > -NumericUtils.EPSILON) {
// Ensures that normal will always point in -z global direction (ensures right-handed local coordinates)
-1.0 * DenseVector[Double](normal)
} else {
DenseVector[Double](normal)
}
val strike = (dip - math.Pi / 2) % (2 * math.Pi) // strike = dip - pi/2 (US convention)
val Nstrike = DenseVector[Double](math.cos(-strike), math.sin(-strike), 0.0)
val Ndip = linalg.cross(Nplane, Nstrike)
// Q defines the linear transformation to convert to global coordinates
val Q = DenseMatrix.zeros[Double](3,3)
Q(::, 0) := Nstrike
Q(::, 1) := Ndip
Q(::, 2) := Nplane
// Transform point from local to global coordinates
val transformedPoint = Q * DenseVector[Double](point)
Array(
transformedPoint(0) + localOrigin(0),
transformedPoint(1) + localOrigin(1),
transformedPoint(2) + localOrigin(2)
)
}
/**
* Find a bounding sphere for a non-persistent joint. This function is not
* intended for use with persistent joints.
*
* @param normalVec The normal vector of the plane in which the joint lies
* @param distance The distance of the joint's plane from its local origin
* @param centerX The x coordinate of the joint's center
* @param centerY The y coordinate of the joint's center
* @param centerZ The z coordinate of the joint's center
* @param faces A sequence of faces specifying the joint's shape
* @param dip Dip direction of joint plane
* @return A pair where the first element is a 3-element double array giving the center of
* the bounding sphere and the second element is the radius of the
* bounding sphere.
*/
private def findBoundingSphere(normalVec: Array[Double], distance: Double, centerX: Double,
centerY: Double, centerZ: Double, faces: Seq[(Array[Double],Double)], dip: Double):
(Array[Double], Double) = {
assert(normalVec.length == 3)
// Linear program only in 2-D since constraints are specified entirely
// by persistence of joint within the joint plane
val basisVectors = Array(
Array(1.0, 0.0),
Array(0.0, 1.0),
Array(-1.0, 0.0),
Array(0.0, -1.0)
)
val maxCoordinates = basisVectors.map { v =>
// Only 2 variables in linear program - in-plane bounding circle so 2-D
val linProg = new LinearProgram(2)
linProg.setObjFun(v, LinearProgram.MAX)
faces foreach { case (normal, d) =>
val a = normal(0)
val b = normal(1)
if (d < 0.0) {
val coeffs = Array(NumericUtils.applyTolerance(-a), NumericUtils.applyTolerance(-b))
val rhs = NumericUtils.applyTolerance(-d)
linProg.addConstraint(coeffs, LinearProgram.LE, rhs)
} else {
val coeffs = Array(NumericUtils.applyTolerance(a), NumericUtils.applyTolerance(b))
val rhs = NumericUtils.applyTolerance(d)
linProg.addConstraint(coeffs, LinearProgram.LE, rhs)
}
}
val results = linProg.solve().get._1
val resultsSeq = Seq(results(0), results(1))
// Values of principal axes vectors set to 0.0 exacly, so okay to check for equality of Double
resultsSeq.filter(math.abs(_) > NumericUtils.EPSILON) match {
case Nil => 0.0
case x+:xs => x
}
}
val pairedCoords = maxCoordinates.take(2).zip(maxCoordinates.takeRight(2))
val center = pairedCoords.map { case (x, y) => 0.5 * (x + y) }
val diffVector = pairedCoords.map { case (x, y) => x - y }
val radius = 0.5 * linalg.norm(DenseVector[Double](diffVector))
// Shift from Joint local coordinates to global coordinates
val transformedCenter = Joint.localPointToGlobal(Array(center(0), center(1), 0.0),
Array(centerX, centerY, centerZ), normalVec, dip)
(transformedCenter, radius)
}
/**
* Find the vector indicating dip direction of the joint plane. Global positive x-axis points North and
* z-axis oriented with positive upward. Positive y-axis will point west based on this orientation.
*
* @param normalVec Normal vector to the joint plane
* @return Dip direction of the plane, indicating direction of greatest increase in z. Return as vector (a, b, 0)
*/
private def dipDirVector(normalVec: Array[Double]): DenseVector[Double] = {
assert(normalVec.length == 3)
// Dip direction is in opposite direction of gradient indicating greatest increase in z.
if ((math.abs(normalVec(2)) > NumericUtils.EPSILON) &&
(math.abs(math.abs(normalVec(2)) - 1.0) > NumericUtils.EPSILON)) {
DenseVector[Double](normalVec(0) / normalVec(2), normalVec(1) / normalVec(2), 0.0)
} else if (math.abs(normalVec(2)) < NumericUtils.EPSILON) {
// Joint is vertical, assigns non-zero z component that will be caught in dipDir function
DenseVector[Double](0.0, 0.0, -1.0)
} else {
// Joint is horizontal, dip direction arbitrarily assigned to 90 degrees so that strike is 0 degrees
DenseVector[Double](0.0, -1.0, 0.0)
}
}
/**
* Finds the dip direction of the input joint as an azimuth. Global positive x-axis points North.
*
* @param normalVec Normal vector to the joint plane
* @return Dip direction as an azimuth in radians
*/
private def dipDir(normalVec: Array[Double]): Double = {
assert(normalVec.length == 3)
val dipVector = Joint.dipDirVector(normalVec)
val xAxis = DenseVector[Double](1.0, 0.0, 0.0)
if (dipVector(2) != -1.0) { // Checks if joint is vertical - set to -1.0 in dipDirVector for vertical joints
if (normalVec(1) > 0.0) {
2.0*math.Pi - math.acos((xAxis dot dipVector) / (linalg.norm(xAxis) * linalg.norm(dipVector)))
} else {
math.acos((xAxis dot dipVector) / (linalg.norm(xAxis) * linalg.norm(dipVector)))
}
} else {
val normal = DenseVector[Double](normalVec(0), normalVec(1), normalVec(2))
if (normalVec(1) > 0.0) {
2.0*math.Pi - math.acos((xAxis dot normal) / (linalg.norm(normal) * linalg.norm(xAxis)))
} else {
math.acos((xAxis dot normal) / (linalg.norm(normal) * linalg.norm(xAxis)))
}
}
}
/**
* Finds the dip angle of the input joint.
*
* @param normalVec Normal vector to the joint plane
* @return Dip angle in radians
*/
private def dipAngle(normalVec: Array[Double]): Double = {
assert(normalVec.length == 3)
val dipVector = Joint.dipDirVector(normalVec)
val normal = DenseVector[Double](normalVec(0), normalVec(1), normalVec(2))
// Checks for horizontal and vertical joints. This is set in dipDirVector function so can compare doubles exactly
if ((dipVector(1) != -1.0) && (dipVector(2) != -1.0)) {
if (normal(2) > 0.0) {
math.Pi/2.0 - math.acos((normal dot dipVector) / (linalg.norm(normal) * linalg.norm(dipVector)))
} else {
math.acos((normal dot dipVector) / (linalg.norm(normal) * linalg.norm(dipVector))) - math.Pi/2.0
}
} else if (dipVector(1) == -1.0) { // Joint is horizontal
0.0
} else { // Joint is vertical
math.Pi/2.0
}
}
}
/**
* A simple data structure to represent a joint.
*
* @constructor Create a new joint.
* @param normalVec The normal vector to the joint. The individual vector components
* can be accessed as 'a', 'b', and 'c'. Assumed to be a unit vector.
* @param localOrigin The local origin from which the distance is referenced. The individual
* components are accessed as 'localX', 'localY', and 'localZ'.
* @param center Cartesian coordinates for the center of the joint. The individual
* components can be accessed as 'centerX', 'centerY', and 'centerZ'.
* @param phi The joint's friction angle (phi).
* @param cohesion The cohesion along the joint
* @param shape A list of lines specifying the shape of the joint. Each item is a
* 3-tuple. The first two items specify the line, while the last gives the distance
* of the line from the joint's center in the local coordinate system.
* @param boundingSphereParam An optional parameter that can be used to specify the bounding
* sphere for the joint, if it is known. This prevents an expensive recalculation
* of the bounding sphere.
* @param dipAngleParam An optional parameter that can be used to specify the dip angle for the joint.
* This avoids recalculation of a known dip angle.
* @param dipDirectionParam An optional parameter that can be used to specify the dip direction for
* the joint. This avoids recalculation of a known dip direction.
*/
@SerialVersionUID(1L)
case class Joint(normalVec: Array[Double], localOrigin: Array[Double],
center: Array[Double], phi: Double, cohesion: Double,
shape: Vector[(Array[Double],Double)], dipAngleParam: Option[Double]=None,
dipDirectionParam: Option[Double]=None,
boundingSphereParam: Option[(Array[Double],Double)]=null) extends Serializable {
assert(normalVec.length == 3 && localOrigin.length == 3 && center.length == 3)
val a = normalVec(0)
val b = normalVec(1)
val c = normalVec(2)
val centerX = center(0)
val centerY = center(1)
val centerZ = center(2)
val d = Joint.findDistance(normalVec, localOrigin, center)
val localX = localOrigin(0)
val localY = localOrigin(1)
val localZ = localOrigin(2)
val dipAngle = dipAngleParam match {
case None => Joint.dipAngle(normalVec)
case Some(da) => da
}
val dipDirection = dipDirectionParam match {
case None => Joint.dipDir(normalVec)
case Some(dd) => dd
}
val boundingSphere = boundingSphereParam match {
case null =>
if (shape.isEmpty) {
None
} else {
Some(Joint.findBoundingSphere(normalVec, d, centerX, centerY, centerZ, shape, dipDirection))
}
case bs => bs
}
/** Converts lines defining shape of joint from local to global coordinates
*
* @return A seq of pairs, each representing a plane that specifies a boundary of the
* joint in the global coordinate space. The first item of each pair is a normal
* vector for the plane, and the second item is the distance of the plane from the origin.
*/
def globalCoordinates: Seq[(Array[Double], Double)] = {
val Nplane = if (c > -NumericUtils.EPSILON) {
// Ensures that normal will always point in -z global direction (ensures right-handed local coordinates)
-1.0 * DenseVector[Double](a, b, c)
} else {
DenseVector[Double](a, b, c)
}
val strike = (dipDirection - math.Pi / 2) % (2 * math.Pi) // Strike = dipDirection - pi/2 (US convention)
val Nstrike = DenseVector[Double](math.cos(-strike), math.sin(-strike), 0.0)
val Ndip = linalg.cross(Nplane, Nstrike)
// Q defines the linear transformation to convert to global coordinates
val Q = DenseMatrix.zeros[Double](3,3)
Q(::, 0) := Nstrike
Q(::, 1) := Ndip
Q(::, 2) := Nplane
val shapeVectors = shape.map { case (normal, _) => DenseVector[Double](normal(0), normal(1), 0) }
val globalShapeVecs = shapeVectors.map { Q*_ }
val centerVec = DenseVector[Double](centerX - localOrigin(0), centerY - localOrigin(1), centerZ - localOrigin(2))
val localDistances = shape.map { _._2 }
val globalDistances = globalShapeVecs.zip(localDistances).map
{ case (shapeVec, dist) => dist + shapeVec.dot(centerVec) }
// Convert back to triples to hide underlying Breeze implementation
val globalShapeTuples = globalShapeVecs.map {x => Array(x(0), x(1), x(2))}
globalShapeTuples.zip(globalDistances)
}
/**
* Calculates the distances of the joint relative to a new origin
*
* @param blockOrigin: new local origin
* @return Distance relative to block origin (new local origin)
*/
def updateJoint(blockOrigin: Array[Double]): Joint = {
assert(blockOrigin.length == 3)
Joint(normalVec, blockOrigin, center, phi, cohesion, shape, Some(dipAngle),
Some(dipDirection), boundingSphere)
}
def approximateEquals(inputJoint: Joint, tolerance: Double = NumericUtils.EPSILON): Boolean = {
math.abs(a - inputJoint.a) < tolerance &&
math.abs(b - inputJoint.b) < tolerance &&
math.abs(c - inputJoint.c) < tolerance &&
math.abs(centerX - inputJoint.centerX) < tolerance &&
math.abs(centerY - inputJoint.centerY) < tolerance &&
math.abs(centerZ - inputJoint.centerZ) < tolerance &&
math.abs(d - inputJoint.d) < tolerance &&
math.abs(phi - inputJoint.phi) < tolerance &&
math.abs(cohesion - inputJoint.cohesion) < tolerance &&
math.abs(dipAngle - inputJoint.dipAngle) < tolerance &&
math.abs(dipDirection - inputJoint.dipDirection) < tolerance &&
((shape zip inputJoint.shape) forall { case ((norm1, d1), (norm2, d2)) =>
math.abs(norm1(0) - norm2(0)) < tolerance &&
math.abs(norm1(1) - norm2(1)) < tolerance &&
math.abs(norm1(2) - norm2(2)) < tolerance &&
math.abs(d1 - d2) < tolerance
})
}
override def equals(obj: Any): Boolean = {
obj match {
case j: Joint =>
this.a == j.a && this.b == j.b && this.c == j.c &&
this.centerX == j.centerX && this.centerY == j.centerY && this.centerZ == j.centerZ &&
this.dipAngle == j.dipAngle && this.dipDirection == j.dipDirection &&
((this.shape zip j.shape) forall { case ((norm1, d1), (norm2, d2)) =>
(norm1 sameElements norm2) && d1 == d2
})
}
}
override def hashCode: Int = {
val hcBuilder = new HashCodeBuilder()
hcBuilder.append(a)
hcBuilder.append(b)
hcBuilder.append(c)
hcBuilder.append(centerX)
hcBuilder.append(centerY)
hcBuilder.append(centerZ)
hcBuilder.append(dipAngle)
hcBuilder.append(dipDirection)
shape foreach { case (normVec, dist) =>
hcBuilder.append(normVec(0)).append(normVec(1)).append(normVec(2))
hcBuilder.append(dist)
}
hcBuilder.toHashCode
}
}
| cb-geo/spark-rocks | src/main/scala/edu/berkeley/ce/sparkrocks/Joint.scala | Scala | gpl-2.0 | 15,682 |
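As a quick check of the dip conventions documented in Joint above (global x-axis points north, z is up, dip direction is an azimuth), a plane whose unit normal is (0.5, 0, -√3/2) is tilted 30° from horizontal and falls toward -x, i.e. south. The sketch below constructs such a joint and prints its dip angle and dip direction; it assumes the surrounding spark-rocks project (Breeze, NumericUtils, etc.) is on the classpath, and the values in the comments are what the formulas above should produce.

```scala
import edu.berkeley.ce.sparkrocks.Joint

object JointDipExample extends App {
  // Unit normal tilted 30 degrees off vertical, leaning toward +x,
  // so the plane itself dips 30 degrees toward -x (south).
  val joint = Joint(
    normalVec   = Array(0.5, 0.0, -math.sqrt(3.0) / 2.0),
    localOrigin = Array(0.0, 0.0, 0.0),
    center      = Array(0.0, 0.0, 0.0),
    phi         = 30.0,
    cohesion    = 0.0,
    shape       = Vector.empty // no shape, so no bounding sphere is computed
  )

  println(math.toDegrees(joint.dipAngle))     // ~30.0
  println(math.toDegrees(joint.dipDirection)) // ~180.0 (south)
}
```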
package uk.org.nbn.nbnv.importer.validation
import uk.org.nbn.nbnv.importer.records.NbnRecord
import collection.mutable.ListBuffer
import uk.org.nbn.nbnv.importer.fidelity.{ResultLevel, Result}
import uk.org.nbn.nbnv.importer.spatial.GridSquareInfoFactory
import uk.org.nbn.nbnv.importer.data.Database
class GridReferenceValidator (db: Database) {
def validate(record: NbnRecord) = {
val resultList = new ListBuffer[Result]
//is valid grid ref
val v1 = new Nbnv81Validator
val r1 = v1.validate(record.gridReferenceRaw.get, record.key)
resultList.append(r1)
if(r1.level == ResultLevel.DEBUG)
{
//does grid ref match grid ref type if specified.
val v2 = new Nbnv159Validator()
val r2 = v2.validate(record)
resultList.append(r2)
//Is the requested precision valid for the grid ref.
val factory = new GridSquareInfoFactory(db)
val v3 = new Nbnv90Validator(factory)
val r3 = v3.validate(record)
resultList.appendAll(r3)
}
resultList.toList
}
}
| JNCC-dev-team/nbn-importer | importer/src/main/scala/uk/org/nbn/nbnv/importer/validation/GridReferenceValidator.scala | Scala | apache-2.0 | 1,073 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.ops.{Inv => InvOps}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.tf.Context
import org.tensorflow.framework.{DataType, NodeDef}
import scala.reflect.ClassTag
class Inv extends TensorflowOpsLoader {
import Utils._
override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder, context: Context[T])
(implicit ev: TensorNumeric[T]): Module[T] = {
val t = getType(nodeDef.getAttrMap, "T")
if (t == DataType.DT_FLOAT) {
InvOps[T, Float]()
} else if (t == DataType.DT_DOUBLE) {
InvOps[T, Double]()
} else {
throw new UnsupportedOperationException(s"Not support load Inv when type is ${t}")
}
}
}
| wzhongyuan/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/utils/tf/loaders/Inv.scala | Scala | apache-2.0 | 1,474 |
package io.scalac.amqp.impl
import scala.concurrent.stm.Ref
import scala.util.{Success, Failure, Try}
import scala.util.control.NonFatal
import com.rabbitmq.client.{ShutdownSignalException, ShutdownListener, Connection}
import io.scalac.amqp.Delivery
import org.reactivestreams.{Subscriber, Publisher}
private[amqp] class QueuePublisher(
/** RabbitMQ library connection. */
connection: Connection,
/** Queue to consume from. */
queue: String,
/** Number of unacknowledged messages in the flight. It's beneficial to have this number higher
* than 1 due to improved throughput. Setting this number to high may increase memory usage -
* depending on average message size and speed of subscribers. */
prefetch: Int = 20) extends Publisher[Delivery] {
require(prefetch > 0, "prefetch <= 0")
val subscribers = Ref(Set[Subscriber[_ >: Delivery]]())
override def subscribe(subscriber: Subscriber[_ >: Delivery]) =
subscribers.single.getAndTransform(_ + subscriber) match {
case ss if ss.contains(subscriber) ⇒
throw new IllegalStateException(s"Rule 1.10: Subscriber=$subscriber is already subscribed to this publisher.")
case _ ⇒
Try(connection.createChannel()) match {
case Success(channel) ⇒
channel.addShutdownListener(newShutdownListener(subscriber))
val subscription = new QueueSubscription(channel, queue, subscriber)
try {
subscriber.onSubscribe(subscription)
channel.basicQos(prefetch)
channel.basicConsume(queue, false, subscription)
} catch {
case NonFatal(exception) ⇒ subscriber.onError(exception)
}
case Failure(cause) ⇒
subscriber.onSubscribe(CanceledSubscription)
subscriber.onError(cause)
}
}
def newShutdownListener(subscriber: Subscriber[_ >: Delivery]) = new ShutdownListener {
override def shutdownCompleted(cause: ShutdownSignalException) =
subscribers.single.transform(_ - subscriber)
}
override def toString = s"QueuePublisher(connection=$connection, queue=$queue, prefetch=$prefetch)"
}
| iozozturk/reactive-rabbit | src/main/scala/io/scalac/amqp/impl/QueuePublisher.scala | Scala | apache-2.0 | 2,174 |
package pl.writeonly.son2.path.notations
import com.jayway.jsonpath.spi.json.JsonOrgJsonProvider
import com.jayway.jsonpath.spi.mapper.JsonOrgMappingProvider
import pl.writeonly.son2.apis.config.Meta
import pl.writeonly.son2.apis.core.Formats
import pl.writeonly.son2.path.core.ProvidersPath
import pl.writeonly.son2.path.notation.CreatorNotationProvider
object CreatorNotationOrgJson extends CreatorNotationProvider {
override val meta = Meta(ProvidersPath.ORG, Formats.OBJECT)
override def jsonProvider = new JsonOrgJsonProvider()
override def mappingProvider = new JsonOrgMappingProvider()
}
| writeonly/son2 | scallions-impl/scallions-path/src/main/scala/pl/writeonly/son2/path/notations/CreatorNotationOrgJson.scala | Scala | apache-2.0 | 606 |
package fr.inria.spirals.sigma.ttc14.fixml
import fr.unice.i3s.sigma.m2t.M2T
import fr.inria.spirals.sigma.ttc14.fixml.objlang.support.ObjLang
import fr.inria.spirals.sigma.ttc14.fixml.objlang.support.ObjLang._objlang._
class ObjLang2CEnumHeader extends BaseObjLang2Enum with ObjLang2CPP with CHeader {
override def content = {
!s"typedef enum" curlyIndent {
genEnumItems
}
!s" ${source.name};"
}
override def genEnumItems = {
!(source.items map (source.name + "_" + _.name) mkString (", "))
}
}
| fikovnik/ttc14-fixml-sigma | ttc14-fixml-extension-3/src/fr/inria/spirals/sigma/ttc14/fixml/ObjLang2CEnumHeader.scala | Scala | epl-1.0 | 529 |
/** Represents a doc comment, splitting it into `body` and `tags`
* `tags` are all lines starting with an `@`, where the tag thats starts
* with `@` is paired with the text that follows, up to the next
* tagged line.
* `body` what comes before the first tagged line
*/
case class DocComment(body: String, tags: Map[String, List[String]])
object DocComment:
def fromString(str: String): DocComment =
val lines = str.linesIterator.toList
def tagged(line: String): Option[(String, String)] =
val ws = WordSplitter(line)
val tag = ws.next()
if tag.startsWith("@") then Some(tag, line.drop(ws.nextOffset))
else None
val (bodyLines, taggedLines) = lines.span(tagged(_).isEmpty)
def tagPairs(lines: List[String]): List[(String, String)] = lines match
case line :: lines1 =>
val (tag, descPrefix) = tagged(line).get
val (untaggedLines, lines2) = lines1.span(tagged(_).isEmpty)
val following = untaggedLines.map(_.dropWhile(_ <= ' '))
(tag, (descPrefix :: following).mkString("\n")) :: tagPairs(lines2)
case _ =>
Nil
DocComment(bodyLines.mkString("\n"), tagPairs(taggedLines).groupMap(_._1)(_._2))
end DocComment
| lampepfl/dotty | tests/run/decorators/DocComment.scala | Scala | apache-2.0 | 1,224 |
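A short usage sketch for DocComment.fromString above, showing the body/tag split and how repeated tags are grouped. The input string is invented for illustration, and the sketch assumes the WordSplitter helper used by `tagged` (not shown here) is available alongside DocComment.

```scala
@main def docCommentDemo(): Unit =
  val raw =
    """Parses a doc comment into a body and tagged sections.
      |Everything before the first tagged line is the body.
      |@param str the raw comment text
      |@param strict whether unknown tags are rejected
      |@return the structured comment""".stripMargin

  val doc = DocComment.fromString(raw)
  println(doc.body)            // the two untagged lines, joined with a newline
  println(doc.tags("@param"))  // both @param descriptions, in order of appearance
  println(doc.tags("@return")) // a single-element list with the @return description
```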
package korolev
import java.util.concurrent.ConcurrentLinkedQueue
import scala.language.higherKinds
import scala.util.{Failure, Success}
abstract class StateManager[F[+_]: Async, State] {
def state: State
def subscribe[U](f: State => U): StateManager.Unsubscribe
def onDestroy[U](f: () => U): StateManager.Unsubscribe
def destroy(): Unit
def apply(transition: StateManager.Transition[State]): F[Unit]
def update(state: State): F[Unit]
}
object StateManager {
type Unsubscribe = () => Unit
type Transition[State] = PartialFunction[State, State]
def apply[F[+_]: Async, S](initialState: S): StateManager[F, S] = {
new StateManager[F, S] {
val queue = new ConcurrentLinkedQueue[(Async.Promise[F, Unit], Transition[S])]
@volatile var currentState = initialState
@volatile var subscribers = List.empty[S => _]
@volatile var onDestroyListeners = List.empty[() => _]
@volatile var inProgress = false
def update(state: S): F[Unit] = apply { case _ => state }
def apply(transition: Transition[S]): F[Unit] = {
def executeNext(): Unit = {
val (promise, transition) = queue.poll()
try {
transition.lift(currentState) match {
case Some(newState) =>
currentState = newState
subscribers.foreach(f => f(newState))
promise.complete(Success(()))
case None =>
promise.complete(Failure(new Exception("Transition don't fit this state")))
}
} catch {
case e: Throwable =>
promise.complete(Failure(e))
} finally {
if (queue.isEmpty) inProgress = false
else executeNext()
}
}
val promise = Async[F].promise[Unit]
queue.add(promise -> transition)
if (!inProgress) {
inProgress = true
executeNext()
}
promise.future
}
def destroy(): Unit = {
for (listener <- onDestroyListeners)
listener()
}
def state: S = currentState
def onDestroy[U](f: () => U): Unsubscribe = {
onDestroyListeners = f :: onDestroyListeners
() => onDestroyListeners = onDestroyListeners.filter(_ != f)
}
def subscribe[U](f: S => U): StateManager.Unsubscribe = {
subscribers = f :: subscribers
() => subscribers = subscribers.filter(_ != f)
}
}
}
}
| PhilAndrew/JumpMicro | JMCloner/src/main/scala/korolev/StateManager.scala | Scala | mit | 2,467 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.util
import org.apache.spark.SparkException
import org.apache.spark.annotation.Since
/**
* Trait for models that provides Training summary.
*
* @tparam T Summary instance type
*/
@Since("3.0.0")
private[ml] trait HasTrainingSummary[T] {
private[ml] final var trainingSummary: Option[T] = None
/** Indicates whether a training summary exists for this model instance. */
@Since("3.0.0")
def hasSummary: Boolean = trainingSummary.isDefined
/**
* Gets summary of model on training set. An exception is
* thrown if if `hasSummary` is false.
*/
@Since("3.0.0")
def summary: T = trainingSummary.getOrElse {
throw new SparkException(
s"No training summary available for this ${this.getClass.getSimpleName}")
}
private[ml] def setSummary(summary: Option[T]): this.type = {
this.trainingSummary = summary
this
}
}
| maropu/spark | mllib/src/main/scala/org/apache/spark/ml/util/HasTrainingSummary.scala | Scala | apache-2.0 | 1,692 |
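Because HasTrainingSummary is `private[ml]`, it can only be mixed in from inside the `org.apache.spark.ml` package tree. The sketch below shows the intended pattern (the estimator attaches a summary after training, callers test `hasSummary` before reading `summary`); the FooModel and FooTrainingSummary names are invented, and the code assumes it is compiled as part of the Spark mllib sources.

```scala
package org.apache.spark.ml.util

// Hypothetical summary type and model, purely for illustration.
case class FooTrainingSummary(objectiveHistory: Array[Double])

class FooModel extends HasTrainingSummary[FooTrainingSummary] {
  // Typically invoked by the estimator's fit() right after training.
  private[ml] def withSummary(s: FooTrainingSummary): this.type =
    setSummary(Some(s))
}

object FooModelDemo {
  def main(args: Array[String]): Unit = {
    val model = new FooModel
    assert(!model.hasSummary)  // reading `summary` now would throw SparkException
    model.withSummary(FooTrainingSummary(Array(1.0, 0.5, 0.25)))
    assert(model.hasSummary)
    println(model.summary.objectiveHistory.mkString(", "))
  }
}
```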
package scavlink.sbt.mavgen
import org.scalatest.WordSpec
import scavlink.sbt.mavgen.MessageGenerator._
import scala.xml.XML
class MagicSpec extends WordSpec {
val xml = XML.load(getClass.getResourceAsStream("/mavgen/common.xml"))
// taken from ardupilot project
val sourceMagics = Map(0 -> 50, 1 -> 124, 2 -> 137, 4 -> 237, 5 -> 217, 6 -> 104, 7 -> 119, 11 -> 89, 20 -> 214,
21 -> 159, 22 -> 220, 23 -> 168, 24 -> 24, 25 -> 23, 26 -> 170, 27 -> 144, 28 -> 67, 29 -> 115, 30 -> 39, 31 -> 246,
32 -> 185, 33 -> 104, 34 -> 237, 35 -> 244, 36 -> 222, 37 -> 212, 38 -> 9, 39 -> 254, 40 -> 230, 41 -> 28, 42 -> 28,
43 -> 132, 44 -> 221, 45 -> 232, 46 -> 11, 47 -> 153, 48 -> 41, 49 -> 39, 50 -> 214, 51 -> 223, 52 -> 141, 53 -> 33,
54 -> 15, 55 -> 3, 56 -> 100, 57 -> 24, 58 -> 239, 59 -> 238, 60 -> 30, 61 -> 153, 62 -> 183, 63 -> 51,
64 -> 82, 65 -> 118, 66 -> 148, 67 -> 21, 69 -> 243, 70 -> 124, 73 -> 38, 74 -> 20, 75 -> 158, 76 -> 152,
77 -> 143, 80 -> 127, 81 -> 106, 82 -> 49, 83 -> 22, 84 -> 143, 85 -> 140, 86 -> 5,
87 -> 150, 89 -> 231, 90 -> 183, 91 -> 63, 92 -> 54, 100 -> 175, 101 -> 102, 102 -> 158, 103 -> 208, 104 -> 56,
105 -> 93, 106 -> 138, 107 -> 108, 108 -> 32, 109 -> 185, 110 -> 84, 111 -> 34, 112 -> 124, 113 -> 124,
114 -> 237, 115 -> 4, 116 -> 76, 117 -> 128, 118 -> 56, 119 -> 116, 120 -> 134, 121 -> 237, 122 -> 203, 123 -> 250,
124 -> 87, 125 -> 203, 126 -> 220, 127 -> 25, 128 -> 226, 130 -> 29, 131 -> 223, 132 -> 85, 133 -> 6, 134 -> 229,
135 -> 203, 136 -> 1, 147 -> 154, 148 -> 49, 149 -> 15, 248 -> 8, 249 -> 204, 250 -> 49, 251 -> 170,
252 -> 44, 253 -> 83, 254 -> 46)
"the magic number calculator" should {
"compute magic numbers that match the values from droidplanner for all common messages" in {
val messages = unmarshal("common", xml)
val magics = messages.map(m => m.id -> m.magic)
val mismatches =
for ((id, magic) <- magics if sourceMagics.isDefinedAt(id) && sourceMagics(id) != magic)
yield id ->(magic, sourceMagics(id))
assertResult(Map.empty)(mismatches.toMap)
}
}
}
| nickolasrossi/sbt-mavgen | src/test/scala/scavlink/sbt/mavgen/MagicSpec.scala | Scala | mit | 2,125 |
package im.actor.server.presences
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
import akka.actor._
import akka.contrib.pattern.ShardRegion.Passivate
import akka.contrib.pattern.{ ClusterSharding, ShardRegion }
import akka.pattern.ask
import akka.util.Timeout
import org.joda.time.DateTime
import slick.driver.PostgresDriver.api._
import im.actor.server.db.DbExtension
import im.actor.server.{ models, persist }
case class PresenceManagerRegion(val ref: ActorRef)
sealed trait Presence
@SerialVersionUID(1L)
case class PresenceState(userId: Int, presence: Presence, lastSeenAt: Option[DateTime])
object Presences {
@SerialVersionUID(1L)
case object Online extends Presence
@SerialVersionUID(1L)
case object Offline extends Presence
}
object PresenceManager {
import Presences._
private val InitRetryTimeout = 5.seconds
private sealed trait Message
@SerialVersionUID(1L)
private case class UserPresenceChange(presence: Presence, timeout: Long) extends Message
@SerialVersionUID(1L)
private case class Subscribe(consumer: ActorRef) extends Message
@SerialVersionUID(1L)
private case class SubscribeAck(consumer: ActorRef)
@SerialVersionUID(1L)
private case class Unsubscribe(consumer: ActorRef) extends Message
@SerialVersionUID(1L)
private case class UnsubscribeAck(consumer: ActorRef)
@SerialVersionUID(1L)
private case class Envelope(userId: Int, payload: Message)
@SerialVersionUID(1L)
private case class Initialized(lastSeenAt: Option[DateTime])
private val idExtractor: ShardRegion.IdExtractor = {
case env @ Envelope(userId, payload) ⇒ (userId.toString, env)
}
private val shardResolver: ShardRegion.ShardResolver = msg ⇒ msg match {
case Envelope(userId, _) ⇒ (userId % 32).toString // TODO: configurable
}
private def startRegion(props: Option[Props])(implicit system: ActorSystem): PresenceManagerRegion =
PresenceManagerRegion(ClusterSharding(system).start(
typeName = "PresenceManager",
entryProps = props,
idExtractor = idExtractor,
shardResolver = shardResolver
))
def startRegion()(implicit system: ActorSystem): PresenceManagerRegion = startRegion(Some(props))
def startRegionProxy()(implicit system: ActorSystem): PresenceManagerRegion = startRegion(None)
def props = Props(classOf[PresenceManager])
def subscribe(userId: Int, consumer: ActorRef)(implicit region: PresenceManagerRegion, ec: ExecutionContext, timeout: Timeout): Future[Unit] = {
region.ref.ask(Envelope(userId, Subscribe(consumer))).mapTo[SubscribeAck].map(_ ⇒ ())
}
def subscribe(userIds: Set[Int], consumer: ActorRef)(implicit region: PresenceManagerRegion, ec: ExecutionContext, timeout: Timeout): Future[Unit] =
Future.sequence(userIds map (subscribe(_, consumer))) map (_ ⇒ ())
def unsubscribe(userId: Int, consumer: ActorRef)(implicit region: PresenceManagerRegion, ec: ExecutionContext, timeout: Timeout): Future[Unit] = {
region.ref.ask(Envelope(userId, Unsubscribe(consumer))).mapTo[UnsubscribeAck].map(_ ⇒ ())
}
def presenceSetOnline(userId: Int, timeout: Long)(implicit region: PresenceManagerRegion): Unit = {
region.ref ! Envelope(userId, UserPresenceChange(Online, timeout))
}
def presenceSetOffline(userId: Int, timeout: Long)(implicit region: PresenceManagerRegion): Unit = {
region.ref ! Envelope(userId, UserPresenceChange(Offline, timeout))
}
}
class PresenceManager extends Actor with ActorLogging with Stash {
import Presences._
import PresenceManager._
implicit val ec: ExecutionContext = context.dispatcher
private val db: Database = DbExtension(context.system).db
private val receiveTimeout = 15.minutes // TODO: configurable
context.setReceiveTimeout(receiveTimeout)
private[this] var scheduledTimeout: Option[Cancellable] = None
private[this] var consumers = Set.empty[ActorRef]
private[this] var lastChange = UserPresenceChange(Offline, 0)
private[this] var lastSeenAt: Option[DateTime] = None
private def initialize(userId: Int): Unit = {
db.run(persist.presences.UserPresence.find(userId).map {
case Some(userPresence) ⇒
self ! Initialized(userPresence.lastSeenAt)
case None ⇒
db.run(persist.presences.UserPresence.createOrUpdate(models.presences.UserPresence(userId, None)))
self ! Initialized(None)
}) onFailure {
case e ⇒
log.error(e, "Failed to recover PresenceManager state. Retry in {}", InitRetryTimeout)
context.system.scheduler.scheduleOnce(InitRetryTimeout) {
initialize(userId)
}
}
}
def receive = {
case Envelope(userId, _) ⇒
stash()
initialize(userId)
case Initialized(lastSeenAt: Option[DateTime]) ⇒
unstashAll()
this.lastSeenAt = lastSeenAt
context.become(working)
case msg ⇒ stash()
}
def working: Receive = {
case Envelope(userId, Subscribe(consumer)) ⇒
if (!consumers.contains(consumer)) {
context.watch(consumer)
consumers += consumer
}
sender ! SubscribeAck(consumer)
deliverState(userId)
case Envelope(userId, Unsubscribe(consumer)) ⇒
consumers -= consumer
context.unwatch(consumer)
sender ! UnsubscribeAck(consumer)
case Terminated(consumer) if consumers.contains(consumer) ⇒
consumers -= consumer
case Envelope(userId, change @ UserPresenceChange(presence, timeout)) ⇒
log.debug("userId: {}, change: {}", userId, change)
scheduledTimeout map (_.cancel())
val needDeliver = this.lastChange.presence != presence
this.lastChange = change
if (presence == Online) {
this.lastSeenAt = Some(new DateTime)
// TODO: handle failures
db.run(persist.presences.UserPresence.createOrUpdate(models.presences.UserPresence(userId, this.lastSeenAt)))
scheduledTimeout = Some(
context.system.scheduler.scheduleOnce(timeout.millis, self, Envelope(userId, UserPresenceChange(Offline, 0)))
)
}
if (needDeliver) {
deliverState(userId)
}
case ReceiveTimeout ⇒
if (consumers.isEmpty) {
context.parent ! Passivate(stopMessage = PoisonPill)
}
}
private def deliverState(userId: Int): Unit = {
consumers foreach { consumer ⇒
consumer ! PresenceState(userId, this.lastChange.presence, this.lastSeenAt)
}
}
}
|
supertanglang/actor-platform
|
actor-server/actor-presences/src/main/scala/im/actor/server/presences/PresenceManager.scala
|
Scala
|
mit
| 6,464 |
/* sbt -- Simple Build Tool
* Copyright 2009, 2010 Mark Harrah
*/
package xsbt.boot
// <boot.directory>
// scala-<scala.version>/ [baseDirectoryName]
// lib/ [ScalaDirectoryName]
// <app.name>-<app.version>/ [appDirectoryName]
//
// see also ProjectProperties for the set of constants that apply to the build.properties file in a project
private object BootConfiguration
{
// these are the Scala module identifiers to resolve/retrieve
val ScalaOrg = "org.scala-lang"
val CompilerModuleName = "scala-compiler"
val LibraryModuleName = "scala-library"
val JUnitName = "junit"
val SbtOrg = "org.scala-tools.sbt"
/** The Ivy conflict manager to use for updating.*/
val ConflictManagerName = "latest-revision"
/** The name of the local Ivy repository, which is used when compiling sbt from source.*/
val LocalIvyName = "local"
/** The pattern used for the local Ivy repository, which is used when compiling sbt from source.*/
val LocalPattern = "[organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext]"
/** The artifact pattern used for the local Ivy repository.*/
def LocalArtifactPattern = LocalPattern
/** The Ivy pattern used for the local Ivy repository.*/
def LocalIvyPattern = LocalPattern
final val FjbgPackage = "ch.epfl.lamp.fjbg."
/** The class name prefix used to hide the Scala classes used by this loader from the application */
final val ScalaPackage = "scala."
/** The class name prefix used to hide the Ivy classes used by this loader from the application*/
final val IvyPackage = "org.apache.ivy."
/** The class name prefix used to hide the launcher classes from the application.
* Note that access to xsbti classes is allowed.*/
final val SbtBootPackage = "xsbt.boot."
/** The prefix for JLine resources.*/
final val JLinePackagePath = "jline/"
/** The loader will check that these classes can be loaded and will assume that their presence indicates
* the Scala compiler and library have been downloaded.*/
val TestLoadScalaClasses = "scala.Option" :: "scala.tools.nsc.Global" :: Nil
val ScalaHomeProperty = "scala.home"
val UpdateLogName = "update.log"
val DefaultChecksums = "sha1" :: "md5" :: Nil
val DefaultIvyConfiguration = "default"
/** The name of the directory within the boot directory to retrieve scala to. */
val ScalaDirectoryName = "lib"
/** The Ivy pattern to use for retrieving the scala compiler and library. It is relative to the directory
* containing all jars for the requested version of scala. */
val scalaRetrievePattern = ScalaDirectoryName + "/[artifact](-[classifier]).[ext]"
def artifactType(classifier: String) =
classifier match
{
case "sources" => "src"
case "javadoc" => "doc"
case _ => "jar"
}
/** The Ivy pattern to use for retrieving the application and its dependencies. It is relative to the directory
* containing all jars for the requested version of scala. */
def appRetrievePattern(appID: xsbti.ApplicationID) = appDirectoryName(appID, "/") + "(/[component])/[artifact]-[revision](-[classifier]).[ext]"
/** The name of the directory to retrieve the application and its dependencies to.*/
def appDirectoryName(appID: xsbti.ApplicationID, sep: String) = appID.groupID + sep + appID.name + sep + appID.version
/** The name of the directory in the boot directory to put all jars for the given version of scala in.*/
def baseDirectoryName(scalaVersion: String) = if(scalaVersion.isEmpty) "other" else "scala-" + scalaVersion
}
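// A hedged sketch, not part of the original launcher sources: it only illustrates how the
// constants above compose into the boot layout documented at the top of this file, for a
// hypothetical Scala 2.9.1 installation.
private object BootLayoutExample
{
import BootConfiguration._
// <boot.directory>/scala-2.9.1/ holds everything retrieved for that Scala version ...
val scalaBase = baseDirectoryName("2.9.1") // "scala-2.9.1"
// ... and the compiler/library jars land beneath its "lib" subdirectory.
val scalaJarPattern = scalaBase + "/" + scalaRetrievePattern // "scala-2.9.1/lib/[artifact](-[classifier]).[ext]"
}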
private object ProxyProperties
{
val HttpProxyEnv = "http_proxy"
val HttpProxyUser = "http_proxy_user"
val HttpProxyPassword = "http_proxy_pass"
val ProxyHost = "http.proxyHost"
val ProxyPort = "http.proxyPort"
val ProxyUser = "http.proxyUser"
val ProxyPassword = "http.proxyPassword"
}
|
kuochaoyi/xsbt
|
launch/BootConfiguration.scala
|
Scala
|
bsd-3-clause
| 3,799 |
/**
* A simple text based RPG
*
* @package simplerpg
* @copyright 2015
*/
package simplerpg.action
import simplerpg.Player
import simplerpg.World
final class StatsAction(categories: Array[String]) extends Action {
def run(currentPlayer: Player, world: World): Option[Action] = {
val builder = new StringBuilder
val includeAll = categories.isEmpty
builder.append("Player Stats\n")
if (includeAll || categories.contains("strength")) {
builder.append(s"""- Strength: ${currentPlayer.stats.strength}\n""")
}
if (includeAll || categories.contains("magic")) {
builder.append(s"""- Magic: ${currentPlayer.stats.magic}\n""")
}
if (includeAll || categories.contains("stamina")) {
builder.append(s"""- Stamina: ${currentPlayer.stats.stamina}\n""")
}
printAction(builder.toString)
}
}
|
mcross1882/SimpleRPG
|
src/main/scala/simplerpg/action/StatsAction.scala
|
Scala
|
mit
| 912 |
package org.jetbrains.plugins.scala.actions
import javax.swing.Icon
import com.intellij.ide.fileTemplates.{FileTemplate, FileTemplateManager}
import com.intellij.ide.fileTemplates.actions.CreateFromTemplateActionBase
import com.intellij.openapi.project.{DumbAware, Project}
import com.intellij.psi.PsiDirectory
import org.jetbrains.annotations.Nls
abstract class LazyFileTemplateAction(
templateName: String, // defined in plugin xml file with <internalFileTemplate ... /> tag
@Nls title: String,
@Nls description: String,
val icon: Icon
) extends CreateFromTemplateActionBase(
title,
description,
icon
) with DumbAware {
private lazy val template = FileTemplateManager.getDefaultInstance.getInternalTemplate(templateName)
override def getTemplate(project: Project, dir: PsiDirectory): FileTemplate = template
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/actions/LazyFileTemplateAction.scala
|
Scala
|
apache-2.0
| 834 |
import scala.tools.nsc.doc.model._
import scala.tools.partest.ScaladocModelTest
object Test extends ScaladocModelTest {
override def code = """
import language.higherKinds
trait T[M[_]]
class C extends T[Function0]
class D extends T[Tuple1]
"""
def scaladocSettings = ""
def testModel(rootPackage: Package) = {
import access._
// did not crash
}
}
|
felixmulder/scala
|
test/scaladoc/run/t7876.scala
|
Scala
|
bsd-3-clause
| 398 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.subjects
import cats.implicits._
import monix.execution.Ack
import monix.execution.Ack.{Continue, Stop}
import monix.execution.exceptions.DummyException
import monix.reactive.Observer
import scala.concurrent.Future
import scala.util.Success
object ProfunctorSubjectSuite extends BaseSubjectSuite {
def alreadyTerminatedTest(expectedElems: Seq[Long]) = {
val s = BehaviorSubject[Long](-1)
Sample(s, expectedElems.lastOption.getOrElse(-1))
}
def continuousStreamingTest(expectedElems: Seq[Long]) = {
val s = BehaviorSubject[Long](0)
Some(Sample(s, expectedElems.sum))
}
test("should protect against user-code in left mapping function") { implicit s =>
val dummy = new RuntimeException("dummy")
val subject = BehaviorSubject[String]("10").dimap[Int, Int](_ => throw dummy)(_.toInt)
var received = 0
var wasCompleted = 0
var errorThrown: Throwable = null
for (i <- 0 until 10)
subject.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int): Future[Ack] = {
received += elem
Continue
}
def onError(ex: Throwable): Unit = errorThrown = ex
def onComplete(): Unit = wasCompleted += 1
})
assertEquals(subject.onNext(1), Stop)
subject.onComplete()
s.tick()
assertEquals(received, 100)
assertEquals(wasCompleted, 0)
assertEquals(errorThrown, dummy)
}
test("should protect against user-code in right mapping function") { implicit s =>
val dummy = new RuntimeException("dummy")
val subject = BehaviorSubject[String]("10").dimap[Int, Int](_.toString)(_ => throw dummy)
var received = 0
var wasCompleted = 0
var errorThrown: Throwable = null
for (i <- 0 until 10)
subject.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int): Future[Ack] = {
received += elem
Continue
}
def onError(ex: Throwable): Unit = errorThrown = ex
def onComplete(): Unit = wasCompleted += 1
})
subject.onNext(1); s.tick()
assertEquals(subject.onNext(2), Continue)
assertEquals(subject.onNext(3), Continue)
subject.onComplete()
s.tick()
assertEquals(received, 0)
assertEquals(wasCompleted, 0)
assertEquals(errorThrown, dummy)
}
test("should work synchronously for synchronous subscribers") { implicit s =>
val subject = BehaviorSubject[String]("10").dimap[Int, Int](_.toString)(_.toInt)
var received = 0
var wasCompleted = 0
for (i <- 0 until 10)
subject.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int): Future[Ack] = {
received += elem
Continue
}
def onError(ex: Throwable): Unit = ()
def onComplete(): Unit = wasCompleted += 1
})
subject.onNext(1); s.tick()
assertEquals(subject.onNext(2), Continue)
assertEquals(subject.onNext(3), Continue)
subject.onComplete()
s.tick()
assertEquals(received, 160)
assertEquals(wasCompleted, 10)
}
test("should work with asynchronous subscribers") { implicit s =>
val subject = BehaviorSubject[String]("10").dimap[Int, Int](_.toString)(_.toInt)
var received = 0
var wasCompleted = 0
for (i <- 0 until 10)
subject.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int) = Future {
received += elem
Continue
}
def onError(ex: Throwable): Unit = ()
def onComplete(): Unit = wasCompleted += 1
})
for (i <- 1 to 10) {
val ack = subject.onNext(i)
assert(!ack.isCompleted)
s.tick()
assert(ack.isCompleted)
assertEquals(received, (1 to i).sum * 10 + 100)
}
subject.onComplete()
assertEquals(received, 5 * 11 * 10 + 100)
assertEquals(wasCompleted, 10)
}
test("subscribe after complete should complete immediately") { implicit s =>
val subject = BehaviorSubject[String]("10").dimap[Int, Int](_.toString)(_.toInt)
var received = 0
subject.onComplete()
var wasCompleted = false
subject.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int) = { received += elem; Continue }
def onError(ex: Throwable): Unit = ()
def onComplete(): Unit = wasCompleted = true
})
assert(wasCompleted)
assertEquals(received, 10)
}
test("onError should terminate current and future subscribers") { implicit s =>
val subject = BehaviorSubject[String]("10").dimap[Int, Int](_.toString)(_.toInt)
val dummy = DummyException("dummy")
var elemsReceived = 0
var errorsReceived = 0
for (_ <- 0 until 10)
subject.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int) = { elemsReceived += elem; Continue }
def onComplete(): Unit = ()
def onError(ex: Throwable): Unit = ex match {
case `dummy` => errorsReceived += 1
case _ => ()
}
})
subject.onNext(1); s.tick()
subject.onError(dummy)
subject.unsafeSubscribeFn(new Observer[Int] {
def onNext(elem: Int) = { elemsReceived += elem; Continue }
def onComplete(): Unit = ()
def onError(ex: Throwable): Unit = ex match {
case `dummy` => errorsReceived += 1
case _ => ()
}
})
s.tick()
assertEquals(elemsReceived, 110)
assertEquals(errorsReceived, 11)
}
test("can stop streaming while connecting") { implicit s =>
val subject = BehaviorSubject[String]("10").dimap[Int, Int](_.toString)(_.toInt)
val future1 = subject.runAsyncGetFirst
val future2 = subject.drop(1).runAsyncGetFirst
s.tick()
assertEquals(future1.value, Some(Success(Some(10))))
assertEquals(subject.size, 1)
assertEquals(subject.onNext(20), Continue)
assertEquals(future2.value, Some(Success(Some(20))))
assertEquals(subject.size, 0)
}
test("unsubscribe after onComplete") { implicit s =>
var result: Int = 0
val subject = BehaviorSubject[String]("0").dimap[Int, Int](_.toString)(_.toInt)
val c = subject.subscribe { e => result = e; Continue }
subject.onNext(1)
subject.onComplete()
s.tick()
c.cancel()
assertEquals(result, 1)
}
}
|
Wogan/monix
|
monix-reactive/shared/src/test/scala/monix/reactive/subjects/ProfunctorSubjectSuite.scala
|
Scala
|
apache-2.0
| 6,862 |
package com.twitter.finagle.netty4
import com.twitter.app.GlobalFlag
import com.twitter.conversions.time._
import com.twitter.concurrent.NamedPoolThreadFactory
import com.twitter.finagle.netty4.util.Netty4Timer
import com.twitter.finagle.stats.{FinagleStatsReceiver, StatsReceiver}
import com.twitter.finagle.util.ServiceLoadedTimer
import com.twitter.logging.Logger
import com.twitter.util.{Duration, Time}
import java.util.concurrent.TimeUnit
/**
* Configures `ticksPerWheel` on the singleton instance of `HashedWheelTimer`.
*/
private object timerTicksPerWheel extends GlobalFlag[Int](
512,
"Netty 4 timer ticks per wheel")
/**
* Configures `tickDuration` on the singleton instance of `HashedWheelTimer`.
*/
private object timerTickDuration extends GlobalFlag[Duration](
10.milliseconds,
"Netty 4 timer tick duration")
/**
* A Netty timer for use with [[Netty4HashedWheelTimer]].
*/
private class HashedWheelTimer(
statsReceiver: StatsReceiver,
tickDuration: Duration,
ticksPerWheel: Int)
extends io.netty.util.HashedWheelTimer(
new NamedPoolThreadFactory("Netty 4 Timer", /*daemon = */true),
tickDuration.inMilliseconds, TimeUnit.MILLISECONDS,
ticksPerWheel,
/*leakDetection = */false) { self =>
private[this] val statsPollInterval = 10.seconds
private object deviationStat extends io.netty.util.TimerTask {
private[this] val tickDuration = timerTickDuration()
private[this] val deviationMs = statsReceiver.stat("timer", "deviation_ms")
private[this] var nextAt = Time.now + tickDuration
def run(timeout: io.netty.util.Timeout): Unit = {
val now = Time.now
val delta = now - nextAt
nextAt = now + tickDuration
deviationMs.add(delta.inMilliseconds)
self.newTimeout(this, tickDuration.inMilliseconds, TimeUnit.MILLISECONDS)
}
}
private object pendingTasksStat extends io.netty.util.TimerTask {
private[this] val pendingTasks = statsReceiver.stat("timer", "pending_tasks")
def run(timeout: io.netty.util.Timeout): Unit = {
pendingTasks.add(self.pendingTimeouts)
self.newTimeout(pendingTasksStat, statsPollInterval.inSeconds, TimeUnit.SECONDS)
}
}
self.newTimeout(deviationStat, timerTickDuration().inMilliseconds, TimeUnit.MILLISECONDS)
self.newTimeout(pendingTasksStat, statsPollInterval.inSeconds, TimeUnit.SECONDS)
}
private object HashedWheelTimer {
/**
* A singleton instance of [[HashedWheelTimer]] that is used for the all service loaded
* instances of [[Netty4HashedWheelTimer]]. Configuration is done via global flags.
*
* @note Stats are reported into the "finagle" scope.
*/
val instance: HashedWheelTimer = {
new HashedWheelTimer(
FinagleStatsReceiver,
timerTickDuration(),
timerTicksPerWheel()
)
}
}
/**
* A default implementation of [[Netty4Timer]] that's based on `HashedWheelTimer` and uses
* the default `ticksPerWheel` size of 512 and 10 millisecond ticks, which gives ~5100
* milliseconds worth of scheduling. This should suffice for most usage without having
* tasks scheduled for a later round.
* This class is intended to be service-loaded instead of directly instantiated.
* See [[com.twitter.finagle.util.LoadService]] and [[com.twitter.finagle.util.DefaultTimer]].
*
* This timer also exports metrics under `finagle/timer` (see
* [[https://twitter.github.io/finagle/guide/Metrics.html#timer metrics documentation]]):
*
* 1. `deviation_ms`
* 2. `pending_tasks`
*
* To configure this timer use the following CLI flags:
*
* 1. `-com.twitter.finagle.netty4.timerTickDuration=100.milliseconds`
* 2. `-com.twitter.finagle.netty4.timerTicksPerWheel=512`
*/
private[netty4] class Netty4HashedWheelTimer
extends Netty4Timer(HashedWheelTimer.instance)
with ServiceLoadedTimer {
private[this] val log = Logger.get()
// This timer is "unstoppable".
override def stop(): Unit =
log.warning(s"Ignoring call to `Timer.stop()` on an unstoppable Netty4Timer.\n" +
s"Current stack trace: ${Thread.currentThread.getStackTrace.mkString("\n")}")
}
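// A hedged sketch, not part of the original file: the "~5100 milliseconds worth of scheduling"
// quoted in the scaladoc above is simply ticksPerWheel * tickDuration under the default flag
// values (512 ticks * 10 ms/tick = 5120 ms). The object name is illustrative only.
private object Netty4TimerHorizonExample {
val approximateHorizon: Duration = timerTickDuration() * timerTicksPerWheel().toLong
}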
|
koshelev/finagle
|
finagle-netty4/src/main/scala/com/twitter/finagle/netty4/Netty4HashedWheelTimer.scala
|
Scala
|
apache-2.0
| 4,095 |
package models.services
import java.util.UUID
import javax.inject.Inject
import com.mohiva.play.silhouette.api.LoginInfo
import com.mohiva.play.silhouette.impl.providers.CommonSocialProfile
import models.User
import models.daos.UserDAO
import play.api.libs.concurrent.Execution.Implicits._
import scala.concurrent.Future
/**
* Handles actions to users.
*
* @param userDAO The user DAO implementation.
*/
class UserServiceImpl @Inject() (userDAO: UserDAO) extends UserService {
/**
* Retrieves a user that matches the specified login info.
*
* @param loginInfo The login info to retrieve a user.
* @return The retrieved user or None if no user could be retrieved for the given login info.
*/
def retrieve(loginInfo: LoginInfo): Future[Option[User]] = userDAO.find(loginInfo)
/**
* Create a user.
*
* @param user The user to create.
* @return The created user.
*/
def create(user: User) = userDAO.create(user)
/**
* Create the social profile for a user.
*
* If a user exists for this profile then update the user, otherwise create a new user with the given profile.
*
* @param profile The social profile to create.
* @return The user for whom the profile was created.
*/
def create(profile: CommonSocialProfile) = {
userDAO.find(profile.loginInfo).flatMap {
case Some(user) => // Update user with profile
userDAO.create(user.copy(
email = profile.email,
profile = user.profile,
avatarURL = profile.avatarURL
))
case None => // Insert a new user
userDAO.create(User(
userID = UUID.randomUUID(),
loginInfo = profile.loginInfo,
email = profile.email,
profile = models.Profile(None,None,None),
avatarURL = profile.avatarURL
))
}
}
/**
* update a user.
*
* @param user The user to update.
* @return The updated user.
*/
def update(user: User): Future[User] = userDAO.update(user)
}
|
renexdev/Play-Auth-Slick-Seed-Load-Schema
|
app/models/services/UserServiceImpl.scala
|
Scala
|
apache-2.0
| 2,004 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.compiler.v2_3.executionplan.builders
import org.neo4j.cypher.internal.compiler.v2_3.commands.expressions.Identifier
import org.neo4j.cypher.internal.compiler.v2_3.executionplan.PartiallySolvedQuery
import org.neo4j.cypher.internal.compiler.v2_3.mutation.DeleteEntityAction
import org.neo4j.cypher.internal.compiler.v2_3.spi.PlanContext
class DeleteAndPropertySetBuilderTest extends BuilderTest {
val builder = new UpdateActionBuilder
val planContext = mock[PlanContext]
test("does_not_offer_to_solve_done_queries") {
val q = PartiallySolvedQuery().
copy(updates = Seq(Solved(DeleteEntityAction(Identifier("x"), forced = false))))
withClue("Should not be able to build on this")(builder.canWorkWith(plan(q), planContext)) should equal(false)
}
test("offers_to_solve_queries") {
val q = PartiallySolvedQuery().
copy(updates = Seq(Unsolved(DeleteEntityAction(Identifier("x"), forced = false))))
val pipe = createPipe(nodes = Seq("x"))
val executionPlan = plan(pipe, q)
withClue("Should accept this")(builder.canWorkWith(executionPlan, planContext)) should equal(true)
val resultPlan = builder(executionPlan, planContext)
val resultQ = resultPlan.query
resultQ should equal(q.copy(updates = q.updates.map(_.solve)))
withClue("Execution plan should contain transaction")(resultPlan.isUpdating) should equal(true)
}
test("does_not_offer_to_delete_something_not_yet_there") {
val q = PartiallySolvedQuery().
copy(updates = Seq(Unsolved(DeleteEntityAction(Identifier("x"), forced = false))))
val executionPlan = plan(q)
withClue("Should not accept this")(builder.canWorkWith(executionPlan, planContext)) should equal(false)
}
}
|
HuangLS/neo4j
|
community/cypher/cypher-compiler-2.3/src/test/scala/org/neo4j/cypher/internal/compiler/v2_3/executionplan/builders/DeleteAndPropertySetBuilderTest.scala
|
Scala
|
apache-2.0
| 2,548 |
package dhg.pos.tagdict
trait StartEndTags[Tag] {
def startTag: Tag
def endTag: Tag
def swap: StartEndTags[Tag] = SimpleStartEndTags(endTag, startTag)
}
case class SimpleStartEndTags[Tag](startTag: Tag, endTag: Tag) extends StartEndTags[Tag]
|
dhgarrette/low-resource-pos-tagging-2014
|
src/main/scala/dhg/pos/tagdict/StartEndTags.scala
|
Scala
|
apache-2.0
| 249 |
package protocol
import java.nio.ByteBuffer
import scala.Array.canBuildFrom
import scala.Option.option2Iterable
import scala.io.Codec
import scala.util.Try
import org.apache.http.entity.ContentType
import org.apache.http.impl.EnglishReasonPhraseCatalog
import ch.ethz.inf.vs.californium.coap.{Message => CoapMessage}
import ch.ethz.inf.vs.californium.coap.{Option => CoapOption}
import ch.ethz.inf.vs.californium.coap.registries.CodeRegistry
import ch.ethz.inf.vs.californium.coap.registries.MediaTypeRegistry
import ch.ethz.inf.vs.californium.coap.registries.OptionNumberRegistry
import play.api.Configuration
import play.api.Play
object Translator {
private val KEY_COAP_CODE: String = "coap.response.code."
private val KEY_COAP_OPTION: String = "CoapMessage.option."
private val KEY_COAP_MEDIA: String = "CoapMessage.media."
private val KEY_HTTP_CODE: String = "http.response.code."
private val KEY_HTTP_METHOD: String = "http.request.method."
private val KEY_HTTP_HEADER: String = "http.message.header."
private val KEY_HTTP_CONTENT_TYPE: String = "http.message.content-type."
private val configuration: Configuration = Play.current.configuration
def getCoapStatusCode(httpStatusCode: Int): Option[Int] =
configuration.getInt(KEY_HTTP_CODE + httpStatusCode)
def getCoapStatusText(httpStatusCode: Int): Option[String] =
getCoapStatusCode(httpStatusCode).map(CodeRegistry.toString(_))
def getCoapOptions(httpHeaders: java.util.Map[String, Array[String]]): Seq[CoapOption] =
getCoapOptions(scala.collection.JavaConversions.mapAsScalaMap(httpHeaders))
def getCoapOptions(httpHeaders: Iterable[(String, Array[String])]): Seq[CoapOption] = {
val options: Iterable[Vector[CoapOption]] = for {
(key, values) <- httpHeaders
headerName = key.toLowerCase
headerValues = values.map(_.toLowerCase)
// get the mapping from the property file
optionCodeString <- configuration.getString(KEY_HTTP_HEADER + headerName)
// ignore the header if not found in the properties file
if !optionCodeString.isEmpty
// get the option number
// ignore the option if not recognized
optionNumber <- Try { optionCodeString.toInt }.toOption
// ignore the content-type because it will be handled within the payload
if optionNumber != OptionNumberRegistry.CONTENT_TYPE
} yield optionNumber match {
case OptionNumberRegistry.ACCEPT =>
// iterate over each content-type indicated
val options = for {
value <- headerValues
// translate the content-type
coapContentTypes = if (value.contains("*"))
MediaTypeRegistry.parseWildcard(value).to[Vector]
else
Vector[Integer](MediaTypeRegistry.parse(value))
// keep only the content-types for which a CoAP conversion exists
coapContentType <- coapContentTypes
if coapContentType != MediaTypeRegistry.UNDEFINED
} yield {
val opt = new CoapOption(optionNumber)
opt.setIntValue(coapContentType)
opt
}
options.to[Vector]
case OptionNumberRegistry.MAX_AGE =>
val option = if (headerValues.contains("no-cache")) {
val opt = new CoapOption(optionNumber)
opt.setIntValue(0)
Some(opt)
} else {
for {
headerValue <- headerValues.headOption
index = headerValue.indexOf('=')
if index >= 0
value <- Try {
headerValue.substring(index + 1).toInt
}.toOption
} yield {
val opt = new CoapOption(optionNumber)
opt.setIntValue(value)
opt
}
}
option.to[Vector]
case _ =>
val option = for {
headerValue <- headerValues.headOption
opt = new CoapOption(optionNumber)
_ <- Try {
OptionNumberRegistry.getFormatByNr(optionNumber) match {
case OptionNumberRegistry.optionFormats.INTEGER => opt.setIntValue(headerValue.toInt);
case OptionNumberRegistry.optionFormats.OPAQUE => opt.setValue(headerValue.getBytes(Codec.ISO8859.charSet));
case _ => opt.setStringValue(headerValue);
}
}.toOption
} yield opt
option.to[Vector]
}
options.flatten.to[Vector]
}
def getHttpStatusCode(coapStatusCode: Int): Option[Int] =
configuration.getInt(KEY_COAP_CODE + coapStatusCode)
def getHttpStatusText(coapStatusCode: Int): Option[String] =
getHttpStatusCode(coapStatusCode).map(EnglishReasonPhraseCatalog.INSTANCE.getReason(_, null))
def getHttpHeaders(options: java.lang.Iterable[CoapOption]): Map[String, Array[String]] =
getHttpHeaders(scala.collection.JavaConversions.iterableAsScalaIterable(options))
def getHttpHeaders(options: Iterable[CoapOption]): Map[String, Array[String]] = {
val headers = for {
option <- options
optionNumber = option.getOptionNumber
if optionNumber != OptionNumberRegistry.CONTENT_TYPE
if optionNumber != OptionNumberRegistry.PROXY_URI
headerName <- configuration.getString(KEY_COAP_OPTION + optionNumber)
if !headerName.isEmpty
stringOptionValue <- OptionNumberRegistry.getFormatByNr(optionNumber) match {
case OptionNumberRegistry.optionFormats.STRING => Some(option.getStringValue())
case OptionNumberRegistry.optionFormats.INTEGER => Some(option.getIntValue().toString)
case OptionNumberRegistry.optionFormats.UNKNOWN => Some(new String(option.getRawValue(), Codec.ISO8859.charSet))
case _ => None
}
} yield {
if (optionNumber == OptionNumberRegistry.MAX_AGE) {
(headerName, Array("max-age=" + stringOptionValue))
} else {
(headerName, Array(stringOptionValue))
}
}
headers.toMap
}
def getContentType(coapMessage: CoapMessage): ContentType = Try {
// get the coap content-type
val coapContentType = coapMessage.getContentType();
if (coapContentType == MediaTypeRegistry.UNDEFINED) {
ContentType.APPLICATION_OCTET_STREAM
} else {
// search for the media type inside the property file
val coapContentTypeString =
configuration.
getString(KEY_COAP_MEDIA + coapContentType)
.getOrElse {
val plain = MediaTypeRegistry.toString(coapContentType);
// if the coap content-type is printable, the default
// charset (i.e., UTF-8) needs to be set
if (MediaTypeRegistry.isPrintable(coapContentType))
plain + "; charset=UTF-8"
else
plain
}
ContentType.parse(coapContentTypeString)
}
}.getOrElse(ContentType.APPLICATION_OCTET_STREAM)
def getContent(coapMessage: CoapMessage): String = {
// check if coap request has a payload
val payload = coapMessage.getPayload()
if (payload == null || payload.length == 0)
return ""
// get the charset
val contentType = getContentType(coapMessage)
val charset = contentType.getCharset()
if (charset == null)
return new String(payload, Codec.ISO8859.charSet) // no charset given: fall back to the HTTP default (ISO-8859-1)
// if there is a charset, the content is not binary.
// According to the ContentType class, the default content-type with
// UTF-8 charset is application/json. If the parsed content-type
// is different and its charset is not ISO-8859-1, a translation
// is needed
if (charset.equals(Codec.ISO8859.charSet) || contentType == ContentType.APPLICATION_JSON)
return new String(payload, charset)
Try(charset.decode(ByteBuffer.wrap(payload)).toString).getOrElse("")
}
}
|
liamjjmcnamara/sicsthsense
|
web/app/protocol/Translator.scala
|
Scala
|
apache-2.0
| 7,664 |
/*
* Copyright (C) 2013-2015 by Michael Hombre Brinkmann
*/
package net.twibs.web
import net.twibs.util.{Request, ResponseRequest}
abstract class LoadingCacheResponder(delegate: Responder) extends CacheResponder {
def respond(request: Request): Option[Response] =
request.use {
val requestCacheKey = request.responseRequest
if (!request.useCache) {
cache.invalidate(requestCacheKey)
}
getIfPresentAndNotModified(requestCacheKey) getOrElse respond(requestCacheKey)
}
def getIfPresentAndNotModified(requestCacheKey: ResponseRequest) =
Option(cache.getIfPresent(requestCacheKey)).flatMap {
case Some(response) if !response.isModified =>
Some(Some(response))
case any =>
cache.invalidate(requestCacheKey)
None
}
}
|
hombre/twibs
|
twibs-web/src/main/scala/net/twibs/web/LoadingCacheResponder.scala
|
Scala
|
apache-2.0
| 804 |
package freecli
package core
package api
case class Merger[F](f: F) {
def ::[F2](merger: Merger[F2])(implicit ev: CanMerge[F2, F]): ev.Out = {
ev(merger.f, f)
}
}
trait CanMerge[F1, F2] {
type Out
def apply(f1: F1, f2: F2): Out
}
object CanMerge {
type Aux[F1, F2, Out0] = CanMerge[F1, F2] { type Out = Out0 }
}
|
pavlosgi/freecli
|
core/src/main/scala/freecli/core/api/Merger.scala
|
Scala
|
apache-2.0
| 329 |
package org.example1.usage
import org.example1.declaration.{Y, Z}
import org.example1.declaration.data.{A, B, C, X}
trait Usage_MergeToExisting_Imports_3_3 {
val a: A = ???
val b: B = ???
val c: C = ???
val x: X = ???
}
|
JetBrains/intellij-scala
|
scala/scala-impl/testdata/move/allInOne/after/org/example1/usage/Usage_MergeToExisting_Imports_3_3.scala
|
Scala
|
apache-2.0
| 231 |
package com.cloudray.scalapress.search
import org.scalatest.{OneInstancePerTest, FunSuite}
import org.scalatest.mock.MockitoSugar
import javax.servlet.http.HttpServletRequest
import com.cloudray.scalapress.folder.Folder
import com.cloudray.scalapress.search.tag.CorpusResultSnippetTag
import com.cloudray.scalapress.util.UrlGenerator
import com.cloudray.scalapress.framework.{ScalapressRequest, ScalapressContext}
/** @author Stephen Samuel */
class CorpusResultSnippetTest extends FunSuite with OneInstancePerTest with MockitoSugar {
val context = new ScalapressContext()
val req = mock[HttpServletRequest]
val folder = new Folder
folder.id = 124
folder.name = "a team"
val r = new CorpusResult(folder.name, UrlGenerator.url(folder), "...hannibal loves it when a plan comes together...")
test("corpus result snippet tag uses snippet from corpus result") {
val actual = new CorpusResultSnippetTag().render(ScalapressRequest(req, context).withResult(r))
assert("...hannibal loves it when a plan comes together..." === actual.get)
}
}
|
vidyacraghav/scalapress
|
src/test/scala/com/cloudray/scalapress/search/CorpusResultSnippetTest.scala
|
Scala
|
apache-2.0
| 1,087 |
//package dhg.ccg.parse.pcfg
//
//import org.junit.Test
//import org.junit.Assert._
//import dhg.ccg.parse._
//import dhg.ccg.parse.pcfg.mcmc._
//import dhg.ccg.prob._
//import dhg.ccg.cat._
//import dhg.ccg.tagdict._
//import dhg.util._
//import dhg.util._
//import dhg.ccg.util._
//import dhg.util._
//import dhg.util._
//import scala.util.Random
//
//class SupPcfgTrainerITests {
//
// val A: Cat = cat"A"
// val B: Cat = cat"B"
// val C: Cat = cat"C"
// val D: Cat = cat"D"
// val E: Cat = cat"E"
// val F: Cat = cat"F"
// // val G: Cat = cat"G"
// // val H: Cat = cat"H"
// val Z: Cat = cat"Z"
//
// @Test
// def i_test_UnsmoothedSupPcfgTrainer_train {
// throw new NotImplementedError("Test not written")
// }
//
// @Test
// def i_test_AlphaBetaSupPcfgTrainer {
// type Word = String
//
// val alphaRoot = LogDouble(2.1)
// val alphaBiny = LogDouble(2.3)
// val alphaUnry = LogDouble(2.7)
// val alphaTerm = LogDouble(2.9)
// val alphaLambda = LogDouble(30.3)
//
// val catIndexer = SimpleIndexer(Vector(A, B, C, D, E, F, Z))
// val wordIndexer = SimpleIndexer("a1,a2,b1,c1,c2,c3,d1,d2,e1,z1".lsplit(","))
// val numCats = catIndexer.size
// val numWords = wordIndexer.size
//
// val s1: CcgTree = CcgLeaf(A, "a1")
// val s2: CcgTree = CcgLeaf(B, "b1")
//
// val priorRootDistO: Map[Cat, LogDouble] = Map(
// A -> 0.91,
// B -> 0.92,
// C -> 0.93,
// D -> 0.94,
// E -> 0.95,
// F -> 0.96)
// .mapVals(LogDouble(_))
//
// val priorBinyDistO: Vector[(Cat, (BinaryProd, LogDouble))] = Vector(
// (A, BinaryProd(B, C) -> 0.11),
// (A, BinaryProd(E, D) -> 0.12),
// (A, BinaryProd(E, F) -> 0.13),
//
// (C, BinaryProd(A, D) -> 0.14),
// (C, BinaryProd(D, E) -> 0.15),
// (C, BinaryProd(D, F) -> 0.16),
// (C, BinaryProd(E, D) -> 0.17),
// (C, BinaryProd(E, F) -> 0.18),
//
// (D, BinaryProd(B, C) -> 0.19),
//
// (E, BinaryProd(D, F) -> 0.21),
// (E, BinaryProd(B, C) -> 0.22))
// .map { case (a, (b, c)) => (a, (b, LogDouble(c))) }
//
// val priorUnryDistO: Vector[(Cat, (UnaryProd, LogDouble))] = Vector(
// (A, UnaryProd(B) -> 0.23),
// (A, UnaryProd(D) -> 0.24),
// (A, UnaryProd(E) -> 0.25),
// //(A, UnaryProd(Z) -> 0.41),
//
// (C, UnaryProd(A) -> 0.26),
// (C, UnaryProd(D) -> 0.27),
// (C, UnaryProd(E) -> 0.28),
//
// (D, UnaryProd(B) -> 0.42),
//
// (E, UnaryProd(B) -> 0.43),
// (E, UnaryProd(D) -> 0.44) // ,
// //(Z, UnaryProd(Z) -> 0.45)
// )
// .map { case (a, (b, c)) => (a, (b, LogDouble(c))) }
//
// val priorTermDistO: Vector[(Cat, (TermProd, LogDouble))] = Vector(
// (A, TermProd("a1") -> 0.31),
// (A, TermProd("a2") -> 0.32),
// //(A, TermProd("z") -> 0.46),
//
// (C, TermProd("c1") -> 0.33),
// (C, TermProd("c2") -> 0.34),
// (C, TermProd("c3") -> 0.35),
//
// (D, TermProd("d2") -> 0.36),
//
// (E, TermProd("e1") -> 0.37) //,
// //(Z, TermProd("z") -> 0.47)
// )
// .map { case (a, (b, c)) => (a, (b, LogDouble(c))) }
//
// val knownRoots: Array[Int] = priorRootDistO.keys.map(catIndexer).toArray.sorted
// val knownBinys: Array[IndirectSparseVec[Array[Int]]] = DenseVec(priorBinyDistO.groupByKey.map { case (t, prods) => catIndexer(t) -> IndirectSparseVec(prods.collect { case (BinaryProd(u, v), _) => (catIndexer(u), catIndexer(v)) }.groupByKey.mapVals(_.toArray.sorted), numCats) }, numCats).values
// val knownUnrys: IndirectSparseVec[Array[Int]] = IndirectSparseVec(priorUnryDistO.groupByKey.map { case (t, prods) => catIndexer(t) -> prods.collect { case (UnaryProd(u), _) => catIndexer(u) }.toArray.sorted }, numCats)
// val knownTerms: Array[Array[Int]] = DenseVec(priorTermDistO.groupByKey.map { case (t, prods) => catIndexer(t) -> prods.collect { case (TermProd(w), _) => wordIndexer(w) }.toArray.sorted }, numCats).values
//
// val priorRootDist: IndirectSparseVec[LogDouble] = IndirectSparseVec(priorRootDistO.mapKeys(catIndexer), numCats)
// val priorBinyDist: Array[IndirectSparseVec[IndirectSparseVec[LogDouble]]] = DenseVec(priorBinyDistO.groupByKey.map { case (t, prods) => catIndexer(t) -> IndirectSparseVec(prods.toVector.collect { case (BinaryProd(u, v), p) => (catIndexer(u), (catIndexer(v), p)) }.groupByKey.mapVals(vps => IndirectSparseVec(vps.toMap, numCats)), numCats) }, numCats).values
// val priorUnryDist: IndirectSparseVec[IndirectSparseVec[LogDouble]] = IndirectSparseVec(priorUnryDistO.groupByKey.map { case (t, prods) => catIndexer(t) -> IndirectSparseVec(prods.collect { case (UnaryProd(u), p) => (catIndexer(u), p) }, numCats) }, numCats)
// val priorTermDist: Array[IndirectSparseVec[LogDouble]] = DenseVec(priorTermDistO.groupByKey.map { case (t, prods) => catIndexer(t) -> IndirectSparseVec(prods.collect { case (TermProd(w), p) => (wordIndexer(w), p) }, numWords) }, numCats).values
//
// val mockResultingParser: PcfgParserI = new PcfgParserI(
// rootDist = IndirectSparseVec.empty[LogDouble](0), // t -> p
// binyDist = Array[IndirectSparseVec[IndirectSparseVec[LogDouble]]](), // t -> u -> v -> p
// unryDist = IndirectSparseVec.empty[IndirectSparseVec[LogDouble]](0), // t -> u -> p
// termDist = Array[IndirectSparseVec[LogDouble]](), // t -> w -> p
// mixDist = Array[(LogDouble, LogDouble, LogDouble)]())( // t -> (bmix,umix,tmix)
// catIndexer = SimpleIndexer[Cat](Vector()), wordIndexer = SimpleIndexer[String](Vector()))
//
// val mockPcfgParserInstantiater: PcfgParserInstantiaterI = new PcfgParserInstantiaterI {
// def apply(
// rootDist: IndirectSparseVec[LogDouble], // t -> p
// binyDist: Array[IndirectSparseVec[IndirectSparseVec[LogDouble]]], // t -> u -> v -> p
// unryDist: IndirectSparseVec[IndirectSparseVec[LogDouble]], // t -> u -> p
// termDist: Array[IndirectSparseVec[LogDouble]], // t -> w -> p
// mixDist: Array[(LogDouble, LogDouble, LogDouble)]) // t -> (bmix,umix,tmix)
// (catIndexer: Indexer[Cat], wordIndexer: Indexer[String]) = {
//
// /* ROOTS
// * A: 11+21 + (2.1 * 0.91) = 33.911 / 119.781 = 0.2831083393860462
// * B: 25 + (2.1 * 0.92) = 26.932 / 119.781 = 0.22484367303662514
// * C: 14 + (2.1 * 0.93) = 15.953 / 119.781 = 0.13318472879672066
// * D: 15+22 + (2.1 * 0.94) = 38.974 / 119.781 = 0.3253771466259256
// * E: 0 + (2.1 * 0.95) = 1.995 / 119.781 = 0.016655396097878628
// * F: 0 + (2.1 * 0.96) = 2.016 / 119.781 = 0.016830716056803665
// * ----- ---- -------
// * 108 + 2.1 * 5.61 = 119.781
// */
// assertEquals(7, rootDist.length)
// assertEquals(6, rootDist.activeCount)
// assertEqualsLog(LogDouble((32 + (2.1 * 0.91)) / (108 + 2.1 * 5.61)), rootDist(catIndexer(A)), 1e-9)
// assertEqualsLog(LogDouble((25 + (2.1 * 0.92)) / (108 + 2.1 * 5.61)), rootDist(catIndexer(B)), 1e-9)
// assertEqualsLog(LogDouble((14 + (2.1 * 0.93)) / (108 + 2.1 * 5.61)), rootDist(catIndexer(C)), 1e-9)
// assertEqualsLog(LogDouble((37 + (2.1 * 0.94)) / (108 + 2.1 * 5.61)), rootDist(catIndexer(D)), 1e-9)
// assertEqualsLog(LogDouble((0 + (2.1 * 0.95)) / (108 + 2.1 * 5.61)), rootDist(catIndexer(E)), 1e-9)
// assertEqualsLog(LogDouble((0 + (2.1 * 0.96)) / (108 + 2.1 * 5.61)), rootDist(catIndexer(F)), 1e-9)
//
// /*
// * BINARY PRODS
// */
//
// assertEquals(7, binyDist.length)
// assertNull(binyDist(catIndexer(Z)))
//
// /*
// * A -> BC 45+11 + (2.3 * 0.11) = 56.253 / 82.828 =
// * A -> ED 0 + (2.3 * 0.12) = 0.276 / 82.828 =
// * A -> EF 26 + (2.3 * 0.13) = 26.299 / 82.828 =
// * ----- ---- ------
// * 82 + (2.3 * 0.36) = 82.828 / 82.828 =
// */
//
// assertEquals(7, binyDist(catIndexer(A)).length)
// assertEquals(2, binyDist(catIndexer(A)).activeCount)
// assertEquals(7, binyDist(catIndexer(A))(catIndexer(B)).length)
// assertEquals(1, binyDist(catIndexer(A))(catIndexer(B)).activeCount)
// assertEqualsLog(LogDouble((45 + 11 + (2.3 * 0.11)) / (82 + (2.3 * 0.36))), binyDist(catIndexer(A))(catIndexer(B))(catIndexer(C)), 1e-9)
// assertEquals(7, binyDist(catIndexer(A))(catIndexer(B)).length)
// assertEquals(2, binyDist(catIndexer(A))(catIndexer(E)).activeCount)
// assertEqualsLog(LogDouble((0 + (2.3 * 0.12)) / (82 + (2.3 * 0.36))), binyDist(catIndexer(A))(catIndexer(E))(catIndexer(D)), 1e-9)
// assertEqualsLog(LogDouble((26 + (2.3 * 0.13)) / (82 + (2.3 * 0.36))), binyDist(catIndexer(A))(catIndexer(E))(catIndexer(F)), 1e-9)
//
// /*
// * UNARY PRODS
// */
//
// assertEquals(7, unryDist.length)
// assertEquals(4, unryDist.activeCount)
//
// /*
// * A -> B 14+88 + (2.7 * 0.23) = 102.621 / 169.944 = 0.603851857082333
// * A -> D 0 + (2.7 * 0.24) = 0.648 / 169.944 = 0.0038130207597796926
// * A -> E 66 + (2.7 * 0.25) = 66.675 / 169.944 = 0.3923351221578873
// * ----- ---- -------
// * 168 + (2.7 * 0.72) = 169.944
// */
//
// assertEquals(7, unryDist(catIndexer(A)).length)
// assertEquals(3, unryDist(catIndexer(A)).activeCount)
// assertEqualsLog(LogDouble((14 + 88 + (2.7 * 0.23)) / (168 + (2.7 * 0.72))), unryDist(catIndexer(A))(catIndexer(B)), 1e-9)
// assertEqualsLog(LogDouble((0 + (2.7 * 0.24)) / (168 + (2.7 * 0.72))), unryDist(catIndexer(A))(catIndexer(D)), 1e-9)
// assertEqualsLog(LogDouble((66 + (2.7 * 0.25)) / (168 + (2.7 * 0.72))), unryDist(catIndexer(A))(catIndexer(E)), 1e-9)
//
// /*
// * TERMINAL PRODS
// */
//
// assertEquals(7, termDist.length)
// assertNull(termDist(catIndexer(Z)))
//
// /*
// * A -> a1 17+62 + (2.9 * 0.31) = 79.899 / 116.827 = 0.6839086854922236
// * A -> a2 36 + (2.9 * 0.32) = 36.928 / 116.827 = 0.31609131450777644
// * ----- ---- -------
// * 115 + (2.9 * 0.63) = 116.827
// */
//
// assertEquals(10, termDist(catIndexer(A)).length)
// assertEquals(2, termDist(catIndexer(A)).activeCount)
// assertEqualsLog(LogDouble((17 + 62 + (2.9 * 0.31)) / (115 + (2.9 * 0.63))), termDist(catIndexer(A))(wordIndexer("a1")), 1e-9)
// assertEqualsLog(LogDouble((36 + (2.9 * 0.32)) / (115 + (2.9 * 0.63))), termDist(catIndexer(A))(wordIndexer("a2")), 1e-9)
//
// /*
// * PROD MIX
// */
//
// assertEquals(7, mixDist.length)
//
// /*
// * A -> BC 45+11
// * A -> ED 0
// * A -> EF 26
// * -----
// * 82 + 30.3*0.5 = 97.15 / 398.33 = 0.2438932543368564
// *
// * A -> B 14+88
// * A -> D 0
// * A -> E 66
// * -----
// * 168 + 30.3*0.4 = 180.12 / 398.33 = 0.45218788441744284
// *
// * A -> a1 17+62
// * A -> a2 36
// * -----
// * 115 + 30.3*0.2 = 121.06 / 398.33 = 0.30391886124570083
// */
//
// assertEqualsLog(LogDouble((82 + 30.3 * 0.5) / (365 + 30.3 * 1.1)), mixDist(catIndexer(A))._1, 1e-9)
// assertEqualsLog(LogDouble((168 + 30.3 * 0.4) / (365 + 30.3 * 1.1)), mixDist(catIndexer(A))._2, 1e-9)
// assertEqualsLog(LogDouble((115 + 30.3 * 0.2) / (365 + 30.3 * 1.1)), mixDist(catIndexer(A))._3, 1e-9)
//
// /*
// * 0 + 30.3*0.5 = 15.15 / 33.33 =
// * 0 + 30.3*0.4 = 12.12 / 33.33 =
// * 0 + 30.3*0.2 = 6.06 / 33.33 =
// * --------
// * 30.3*1.1
// */
//
// assertEqualsLog(LogDouble((30.3 * 0.5) / (30.3 * 1.1)), mixDist(catIndexer(Z))._1, 1e-9)
// assertEqualsLog(LogDouble((30.3 * 0.4) / (30.3 * 1.1)), mixDist(catIndexer(Z))._2, 1e-9)
// assertEqualsLog(LogDouble((30.3 * 0.2) / (30.3 * 1.1)), mixDist(catIndexer(Z))._3, 1e-9)
//
// mockResultingParser
// }
// }
//
// val sampledTrees: Array[CcgTreeI] = Array(s1, s2).map(CcgTreeI.to(_, catIndexer, wordIndexer))
//
// val mockProductionFinder: PcfgProductionCounterI = new PcfgProductionCounterI {
// def counts(trees: Array[CcgTreeI], numTrees: Int,
// paramKnownRoots: Array[Int], // ts
// paramKnownBinys: Array[IndirectSparseVec[Array[Int]]], // t -> u -> vs
// paramKnownUnrys: IndirectSparseVec[Array[Int]], // t -> us
// paramKnownTerms: Array[Array[Int]], // t -> ws
// paramNumCats: Int, paramNumWords: Int): ( //
// IndirectSparseVec[Int], // t -> c
// Array[IndirectSparseVec[IndirectSparseVec[Int]]], // t -> u -> v -> c
// IndirectSparseVec[IndirectSparseVec[Int]], // t -> u -> c
// Array[IndirectSparseVec[Int]]) // t -> w -> c
// = {
//
// assertSame(sampledTrees, trees)
// assertEquals(sampledTrees.length, numTrees)
// assertSame(knownRoots, paramKnownRoots)
// assertSame(knownBinys, paramKnownBinys)
// assertSame(knownUnrys, paramKnownUnrys)
// assertSame(knownTerms, paramKnownTerms)
// assertEquals(numCats, paramNumCats)
// assertEquals(numWords, paramNumWords)
//
// val rootCounts: IndirectSparseVec[Int] =
// IndirectSparseVec(paramKnownRoots.map { t =>
// t -> Map(A -> 11, B -> 25, C -> 14, D -> 15).withDefaultValue(0)(catIndexer.obj(t))
// }, numCats)
//
// val binyCounts: Array[IndirectSparseVec[IndirectSparseVec[Int]]] =
// paramKnownBinys.zipWithIndex.map {
// case (tKnownBinys, t) =>
// if (tKnownBinys != null) {
// IndirectSparseVec(tKnownBinys.activePairs.map {
// case (u, vs) =>
// u -> IndirectSparseVec(vs.map { v =>
// val m = Map(
// A -> Map(BinaryProd(B, C) -> 45, BinaryProd(E, F) -> 26),
// C -> Map(BinaryProd(D, E) -> 47, BinaryProd(E, F) -> 27),
// D -> Map(BinaryProd(B, C) -> 23))
// .mapVals(_.withDefaultValue(0))
// .withDefaultValue(Map[BinaryProd, Int]().withDefaultValue(0))
// v -> m(catIndexer.obj(t))(BinaryProd(catIndexer.obj(u), catIndexer.obj(v)))
// }, numCats)
// }, numCats)
// }
// else null
// }
//
// val unryCounts: IndirectSparseVec[IndirectSparseVec[Int]] =
// IndirectSparseVec(paramKnownUnrys.activePairs.map {
// case (t, us) =>
// t -> IndirectSparseVec(us.map { u =>
// val m = Map(
// A -> Map(UnaryProd(B) -> 88, UnaryProd(E) -> 66),
// C -> Map(UnaryProd(D) -> 90, UnaryProd(E) -> 67),
// D -> Map(UnaryProd(B) -> 26))
// .mapVals(_.withDefaultValue(0))
// .withDefaultValue(Map[UnaryProd, Int]().withDefaultValue(0))
// u -> m(catIndexer.obj(t))(UnaryProd(catIndexer.obj(u)))
// }, numCats)
// }, numCats)
//
// val termCounts: Array[IndirectSparseVec[Int]] =
// paramKnownTerms.zipWithIndex.map {
// case (ws, t) =>
// if (ws != null) {
// IndirectSparseVec(ws.map { w =>
// val m = Map(
// A -> Map(TermProd("a1") -> 62, TermProd("a2") -> 36),
// C -> Map(TermProd("c1") -> 65, TermProd("c2") -> 29, TermProd("c3") -> 38),
// E -> Map(TermProd("e1") -> 39))
// .mapVals(_.withDefaultValue(0))
// .withDefaultValue(Map[TermProd, Int]().withDefaultValue(0))
// w -> m(catIndexer.obj(t))(TermProd(wordIndexer.obj(w)))
// }, numWords)
// }
// else null
// }
//
// (rootCounts, binyCounts, unryCounts, termCounts)
// }
// }
//
// val goldRootCounts: IndirectSparseVec[Int] =
// IndirectSparseVec(knownRoots.map { t =>
// t -> Map(A -> 21, D -> 22).withDefaultValue(0)(catIndexer.obj(t))
// }, numCats)
//
// val goldBinyCounts: Array[IndirectSparseVec[IndirectSparseVec[Int]]] =
// knownBinys.zipWithIndex.map {
// case (tKnownBinys, t) =>
// if (tKnownBinys != null) {
// IndirectSparseVec(tKnownBinys.activePairs.map {
// case (u, vs) =>
// u -> IndirectSparseVec(vs.map { v =>
// val m = Map(
// A -> Map(BinaryProd(B, C) -> 11),
// C -> Map(BinaryProd(D, E) -> 12, BinaryProd(E, D) -> 13))
// .mapVals(_.withDefaultValue(0))
// .withDefaultValue(Map[BinaryProd, Int]().withDefaultValue(0))
// v -> m(catIndexer.obj(t))(BinaryProd(catIndexer.obj(u), catIndexer.obj(v)))
// }, numCats)
// }, numCats)
// }
// else null
// }
//
// val goldUnryCounts: IndirectSparseVec[IndirectSparseVec[Int]] =
// IndirectSparseVec(knownUnrys.activePairs.map {
// case (t, us) =>
// t -> IndirectSparseVec(us.map { u =>
// val m = Map(
// A -> Map(UnaryProd(B) -> 14),
// C -> Map(UnaryProd(A) -> 15, UnaryProd(D) -> 16))
// .mapVals(_.withDefaultValue(0))
// .withDefaultValue(Map[UnaryProd, Int]().withDefaultValue(0))
// u -> m(catIndexer.obj(t))(UnaryProd(catIndexer.obj(u)))
// }, numCats)
// }, numCats)
//
// val goldTermCounts: Array[IndirectSparseVec[Int]] =
// knownTerms.zipWithIndex.map {
// case (ws, t) =>
// if (ws != null) {
// IndirectSparseVec(ws.map { w =>
// val m = Map(
// A -> Map(TermProd("a1") -> 17),
// C -> Map(TermProd("c1") -> 18))
// .mapVals(_.withDefaultValue(0))
// .withDefaultValue(Map[TermProd, Int]().withDefaultValue(0))
// w -> m(catIndexer.obj(t))(TermProd(wordIndexer.obj(w)))
// }, numWords)
// }
// else null
// }
//
// val absct =
// new AlphaBetaSupPcfgTrainerI(
// priorRootDist: IndirectSparseVec[LogDouble], // t -> p
// priorBinyDist: Array[IndirectSparseVec[IndirectSparseVec[LogDouble]]], // t -> u -> v -> p
// priorUnryDist: IndirectSparseVec[IndirectSparseVec[LogDouble]], // t -> u -> p
// priorTermDist: Array[IndirectSparseVec[LogDouble]], // t -> w -> p
// alphaRoot: LogDouble, alphaBiny: LogDouble, alphaUnry: LogDouble, alphaTerm: LogDouble,
// alphaLambda: LogDouble, priorBinyProdMix = LogDouble(0.5), priorUnryProdMix = LogDouble(0.4), priorTermProdMix = LogDouble(0.2),
// mockProductionFinder: PcfgProductionCounterI,
// mockPcfgParserInstantiater: PcfgParserInstantiaterI,
// knownRoots: Array[Int], // ts
// knownBinys: Array[IndirectSparseVec[Array[Int]]], // t -> u -> vs
// knownUnrys: IndirectSparseVec[Array[Int]], // t -> us
// knownTerms: Array[Array[Int]], // t -> ws
// numCats: Int, numWords: Int,
// goldRootCounts: IndirectSparseVec[Int], // t -> c
// goldBinyCounts: Array[IndirectSparseVec[IndirectSparseVec[Int]]], // t -> u -> v -> c
// goldUnryCounts: IndirectSparseVec[IndirectSparseVec[Int]], // t -> u -> c
// goldTermCounts: Array[IndirectSparseVec[Int]] // t -> w -> c
// )(catIndexer: Indexer[Cat], wordIndexer: Indexer[String])
//
// val parser: PcfgParserI = absct.train(sampledTrees)
//
// assertSame(mockResultingParser, parser)
// }
//
// def assertEqualsLog(a: LogDouble, b: LogDouble, e: Double) {
// assertEquals(a.toDouble, b.toDouble, e)
// }
//}
|
dhgarrette/2015-ccg-parsing
|
src/test/scala/dhg/ccg/parse/pcfg/SupPcfgTrainerITests.scala
|
Scala
|
apache-2.0
| 20,778 |
package com.twitter.finagle.mysql.transport
import com.twitter.io.Buf
import org.scalatest.FunSuite
class PacketTest extends FunSuite {
val seq = 2.toShort
val bytes = Array[Byte](0x01, 0x02, 0x03, 0x04)
val body = Buf.ByteArray.Owned(bytes)
val packet = Packet(seq, body)
test("Encode a Packet") {
val br = MysqlBuf.reader(packet.toBuf)
assert(bytes.size == br.readMediumLE())
assert(seq == br.readByte())
assert(bytes === br.take(br.remaining))
}
}
|
luciferous/finagle
|
finagle-mysql/src/test/scala/com/twitter/finagle/mysql/unit/transport/PacketTest.scala
|
Scala
|
apache-2.0
| 482 |
package simple_interpreter
import Token.TokenType
class IllegalInterpreterExpression extends Exception
class Interpreter(private val lexer: Lexer) {
var currentToken: Option[Token] = Option(lexer.getNextToken)
def error = throw new IllegalInterpreterExpression
def eat(tokenType: TokenType): Unit = {
if (currentToken.get.tokenType == tokenType) {
currentToken = Option(lexer.getNextToken)
}
else {
error
}
}
def factor(): Int = {
val token = getCurrentToken()
token match {
case IntegerToken(value) => getIntegerToken(value)
case LeftParenthesis() => performParenthesisCalculation
}
}
def performParenthesisCalculation: Int = {
eat(Token.LeftParenthesis)
val result = expr
eat(Token.RightParenthesis)
result
}
def getIntegerToken(value: Int): Int = {
eat(Token.Integer)
value
}
def term(accumulator: Int): Int = {
val nextToken = getCurrentToken()
nextToken match {
case MultiplicationToken() => term(handleMultiplication(accumulator))
case DivisionToken() => term(handleDivision(accumulator))
case _ => accumulator
}
}
def term(): Int = {
val left = factor()
val operator = getCurrentToken()
operator match {
case MultiplicationToken() | DivisionToken() => term(left)
case _ => left
}
}
def handleAddition(left: Int): Int = {
eat(Token.Plus)
val right = term()
left + right
}
def handleSubstraction(left: Int): Int = {
eat(Token.Minus)
val right = term()
left - right
}
def handleMultiplication(left: Int): Int = {
eat(Token.Multiplication)
val right = factor()
left * right
}
def handleDivision(left: Int): Int = {
eat(Token.Division)
val right = factor()
left / right
}
def getCurrentToken(): Token = {
val op = currentToken.getOrElse(EmptyToken).asInstanceOf[Token]
op
}
def expr(accumulator: Int): Int = {
val nextToken = getCurrentToken()
nextToken match {
case PlusToken() => expr(handleAddition(accumulator))
case MinusToken() => expr(handleSubstraction(accumulator))
case _ => accumulator
}
}
def expr: Int = {
val left = term()
expr(left)
}
}
object Interpreter {
def build(string: String): Interpreter = {
new Interpreter(Lexer.build(string))
}
}
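// A hedged usage sketch, not part of the original file: evaluating a small arithmetic
// expression end to end. It assumes the Lexer built by Lexer.build tokenizes a plain
// input string like the one below (whitespace handling is not shown in this file).
object InterpreterExample {
def main(args: Array[String]): Unit = {
val result = Interpreter.build("(1+2)*3").expr // parentheses are evaluated first, then '*'
println(result) // 9
}
}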
|
CucumisSativus/simple-interpreter-scala
|
src/main/scala/simple_interpreter/Interpreter.scala
|
Scala
|
mit
| 2,373 |
package com.programmaticallyspeaking.ncd.boot
import java.net.ConnectException
import akka.actor.ActorSystem
import com.programmaticallyspeaking.ncd.chrome.domains.EventEmitHook
import com.programmaticallyspeaking.ncd.chrome.net.FilePublisher
import com.programmaticallyspeaking.ncd.config.Conf
import com.programmaticallyspeaking.ncd.host.{ScriptEvent, ScriptHost}
import com.programmaticallyspeaking.ncd.ioc.Container
import com.programmaticallyspeaking.ncd.messaging.Observer
import com.programmaticallyspeaking.ncd.nashorn.{NashornDebugger, NashornDebuggerConnector, NashornScriptHost}
import org.slf4s.Logging
import scala.concurrent.{Future, Promise}
import scala.util.control.NonFatal
import scala.util.{Failure, Success}
case class BrokerConnection(host: NashornScriptHost, disconnect: () => Unit)
class Broker(conf: Conf)(implicit actorSystem: ActorSystem) extends Logging {
import scala.concurrent.ExecutionContext.Implicits._
def connect(errorCallback: Option[Throwable] => Unit): Future[BrokerConnection] = {
val connectAddr = conf.connect()
val connector = new NashornDebuggerConnector(connectAddr.host, connectAddr.port)
val debuggerReady = connector.connect().map(vm => new NashornDebugger().create(vm))
val connectionPromise = Promise[BrokerConnection]()
debuggerReady.onComplete {
case Success(host) =>
startListening(host, errorCallback)
try {
def disconnect(): Unit = {
host.virtualMachine.inner.dispose()
}
// Writing just 'disconnect' results in compilation warning about deprecated ETA expansion.
val conn = BrokerConnection(host, disconnect _)
connectionPromise.success(conn)
} catch {
case NonFatal(t) =>
log.error("Binding failed", t)
connectionPromise.failure(new RuntimeException("connection failed"))
}
case Failure(t) =>
t match {
case _: ConnectException =>
log.error("Failed to connect to the debug target.")
log.error("Please make sure that the debug target is started with debug VM arguments, for example:")
log.error(s" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=${connectAddr.host}:${connectAddr.port}")
case _ =>
log.error("Failed to start the debugger", t)
}
// Wrap in RuntimeException if needed, otherwise we'll get UndeclaredThrowableException wrapping the cause.
val error = if (t.isInstanceOf[RuntimeException]) t else new RuntimeException(t)
connectionPromise.failure(error)
}
connectionPromise.future
}
private def startListening(host: NashornScriptHost, errorCallback: Option[Throwable] => Unit) = {
host.events.subscribe(new Observer[ScriptEvent] {
override def onNext(item: ScriptEvent): Unit = {}
override def onError(error: Throwable): Unit = {
log.error("Unknown error", error)
errorCallback(Some(error))
}
override def onComplete(): Unit = {
log.info("The debug target disconnected")
errorCallback(None)
}
})
}
}
class BootContainer(filePublisher: FilePublisher, scriptHost: ScriptHost) extends Container(Seq(filePublisher, scriptHost, new EventEmitHook))
|
provegard/ncdbg
|
src/main/scala/com/programmaticallyspeaking/ncd/boot/Broker.scala
|
Scala
|
bsd-3-clause
| 3,300 |
/**
* Class representing non-empty collections
*
* @param mensajeInicial
* @param resto
*/
class TendenciaNoVacia(mensajeInicial: Tweet, resto: Tendencia) extends Tendencia {
/**
* Appends the message at the end of the sequence
*
* @param mensaje
* @return
*/
def +(mensaje: Tweet): Tendencia = new TendenciaNoVacia(mensajeInicial, resto + mensaje)
/**
* Returns the initial message
*
* @return
*/
def head: Tweet = mensajeInicial
/**
* Returns the remaining messages
*
* @return
*/
def tail: Tendencia = resto
/**
* Indicates whether the collection is empty: never, by definition
*
* @return
*/
def isEmpty: Boolean = false
/**
* Length of the trend
*
* @return
*/
def length: Integer = resto.length + 1 // Recursive call, incrementing the "counter"
/**
* toString method
*
* @return
*/
override def toString = "TendenciaNoVacia(" + mensajeInicial.retweets + ", " + resto + ")"
}
|
fblupi/grado_informatica-NTP
|
Practicas/P4/src/TendenciaNoVacia.scala
|
Scala
|
gpl-2.0
| 1,013 |
// A simple nonblocking counter. Can be used to implement semaphores,
// or the equivalent to asynchronous unit channels in the join calculus.
package chemistry
final class Counter(init: Int = 0) {
private val state = Ref[java.lang.Integer](init)
val get = state.upd[Int] { case i => (i, i) }
val inc = state.upd[Int] { case i => (i+1, i) }
val dec = state.upd[Int] { case n if (n > 0) => (n-1, n) }
val tryDec = state.upd[Option[Int]] {
case n if (n == 0) => (0, None)
case n => (n-1, Some(n))
}
}
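// A hedged usage sketch, not part of the original file: treating the counter as a semaphore.
// The `!` reagent-invocation syntax below is an assumption about the surrounding chemistry
// library's Reagent API; adjust it if the actual entry point differs.
//
// val sem = new Counter(4)
// sem.dec ! () // acquire: retries while the count is zero
// sem.inc ! () // release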
|
aturon/ChemistrySet
|
src/main/scala/data/Counter.scala
|
Scala
|
bsd-2-clause
| 539 |
package com.typesafe.scalalogging
trait LazyLogging extends com.typesafe.scalalogging.slf4j.LazyLogging
trait StrictLogging extends com.typesafe.scalalogging.slf4j.StrictLogging
|
nagavallia/geomesa
|
geomesa-logger/src/main/scala_2.10/com/typesafe/scalalogging/Logging.scala
|
Scala
|
apache-2.0
| 180 |
/*
* Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.internal.parboiled2
package object support {
private[parboiled2] def `n/a` = throw new IllegalStateException("Untranslated compile-time only call")
}
|
ZizhengTai/http4s
|
parboiled2/src/main/scala/org/http4s/internal/parboiled2/support/package.scala
|
Scala
|
apache-2.0
| 796 |
/*
* Copyright (c) <2013>, Amanj Sherwany <http://www.amanj.me>
* All rights reserved.
* */
package ch.usi.inf.l3.mina.store
import ch.usi.inf.l3.mina.eval._
import ch.usi.inf.l3.mina._
trait HPEEnvironmentWrapper {
self: HPE =>
import self.global._
// ---------------------- Environment ---------------------------------------
class Environment private (private val location: Map[Symbol, Int],
private val store: Map[Int, Value],
private val loc: Int) {
private def this() {
this(Map.empty, Map.empty, -1)
}
def getValue(s: Symbol, it: Any = null): Value = {
if (s == null || s == NoSymbol)
throw new HPEError(s"Symbol should not be null or NoSymbol ${s}\\nand it is: ${it}")
location.get(s) match {
case None => Bottom
case Some(x) => store(x)
}
}
def addValues(valvars: List[(Symbol, Value)]): Environment = {
var env = this
var tail = valvars
while (tail != Nil) {
val (vr, vl) = tail.head
env = env.addValue(vr, vl)
tail = tail.tail
}
env
}
def updateValue(s: Symbol, value: Value): Environment = {
if (s == null || s == NoSymbol)
throw new HPEError(s"Symbol should not be null or NoSymbol ${s}")
location.get(s) match {
case None =>
addValue(s, value)
case Some(l) =>
val old = store(l)
old match {
case Bottom | _ if (old.tpe == value.tpe) =>
val s = store + (l -> value)
new Environment(location, s, l)
case _ => throw new HPEError(s"Once you make a variable, CT, " +
"Abstract or Top you may not change it ${v}")
}
}
}
def addValue(s: Symbol, value: Value): Environment = {
if (s == null || s == NoSymbol)
throw new HPEError(s"Symbol should not be null or NoSymbol ${s}")
val l = loc + 1
val m = location + (s -> l) //location + (s -> l)
val st = store + (l -> value)
new Environment(m, st, l)
}
private def makeConsistent(x: Environment, y: Environment): Environment = {
var r = this
for ((t, l) <- x.location) {
val vx = x.getValue(t)
val vy = y.getValue(t)
// Environment is immutable, so each step must rebind r to the returned copy
if (vx == vy) r = r.addValue(t, vx)
else if (vy == Top || vx == Top) r = r.addValue(t, Top)
else r = r.remove(t)
}
r
}
def makeConsistent(envs: List[Environment]): Environment = {
envs match {
case Nil => Environment.empty
case x :: Nil => x
case x :: y :: Nil =>
if (x.location.size >= y.location.size) makeConsistent(x, y)
else makeConsistent(y, x)
case x :: y :: xs =>
val x1 = makeConsistent(x :: y :: Nil)
makeConsistent(x1 :: xs)
}
}
/**
* A key method for handling method calls on objects accurately.
*
* It passes all the parameters to the called method while still preserving
* the store of the receiving object.
*
* @param vars the symbols of the method parameters to copy over
* @param source the source environment to look the variables' values up in
* @return a new environment that contains the bindings for all the parameters
*/
def addBatch(vars: List[Symbol],
source: Environment): Environment = {
var tail = vars
var tempStore = this
while (tail != Nil) {
val head = tail.head
tail = tail.tail
val l = source.location(head)
val value = source.store(l)
val loc = tempStore.loc + 1
tempStore = new Environment(tempStore.location + (head -> l),
tempStore.store + (l -> value),
loc)
}
tempStore
}
/**
* Removes a variable from the environment
*
* @param s the variable to be removed
* @return a new environment, which has the bindings of variable v removed
*/
def remove(s: Symbol): Environment = {
val location2 = location - (s)
new Environment(location2, store, loc)
}
def remove(vars: List[Symbol]): Environment = {
var tail = vars
var tempStore = this
while (tail != Nil) {
val head = tail.head
tail = tail.tail
tempStore = remove(head)
}
tempStore
}
private def getPEValue(s: Symbol): Option[Value] = {
location.get(s) match {
case Some(loc) => store.get(loc)
case _ => None
}
}
def isCT(s: Symbol): Boolean = {
getPEValue(s) match {
case Some(CTValue(_)) => true
case _ => false
}
}
def isRT(s: Symbol): Boolean = {
getPEValue(s) match {
case Some(Top) => true
case _ => false
}
}
override def toString: String = location.toString + "\n" + store.toString
}
object Environment {
def empty: Environment = { new Environment }
def apply(varval: List[(Symbol, Value)]): Environment = {
newStore(varval)
}
def apply(varval: (List[Symbol], List[Value])): Environment = {
newStore(varval._1 zip varval._2)
}
def newStore(varval: List[(Symbol, Value)]): Environment = {
var env = new Environment
var tail = varval
while (tail != Nil) {
val (x, v) = tail.head
env = env.addValue(x, v)
tail = tail.tail
}
env
}
}
// ---------------------- Value -----------------------------------------
sealed trait Value {
protected val BOTTOM = 0
protected val CT = 1
protected val RT = 2
protected val TOP = 3
def value: Option[HPEAny];
val isCT = false
def tpe: Int;
}
case object Bottom extends Value {
override def value: Option[HPEAny] = None
def tpe: Int = BOTTOM
}
case object Top extends Value {
override def value: Option[HPEAny] = None
def tpe: Int = TOP
}
case class CTValue(v: HPEAny) extends Value {
override def value: Option[HPEAny] = Some(v)
override def toString: String = value.get.toString
def toTree = v.tree
override val isCT = true
def tpe: Int = CT
}
case class AbsValue(v: HPEAny) extends Value {
override def value: Option[HPEAny] = Some(v)
def toCTValue = CTValue(v)
def tpe: Int = RT
}
// ---------------------- Simulating Runtime Object -----------------------
trait HPEAny {
val tree: Tree;
val tpe: Type;
}
case class HPEObject(val tree: Tree, val tpe: Type,
val store: Environment) extends HPEAny {
override def equals(that: Any): Boolean = {
that match {
case HPEObject(_, `tpe`, `store`) => true
case _ => false
}
}
override def toString: String = tree.toString.replaceAll("[\\[\\]]", "")
override def hashCode = 71 * 5 + tpe.## + store.##
}
case class HPELiteral(override val tree: Literal,
override val tpe: Type) extends HPEAny {
override def equals(that: Any): Boolean = {
that match {
case HPELiteral(t, `tpe`) => tree.value.value == t.value.value
case _ => false
}
}
override def toString: String = tree.value.toString
override def hashCode = 71 * 5 + tree.value.value.## + tpe.##
}
case class HPETree(val tree: Tree) extends HPEAny {
val tpe: Type = tree.tpe
}
}
|
amanjpro/piuma
|
mina/src/main/scala/ch/usi/inf/l3/mina/store/HPEEnvironmentWrapper.scala
|
Scala
|
bsd-3-clause
| 7,325 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
/**
* A trait that should be mixed into query operators where a single instance might appear multiple
* times in a logical query plan. It is invalid to have multiple copies of the same attribute
* produced by distinct operators in a query tree as this breaks the guarantee that expression
* ids, which are used to differentiate attributes, are unique.
*
* During analysis, operators that include this trait may be asked to produce a new version
* of themselves with globally unique expression ids.
*/
trait MultiInstanceRelation {
def newInstance(): LogicalPlan
}
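// A standalone analogue of the contract described above (deliberately not a real Spark
// operator, since implementing newInstance requires a concrete LogicalPlan): the point is
// only that every call mints globally fresh ids, so two occurrences of the "same" relation
// in one plan never share attribute ids. The names below are illustrative only.
object FreshIdSketch {
  private val counter = new java.util.concurrent.atomic.AtomicLong(0L)
  def nextId(): Long = counter.incrementAndGet()
  final case class ToyRelation(outputIds: Seq[Long]) {
    def newInstance(): ToyRelation = ToyRelation(outputIds.map(_ => nextId()))
  }
}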
|
practice-vishnoi/dev-spark-1
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/MultiInstanceRelation.scala
|
Scala
|
apache-2.0
| 1,486 |
// Copyright 2017 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.spindle.codegen.runtime
import com.fasterxml.jackson.databind.ObjectMapper
import com.fasterxml.jackson.databind.node.ObjectNode
/* Renders thrift annotations from a [[ScalaProgram]] into json. Entry-point [[jsonBody]]. */
case class RenderJson() {
val mapper = new ObjectMapper
/* for each class with nonempty annotations, yield a json dictionary from the annotations */
def allJsonAnnotations(program: ScalaProgram): Seq[(ScalaClass, ObjectNode)] =
// TODO(awinter): in theory Extraction.unflatten does this, but I think unflatten has weird
// type inference we don't want
for (clazz <- program.structs if !clazz.annotations.toSeq.isEmpty)
yield {
val node = mapper.createObjectNode()
for ((k, v) <- clazz.annotations.toSeq)
node.put(k, v)
(clazz, node)
}
// TODO(awinter): double check binaryName is what we want and that this is a binaryName
/* throws if program.pkg is None */
def binaryName(program: ScalaProgram, clazz: ScalaClass): String = {
require(program.pkg.isDefined, "--write_json_annotations requires package names in structs")
program.pkg.get + "." + clazz.name
}
/*
Render program into a list of json dicts using [[allJsonAnnotations]] to render each struct.
Skip classes with no annotations.
Return includes number of processed classes (so you can skip writing the file if empty).
*/
def jsonBody(program: ScalaProgram): (Int, String) = {
val node = mapper.createObjectNode()
var nfields = 0
for ((clazz, json) <- allJsonAnnotations(program)) {
nfields += 1
node.set(binaryName(program, clazz), json)
}
(
nfields,
mapper.writerWithDefaultPrettyPrinter.writeValueAsString(node)
)
}
}
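// A small, hedged sketch of the JSON shape jsonBody produces, built with the same Jackson
// calls used above; "com.example.Foo" and the "owner" annotation are made-up placeholders,
// not values from any real thrift program.
object RenderJsonShapeSketch {
  def main(args: Array[String]): Unit = {
    val mapper = new ObjectMapper
    val annotations = mapper.createObjectNode()
    annotations.put("owner", "search")
    val root = mapper.createObjectNode()
    root.set("com.example.Foo", annotations)
    // Prints: { "com.example.Foo" : { "owner" : "search" } }
    println(mapper.writerWithDefaultPrettyPrinter.writeValueAsString(root))
  }
}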
|
foursquare/fsqio
|
src/jvm/io/fsq/spindle/codegen/runtime/RenderJson.scala
|
Scala
|
apache-2.0
| 1,828 |
package feh.tec.world
import feh.tec.world.Simple2dDirection.Simple2dDirection
trait AbstractSquareMap[Tile <: SquareTile[Tile, (Int, Int)]]
extends AbstractWorld[Tile, (Int, Int)] with AbstractWorldOps[Tile, (Int, Int), Simple2dDirection]
{
implicit class TileCoordinatesWrapper(t: Tile){
def x = t.coordinate._1
def y = t.coordinate._2
}
val coordinates: CoordinatesMeta
protected def assertDefinedAtAllCoordinates()
trait CoordinatesMeta {
def xRange: Range
def yRange: Range
}
}
trait EnclosedSquareMap[Tile <: SquareTile[Tile, (Int, Int)]] extends AbstractSquareMap[Tile] with EnclosedWorld[Tile, (Int, Int)]{
/** Computes where `of` lies relative to `relativelyTo` on this enclosed (wrap-around) map.
* @return the relative position of `of` with respect to `relativelyTo`, as a set of directions:
* one direction means the coordinates share the same vertical/horizontal position;
* three directions can be returned on enclosed maps, when the two possible routes along an
* axis (wrapping either way) are equally long;
* two directions are returned in all other cases, e.g. A is up and left of B;
* the empty set is returned if `of` == `relativelyTo`.
*/
def relativePosition(of: (Int, Int), relativelyTo: (Int, Int)): Set[Simple2dDirection] = {
import coordinates._
import Simple2dDirection._
if(of == relativelyTo) return Set()
def leftUpDist(ofProjection: Int, relProjection: Int, sizeProjection: => Int) =
if(ofProjection < relProjection) relProjection - ofProjection else relProjection + sizeProjection - ofProjection
def rightDownDist(ofProjection: Int, relProjection: Int, sizeProjection: => Int) =
if(ofProjection > relProjection) ofProjection - relProjection else sizeProjection - relProjection + ofProjection
def leftDist = leftUpDist(of._1, relativelyTo._1, xRange.length)
def rightDist = rightDownDist(of._1, relativelyTo._1, xRange.length)
def upDist = leftUpDist(of._2, relativelyTo._2, yRange.length)
def downDist = rightDownDist(of._2, relativelyTo._2, yRange.length)
def selectDirection(dir1: Simple2dDirection, dir2: Simple2dDirection)(dist1: Int, dist2: Int) =
if(dist1 == dist2) dir1 :: dir2 :: Nil
else if(dist1 < dist2) dir1 :: Nil
else dir2 :: Nil
val horDir = if(of._1 == relativelyTo._1) Nil else selectDirection(Left, Right)(leftDist, rightDist)
val vertDir = if(of._2 == relativelyTo._2) Nil else selectDirection(Up, Down)(upDist, downDist)
horDir.toSet ++ vertDir.toSet
}
def relativeNeighboursPosition(of: (Int, Int), relativelyTo: (Int, Int)): Simple2dDirection = {
import coordinates._
import Simple2dDirection._
relativelyTo -> of match{
case ((x1, y1), (x2, y2)) if x1 == x2 && (y2 == y1 + 1 || y1 == yRange.max && y2 == yRange.min) => Down
case ((x1, y1), (x2, y2)) if x1 == x2 && (y2 == y1 - 1 || y1 == yRange.min && y2 == yRange.max) => Up
case ((x1, y1), (x2, y2)) if y1 == y2 && (x2 == x1 - 1 || x1 == xRange.min && x2 == xRange.max) => Left
case ((x1, y1), (x2, y2)) if y1 == y2 && (x2 == x1 + 1 || x1 == xRange.max && x2 == xRange.min) => Right
case (c1, c2) => sys.error(s"$c1 and $c2 are not neighbouring tiles")
}
}
}
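// A standalone sketch of the wrap-around distance comparison used by relativePosition
// above, reduced to a single axis of length `size`; the object name and the sample
// numbers are illustrative only.
object TorusDistanceSketch {
  def leftUpDist(of: Int, rel: Int, size: Int): Int =
    if (of < rel) rel - of else rel + size - of
  def rightDownDist(of: Int, rel: Int, size: Int): Int =
    if (of > rel) of - rel else size - rel + of
  // On a 10-wide enclosed map, x = 9 is 1 step to the left of x = 0 but 9 steps to its
  // right, so relativePosition reports only Left for that axis:
  // leftUpDist(9, 0, 10) == 1; rightDownDist(9, 0, 10) == 9
}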
trait SquareTile[Tile <: SquareTile[Tile, Coordinate], Coordinate] extends WorldAtom[Tile, Coordinate]
|
fehu/agent-tareas
|
agent/src/main/scala/feh/tec/world/AbstractSquareMap.scala
|
Scala
|
mit
| 3,381 |
package com.kakao.shaded.jackson.module.scala
import com.kakao.shaded.jackson.module.scala.deser.EnumerationDeserializerModule
import com.kakao.shaded.jackson.module.scala.ser.EnumerationSerializerModule
/**
* Adds serialization and deserialization support for Scala Enumerations.
*
* @author Christopher Currie <[email protected]>
*/
trait EnumerationModule extends EnumerationSerializerModule with EnumerationDeserializerModule {
}
|
kakao/mango
|
mango-shaded/src/main/scala/com/kakao/shaded/jackson/module/scala/EnumerationModule.scala
|
Scala
|
apache-2.0
| 441 |
package webserviceclients.fakes
import pages.changekeeper.PrivateKeeperDetailsPage.{PostcodeValid, PostcodeWithoutAddresses}
import play.api.http.Status.OK
import play.api.i18n.Lang
import play.api.libs.json.Json
import play.api.libs.ws.WSResponse
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.TrackingId
import uk.gov.dvla.vehicles.presentation.common.webserviceclients.addresslookup.AddressLookupWebService
import uk.gov.dvla.vehicles.presentation.common.webserviceclients.addresslookup.ordnanceservey.AddressDto
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
final class FakeAddressLookupWebServiceImpl(responseOfPostcodeWebService: Future[WSResponse])
extends AddressLookupWebService {
override def callAddresses(postcode: String, trackingId: TrackingId)
(implicit lang: Lang): Future[WSResponse] =
if (postcode == PostcodeWithoutAddresses.toUpperCase) Future {
FakeResponse(status = OK, fakeJson = None)
}
else responseOfPostcodeWebService
}
object FakeAddressLookupWebServiceImpl {
final val selectedAddress = "presentationProperty stub, 123, property stub, street stub, town stub, area stub, QQ99QQ"
private def addressSeq(houseName: String, houseNumber: String): Seq[String] = {
Seq(houseName, houseNumber, "property stub", "street stub", "town stub", "area stub", PostcodeValid)
}
def addressesResponseValid: Seq[AddressDto] = {
val result = Seq(
AddressDto(addressSeq("presentationProperty stub", "123").mkString(", "),
None,
s"123",
None,
None,
s"town stub",
PostcodeValid
),
AddressDto(addressSeq("presentationProperty stub", "456").mkString(", "),
None,
s"123",
None,
None,
s"town stub",
PostcodeValid),
AddressDto(addressSeq("presentationProperty stub", "789").mkString(", "),
None,
s"123",
None,
None,
s"town stub",
PostcodeValid)
)
result
}
def responseValidForPostcodeToAddress: Future[WSResponse] = {
val inputAsJson = Json.toJson(addressesResponseValid)
Future {
FakeResponse(status = OK, fakeJson = Some(inputAsJson))
}
}
def responseValidForPostcodeToAddressNotFound: Future[WSResponse] = {
val inputAsJson = Json.toJson(Seq.empty[AddressDto])
Future {
FakeResponse(status = OK, fakeJson = Some(inputAsJson))
}
}
}
|
dvla/vehicles-change-keeper-online
|
test/webserviceclients/fakes/FakeAddressLookupWebServiceImpl.scala
|
Scala
|
mit
| 2,535 |
package scala.build
import aQute.bnd.osgi.Builder
import aQute.bnd.osgi.Constants._
import java.util.jar.Attributes
import sbt._
import sbt.Keys._
import collection.JavaConverters._
import VersionUtil.versionProperties
/** OSGi packaging for the Scala build, distilled from sbt-osgi. We do not use sbt-osgi because it
* depends on a newer version of BND which gives slightly different output (probably OK to upgrade
* in the future, now that the Ant build has been removed) and does not allow a crucial bit of
* configuration that we need: Setting the classpath for BND. In sbt-osgi this is always
* `fullClasspath in Compile` whereas we want `products in Compile in packageBin`. */
object Osgi {
val bundle = TaskKey[File]("osgiBundle", "Create an OSGi bundle.")
val bundleName = SettingKey[String]("osgiBundleName", "The Bundle-Name for the manifest.")
val bundleSymbolicName = SettingKey[String]("osgiBundleSymbolicName", "The Bundle-SymbolicName for the manifest.")
val headers = SettingKey[Seq[(String, String)]]("osgiHeaders", "Headers and processing instructions for BND.")
val jarlist = SettingKey[Boolean]("osgiJarlist", "List classes in manifest.")
def settings: Seq[Setting[_]] = Seq(
bundleName := description.value,
bundleSymbolicName := organization.value + "." + name.value,
headers := {
val v = VersionUtil.versionProperties.value.osgiVersion
Seq(
"Bundle-Name" -> bundleName.value,
"Bundle-SymbolicName" -> bundleSymbolicName.value,
"ver" -> v,
"Export-Package" -> "*;version=${ver};-split-package:=merge-first",
"Import-Package" -> raw"""scala.*;version="$${range;[==,=+);$${ver}}",*""",
"Bundle-Version" -> v,
"Bundle-RequiredExecutionEnvironment" -> "JavaSE-1.8",
"-eclipse" -> "false"
)
},
jarlist := false,
bundle := Def.task {
val cp = (products in Compile in packageBin).value
val licenseFiles = License.licenseMapping.value.map(_._1)
bundleTask(headers.value.toMap, jarlist.value, cp,
(artifactPath in (Compile, packageBin)).value, cp ++ licenseFiles, streams.value)
}.value,
packagedArtifact in (Compile, packageBin) := (((artifact in (Compile, packageBin)).value, bundle.value)),
// Also create OSGi source bundles:
packageOptions in (Compile, packageSrc) += Package.ManifestAttributes(
"Bundle-Name" -> (description.value + " Sources"),
"Bundle-SymbolicName" -> (bundleSymbolicName.value + ".source"),
"Bundle-Version" -> versionProperties.value.osgiVersion,
"Eclipse-SourceBundle" -> (bundleSymbolicName.value + ";version=\\"" + versionProperties.value.osgiVersion + "\\";roots:=\\".\\"")
),
Keys.`package` := bundle.value
)
def bundleTask(headers: Map[String, String], jarlist: Boolean, fullClasspath: Seq[File], artifactPath: File,
resourceDirectories: Seq[File], streams: TaskStreams): File = {
val log = streams.log
val builder = new Builder
builder.setClasspath(fullClasspath.toArray)
headers foreach { case (k, v) => builder.setProperty(k, v) }
// https://github.com/scala/scala-dev/issues/254
// Must be careful not to include scala-asm.jar within scala-compiler.jar!
def resourceDirectoryRef(f: File) = (if (f.getName endsWith ".jar") "@" else "") + f.getAbsolutePath
val includeRes = resourceDirectories.filter(_.exists).map(resourceDirectoryRef).mkString(",")
if (!includeRes.isEmpty) builder.setProperty(INCLUDERESOURCE, includeRes)
builder.getProperties.asScala.foreach { case (k, v) => log.debug(s"bnd: $k: $v") }
// builder.build is not thread-safe because it uses a static SimpleDateFormat. This ensures
// that all calls to builder.build are serialized.
val jar = synchronized { builder.build }
builder.getWarnings.asScala.foreach(s => log.warn(s"bnd: $s"))
builder.getErrors.asScala.foreach(s => log.error(s"bnd: $s"))
IO.createDirectory(artifactPath.getParentFile)
if (jarlist) {
val entries = jar.getManifest.getEntries
for ((name, resource) <- jar.getResources.asScala if name.endsWith(".class")) {
entries.put(name, new Attributes)
}
}
jar.write(artifactPath)
artifactPath
}
}
|
martijnhoekstra/scala
|
project/Osgi.scala
|
Scala
|
apache-2.0
| 4,260 |
package octopus.async.cats
import monix.eval.Task
import monix.execution.Scheduler.Implicits.global
import octopus.AppError
import octopus.async.AsyncValidationSpec
import octopus.async.cats.ToFutureImplicits._
class MonixTaskIntegrationSpec extends AsyncValidationSpec[Task] {
implicit lazy val appError: AppError[Task] = octopus.async.cats.implicits.catsAppError
}
|
krzemin/octopus
|
octopusCats/src/test/scala/octopus/async/cats/MonixTaskIntegrationSpec.scala
|
Scala
|
apache-2.0
| 371 |
package com.atomist.project.common.template
import com.atomist.source.ArtifactSource
class MustacheMergeToolCreator
extends MergeToolCreator {
override def createMergeTool(templateContent: ArtifactSource): MergeTool =
new MustacheMergeTool(templateContent)
}
|
atomist/rug
|
src/main/scala/com/atomist/project/common/template/MustacheMergeToolCreator.scala
|
Scala
|
gpl-3.0
| 270 |
/*
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*/
package org.locationtech.geomesa.compute.spark.sql
import java.sql.Timestamp
import java.util.concurrent.atomic.AtomicInteger
import java.util.{Date, List => jList, Map => jMap, UUID}
import com.typesafe.scalalogging.slf4j.Logging
import com.vividsolutions.jts.geom.Geometry
import org.apache.hadoop.conf.Configuration
import org.apache.metamodel.query.FilterClause
import org.apache.metamodel.{DataContext, query}
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, _}
import org.apache.spark.{SparkConf, SparkContext}
import org.geotools.data.{DataStoreFinder, DataUtilities, Query}
import org.geotools.factory.CommonFactoryFinder
import org.geotools.filter.text.ecql.ECQL
import org.geotools.filter.visitor.DuplicatingFilterVisitor
import org.locationtech.geomesa.compute.spark.GeoMesaSpark
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes._
import org.locationtech.geomesa.utils.text.WKTUtils
import org.opengis.feature.`type`.AttributeDescriptor
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
import org.opengis.filter.expression.PropertyName
import scala.collection.JavaConversions._
/**
* Manages running SQL queries against GeoMesa using Spark.
*
* Only a single SparkContext can run in a given JVM, so it has to be managed through the
* register/start/stop methods below.
*/
object GeoMesaSparkSql extends Logging {
private val ff = CommonFactoryFinder.getFilterFactory2
// state to keep track of our sfts and data store connection parameters
private val dsParams = scala.collection.mutable.Set.empty[Map[String, String]]
private val sfts = scala.collection.mutable.Set.empty[SimpleFeatureType]
// singleton spark context
private var sc: SparkContext = null
private var sparkSql: GeoMesaSparkSql = null
private var running = false
private val executing = new AtomicInteger(0)
/**
* Register a data store. This makes all schemas in the data store available for querying.
* Synchronized to ensure it's only called when the spark context is not running.
*/
def registerDataStore(params: Map[String, String]): Unit = synchronized {
require(!running, "Can't register a data store in a running instance")
val ds = DataStoreFinder.getDataStore(params)
require(ds != null, "No data store found using provided parameters")
dsParams += params
sfts ++= ds.getTypeNames.map(ds.getSchema)
}
/**
* Starts the spark context, if not already running.
*/
def start(configs: Map[String, String] = Map.empty,
distributedJars: Seq[String] = Seq.empty): Boolean = synchronized {
if (running) {
logger.debug("Trying to start an already started instance")
false
} else {
val conf = GeoMesaSpark.init(new SparkConf(), sfts.toSeq)
conf.setAppName("GeoMesaSql")
conf.setMaster("yarn-client")
conf.setJars(distributedJars)
configs.foreach { case (k, v) => conf.set(k, v) }
sc = new SparkContext(conf)
sparkSql = new GeoMesaSparkSql(sc, dsParams.toSeq)
running = true
true
}
}
/**
* Stops the spark context, if running. Blocks until all current processes have finished executing.
* Note that the synchronization on this method will prevent new tasks from executing.
*
* @param wait
* if < 0, will block indefinitely
* if >= 0, will return after that many millis
* @return true if successfully stopped, else false
*/
def stop(wait: Long = -1): Boolean = synchronized {
if (running) {
val start = System.currentTimeMillis()
// wait for current queries to stop
while (executing.get() > 0 && (wait == -1 || System.currentTimeMillis() - start < wait)) {
Thread.sleep(1000)
}
if (executing.get() > 0) {
return false
}
sc.stop()
sc = null
sparkSql = null
running = false
} else {
logger.debug("Trying to stop an already stopped instance")
}
true
}
/**
* Executes a SQL query against GeoMesa. The where clause is interpreted as CQL.
*/
def execute(sql: String, splits: Option[Int] = None): (StructType, Array[Row]) = {
val canStart = synchronized {
// we need to compare and modify the state inside the synchronized block
if (running) {
executing.incrementAndGet()
}
running
}
require(canStart, "Can only execute in a running instance")
try {
val results = sparkSql.query(sql, splits)
// return the result schema and rows
(results.schema, results.collect())
} finally {
executing.decrementAndGet()
}
}
/**
* Extracts CQL from the SQL query.
*/
private def extractCql(where: FilterClause,
context: DataContext,
sftNames: Seq[String]): Map[String, Filter] = {
val sqlVisitor = new SqlVisitor(context, sftNames)
val result = scala.collection.mutable.Map.empty[String, Filter]
// items should have an expression if they can't be parsed as SQL
// we interpret that to mean that they are CQL instead
where.getItems.flatMap(i => Option(i.getExpression)).map(ECQL.toFilter).foreach { filter =>
sqlVisitor.referencedSfts.clear()
val updated = filter.accept(sqlVisitor, null).asInstanceOf[Filter]
require(sqlVisitor.referencedSfts.size == 1, "CQL filters across multiple tables are not supported")
val typeName = sqlVisitor.referencedSfts.head
result.put(typeName, result.get(typeName).map(c => ff.and(updated, c)).getOrElse(updated))
}
result.toMap
}
/**
* Get the attribute names referenced in the query - used to select a subset of attributes from geomesa
*/
def extractAttributeNames(sql: query.Query, cql: Map[String, Filter]): Map[String, Set[String]] = {
val namesFromCql = cql.mapValues(DataUtilities.attributeNames(_).toSet)
val namesFromSql = scala.collection.mutable.Map.empty[String, Set[String]]
// we ignore the 'having' clause as it should always reference something from the select
val selects = sql.getSelectClause.getItems ++
sql.getWhereClause.getEvaluatedSelectItems ++
sql.getGroupByClause.getEvaluatedSelectItems ++
sql.getOrderByClause.getEvaluatedSelectItems
selects.flatMap(s => Option(s.getColumn)).foreach { c =>
val table = c.getTable.getName
namesFromSql.put(table, namesFromSql.get(table).map(_ ++ Set(c.getName)).getOrElse(Set(c.getName)))
}
// combine the two maps
namesFromSql.toMap ++ namesFromCql.map { case (k,v) =>
k -> namesFromSql.get(k).map(_ ++ v.toSet).getOrElse(v.toSet)
}
}
/**
* Converts a simple feature attribute into a SQL data type
*/
private def types(d: AttributeDescriptor): DataType = {
val clas = d.getType.getBinding
if (classOf[jList[_]].isAssignableFrom(clas)) {
val listClass = d.getUserData.get(USER_DATA_LIST_TYPE).asInstanceOf[Class[_]]
DataTypes.createArrayType(types(listClass))
} else if (classOf[jMap[_, _]].isAssignableFrom(clas)) {
val keyClass = d.getUserData.get(USER_DATA_MAP_KEY_TYPE).asInstanceOf[Class[_]]
val valueClass = d.getUserData.get(USER_DATA_MAP_VALUE_TYPE).asInstanceOf[Class[_]]
DataTypes.createMapType(types(keyClass), types(valueClass))
} else {
types(clas)
}
}
/**
* Converts a simple class type into a SQL data type
*/
private def types(clas: Class[_]): DataType = {
if (classOf[java.lang.String].isAssignableFrom(clas)) {
StringType
} else if (classOf[java.lang.Integer].isAssignableFrom(clas)) {
IntegerType
} else if (classOf[java.lang.Long].isAssignableFrom(clas)) {
LongType
} else if (classOf[java.lang.Float].isAssignableFrom(clas)) {
FloatType
} else if (classOf[java.lang.Double].isAssignableFrom(clas)) {
DoubleType
} else if (classOf[java.lang.Boolean].isAssignableFrom(clas)) {
BooleanType
} else if (classOf[java.util.Date].isAssignableFrom(clas)) {
TimestampType
} else if (classOf[UUID].isAssignableFrom(clas)) {
StringType
} else if (classOf[Geometry].isAssignableFrom(clas)) {
StringType
} else {
throw new NotImplementedError(s"Binding $clas is not supported")
}
}
}
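// A hedged usage sketch of the register/start/execute/stop lifecycle described above.
// The connection parameters, SQL text and timeout are illustrative placeholders only,
// not a tested configuration.
object GeoMesaSparkSqlUsageSketch {
  def run(): Unit = {
    GeoMesaSparkSql.registerDataStore(Map("instanceId" -> "local", "tableName" -> "features"))
    if (GeoMesaSparkSql.start()) {
      try {
        val (schema, rows) =
          GeoMesaSparkSql.execute("SELECT * FROM features WHERE BBOX(geom, -80, 35, -75, 40)")
        println(s"got ${rows.length} rows with schema $schema")
      } finally {
        GeoMesaSparkSql.stop(wait = 30000L)
      }
    }
  }
}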
class GeoMesaSparkSql(sc: SparkContext, dsParams: Seq[Map[String, String]]) {
// load up our sfts
val sftsByName = dsParams.flatMap { params =>
val ds = DataStoreFinder.getDataStore(params)
require(ds != null, "No data store found using provided parameters")
ds.getTypeNames.map { name =>
val schema = ds.getSchema(name)
name -> (schema, params)
}
}.foldLeft(Map.empty[String, (SimpleFeatureType, Map[String, String])])(_ + _)
private val dataContext = new GeoMesaDataContext(sftsByName.mapValues(_._1))
/**
* Executes a SQL query against GeoMesa. The where clause is interpreted as CQL.
*/
def query(sql: String, splits: Option[Int]): DataFrame = {
val parsedSql = dataContext.parseQuery(sql)
// extract the feature types from the from clause
val typeNames = parsedSql.getFromClause.getItems.map(_.getTable.getName)
val sftsWithParams = typeNames.map(sftsByName.apply)
// extract the cql from the where clause
val where = parsedSql.getWhereClause
val cql = GeoMesaSparkSql.extractCql(where, dataContext, typeNames)
// clear out the CQL from the where clause so Spark doesn't try to parse it
// if it's a SQL expression, the expression field will be null
// otherwise it has the raw expression, which we assume is CQL
where.getItems.filter(_.getExpression != null).foreach(where.removeItem)
val sqlWithoutCql = parsedSql.toSql
// restrict the attributes coming back to speed up the query
val attributesByType = GeoMesaSparkSql.extractAttributeNames(parsedSql, cql)
val sqlContext = new SQLContext(sc)
// for each input sft, set up the sql table with the results from querying geomesa with the cql filter
sftsWithParams.foreach { case (sft, params) =>
val typeName = sft.getTypeName
val allAttributes = sft.getAttributeDescriptors.map(_.getLocalName)
val attributes = {
val extracted = attributesByType(typeName).toList
if (extracted.sorted == allAttributes.sorted) {
None // if we've got all attributes, we don't need a transform
} else {
Some(extracted.toArray)
}
}
val filter = cql.getOrElse(typeName, Filter.INCLUDE)
val query = new Query(typeName, filter)
attributes.foreach(query.setPropertyNames)
// generate the sql schema based on the sft/query attributes
val fields = attributes.getOrElse(allAttributes.toArray).map { field =>
StructField(field, GeoMesaSparkSql.types(sft.getDescriptor(field)), nullable = true)
}
val schema = StructType(fields)
// create an rdd from the query
val features = GeoMesaSpark.rdd(new Configuration(), sc, params, query, splits)
// convert records to rows - convert the values to sql-compatible ones
val rowRdd = features.map { f =>
val sqlAttributes = f.getAttributes.map {
case g: Geometry => WKTUtils.write(g) // text
case d: Date => new Timestamp(d.getTime) // sql timestamp
case u: UUID => u.toString // text
case a => a // others should map natively without explicit conversion
}
Row(sqlAttributes: _*)
}
// apply the schema to the rdd
val featuresDataFrame = sqlContext.createDataFrame(rowRdd, schema)
// register the data frame as a table, so that it's available to the sql engine
featuresDataFrame.registerTempTable(typeName)
}
// run the sql statement against our registered tables
sqlContext.sql(sqlWithoutCql)
}
}
/**
* Extracts property names from a filter. Names are expected to either be qualified with the
* feature type name (e.g. mysft.myattr), or be unambiguous among the feature types being queried.
*/
class SqlVisitor(context: DataContext, sftNames: Seq[String]) extends DuplicatingFilterVisitor {
val referencedSfts = scala.collection.mutable.Set.empty[String]
override def visit(expression: PropertyName, extraData: AnyRef): AnyRef = {
val name = expression.getPropertyName
require(name != null && !name.isEmpty, "Property name is ambiguous: 'null'")
val parts = name.split("\\.|/") // ECQL converts '.' into '/' in properties, so we have to match both
require(parts.length < 3, s"Ambiguous property name in filter: '$name'")
if (parts.length == 2) {
// qualified by sft name
val matching = sftNames.filter(_ == parts.head)
require(matching.nonEmpty, s"Property name does not match a table in from clause: '$name")
referencedSfts.add(matching.head)
getFactory(extraData).property(parts(1), expression.getNamespaceContext)
} else {
// not qualified - see if it unambiguously matches any of the tables
val matching = sftNames.map(context.getTableByQualifiedLabel).flatMap(_.getColumns.find(_.getName == name))
require(matching.nonEmpty, s"Property name does not match a table in from clause: '$name")
require(matching.length == 1, s"Property name is ambiguous: '$name'")
referencedSfts.add(matching.head.getTable.getName)
expression
}
}
}
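// A tiny illustration of the name-splitting rule used in the visitor above: ECQL may
// rewrite 'mysft.myattr' as 'mysft/myattr', and both forms split into the same parts.
// The object and the sample property name are illustrative only.
object PropertyNameSplitSketch {
  def parts(name: String): Array[String] = name.split("\\.|/")
  // parts("mysft.myattr").toSeq == Seq("mysft", "myattr")
  // parts("mysft/myattr").toSeq == Seq("mysft", "myattr")
}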
|
setumaven/geomesa
|
geomesa-compute/src/main/scala/org/locationtech/geomesa/compute/spark/sql/GeoMesaSparkSql.scala
|
Scala
|
apache-2.0
| 13,765 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.{InMemoryPartitionTableCatalog, SchemaRequiredDataSource}
import org.apache.spark.sql.execution.datasources.LogicalRelation
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.SimpleInsertSource
import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils}
import org.apache.spark.sql.types._
// The base trait for char/varchar tests that need to be run with different table implementations.
trait CharVarcharTestSuite extends QueryTest with SQLTestUtils {
def format: String
def checkColType(f: StructField, dt: DataType): Unit = {
assert(f.dataType == CharVarcharUtils.replaceCharVarcharWithString(dt))
assert(CharVarcharUtils.getRawType(f.metadata) == Some(dt))
}
def checkPlainResult(df: DataFrame, dt: String, insertVal: String): Unit = {
val dataType = CatalystSqlParser.parseDataType(dt)
checkColType(df.schema(1), dataType)
dataType match {
case CharType(len) =>
// char value will be padded if (<= len) or trimmed if (> len)
val fixLenStr = if (insertVal != null) {
insertVal.take(len).padTo(len, " ").mkString
} else null
checkAnswer(df, Row("1", fixLenStr))
case VarcharType(len) =>
// varchar value is kept unchanged if (<= len) or trimmed if (> len)
val varLenStrWithUpperBound = if (insertVal != null) {
insertVal.take(len)
} else null
checkAnswer(df, Row("1", varLenStrWithUpperBound))
}
}
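// A hedged helper sketch (not referenced by the tests): the CHAR(len) pad-or-trim rule
// that checkPlainResult above expects, written out on its own for clarity.
private def charFixLenSketch(v: String, len: Int): String =
  v.take(len).padTo(len, ' ')
// charFixLenSketch("ab", 5)      == "ab   "
// charFixLenSketch("abcdefg", 5) == "abcde"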
test("apply char padding/trimming and varchar trimming: top-level columns") {
Seq("CHAR(5)", "VARCHAR(5)").foreach { typ =>
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c $typ) USING $format")
(0 to 5).map(n => "a" + " " * n).foreach { v =>
sql(s"INSERT OVERWRITE t VALUES ('1', '$v')")
checkPlainResult(spark.table("t"), typ, v)
}
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkPlainResult(spark.table("t"), typ, null)
}
}
}
test("char type values should be padded or trimmed: partitioned columns") {
// via dynamic partitioned columns
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c CHAR(5)) USING $format PARTITIONED BY (c)")
(0 to 5).map(n => "a" + " " * n).foreach { v =>
sql(s"INSERT OVERWRITE t VALUES ('1', '$v')")
checkPlainResult(spark.table("t"), "CHAR(5)", v)
}
}
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c CHAR(5)) USING $format PARTITIONED BY (c)")
(0 to 5).map(n => "a" + " " * n).foreach { v =>
// via dynamic partitioned columns with drop partition command
sql(s"INSERT INTO t VALUES ('1', '$v')")
checkPlainResult(spark.table("t"), "CHAR(5)", v)
sql(s"ALTER TABLE t DROP PARTITION(c='a')")
checkAnswer(spark.table("t"), Nil)
// via static partitioned columns with drop partition command
sql(s"INSERT INTO t PARTITION (c ='$v') VALUES ('1')")
checkPlainResult(spark.table("t"), "CHAR(5)", v)
sql(s"ALTER TABLE t DROP PARTITION(c='a')")
checkAnswer(spark.table("t"), Nil)
}
}
}
test("varchar type values length check and trim: partitioned columns") {
(0 to 5).foreach { n =>
// SPARK-34192: we need to create a new table for each round of tests because
// trailing spaces in the partition column will be treated differently.
// This is because MySQL and Derby (used in tests) consider 'a' = 'a ',
// whereas others (Postgres, Oracle) don't exhibit this problem.
// see more at:
// https://issues.apache.org/jira/browse/HIVE-13618
// https://issues.apache.org/jira/browse/SPARK-34192
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c VARCHAR(5)) USING $format PARTITIONED BY (c)")
val v = "a" + " " * n
// via dynamic partitioned columns
sql(s"INSERT INTO t VALUES ('1', '$v')")
checkPlainResult(spark.table("t"), "VARCHAR(5)", v)
sql(s"ALTER TABLE t DROP PARTITION(c='$v')")
checkAnswer(spark.table("t"), Nil)
// via static partitioned columns
sql(s"INSERT INTO t PARTITION (c='$v') VALUES ('1')")
checkPlainResult(spark.table("t"), "VARCHAR(5)", v)
sql(s"ALTER TABLE t DROP PARTITION(c='$v')")
checkAnswer(spark.table("t"), Nil)
}
}
}
test("oversize char/varchar values for alter table partition operations") {
Seq("CHAR(5)", "VARCHAR(5)").foreach { typ =>
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c $typ) USING $format PARTITIONED BY (c)")
Seq("ADD", "DROP").foreach { op =>
val e = intercept[RuntimeException](sql(s"ALTER TABLE t $op PARTITION(c='abcdef')"))
assert(e.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
}
val e1 = intercept[RuntimeException] {
sql(s"ALTER TABLE t PARTITION (c='abcdef') RENAME TO PARTITION (c='2')")
}
assert(e1.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
val e2 = intercept[RuntimeException] {
sql(s"ALTER TABLE t PARTITION (c='1') RENAME TO PARTITION (c='abcdef')")
}
assert(e2.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
}
}
}
test("SPARK-34233: char/varchar with null value for partitioned columns") {
Seq("CHAR(5)", "VARCHAR(5)").foreach { typ =>
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c $typ) USING $format PARTITIONED BY (c)")
sql("INSERT INTO t VALUES ('1', null)")
checkPlainResult(spark.table("t"), typ, null)
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkPlainResult(spark.table("t"), typ, null)
sql("INSERT OVERWRITE t PARTITION (c=null) VALUES ('1')")
checkPlainResult(spark.table("t"), typ, null)
sql("ALTER TABLE t DROP PARTITION(c=null)")
checkAnswer(spark.table("t"), Nil)
}
}
}
test("char/varchar type values length check: partitioned columns of other types") {
Seq("CHAR(5)", "VARCHAR(5)").foreach { typ =>
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c $typ) USING $format PARTITIONED BY (c)")
Seq(1, 10, 100, 1000, 10000).foreach { v =>
sql(s"INSERT OVERWRITE t VALUES ('1', $v)")
checkPlainResult(spark.table("t"), typ, v.toString)
sql(s"ALTER TABLE t DROP PARTITION(c=$v)")
checkAnswer(spark.table("t"), Nil)
}
val e1 = intercept[SparkException](sql(s"INSERT OVERWRITE t VALUES ('1', 100000)"))
assert(e1.getCause.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
val e2 = intercept[RuntimeException](sql("ALTER TABLE t DROP PARTITION(c=100000)"))
assert(e2.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
}
}
}
test("char type values should be padded: nested in struct") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c STRUCT<c: CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES ('1', struct('a'))")
checkAnswer(spark.table("t"), Row("1", Row("a" + " " * 4)))
checkColType(spark.table("t").schema(1), new StructType().add("c", CharType(5)))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', struct(null))")
checkAnswer(spark.table("t"), Row("1", Row(null)))
}
}
test("char type values should be padded: nested in array") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c ARRAY<CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES ('1', array('a', 'ab'))")
checkAnswer(spark.table("t"), Row("1", Seq("a" + " " * 4, "ab" + " " * 3)))
checkColType(spark.table("t").schema(1), ArrayType(CharType(5)))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', array(null))")
checkAnswer(spark.table("t"), Row("1", Seq(null)))
}
}
test("char type values should be padded: nested in map key") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c MAP<CHAR(5), STRING>) USING $format")
sql("INSERT INTO t VALUES ('1', map('a', 'ab'))")
checkAnswer(spark.table("t"), Row("1", Map(("a" + " " * 4, "ab"))))
checkColType(spark.table("t").schema(1), MapType(CharType(5), StringType))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
}
}
test("char type values should be padded: nested in map value") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c MAP<STRING, CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES ('1', map('a', 'ab'))")
checkAnswer(spark.table("t"), Row("1", Map(("a", "ab" + " " * 3))))
checkColType(spark.table("t").schema(1), MapType(StringType, CharType(5)))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', map('a', null))")
checkAnswer(spark.table("t"), Row("1", Map("a" -> null)))
}
}
test("char type values should be padded: nested in both map key and value") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c MAP<CHAR(5), CHAR(10)>) USING $format")
sql("INSERT INTO t VALUES ('1', map('a', 'ab'))")
checkAnswer(spark.table("t"), Row("1", Map(("a" + " " * 4, "ab" + " " * 8))))
checkColType(spark.table("t").schema(1), MapType(CharType(5), CharType(10)))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
}
}
test("char type values should be padded: nested in struct of array") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c STRUCT<c: ARRAY<CHAR(5)>>) USING $format")
sql("INSERT INTO t VALUES ('1', struct(array('a', 'ab')))")
checkAnswer(spark.table("t"), Row("1", Row(Seq("a" + " " * 4, "ab" + " " * 3))))
checkColType(spark.table("t").schema(1),
new StructType().add("c", ArrayType(CharType(5))))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', struct(null))")
checkAnswer(spark.table("t"), Row("1", Row(null)))
sql("INSERT OVERWRITE t VALUES ('1', struct(array(null)))")
checkAnswer(spark.table("t"), Row("1", Row(Seq(null))))
}
}
test("char type values should be padded: nested in array of struct") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c ARRAY<STRUCT<c: CHAR(5)>>) USING $format")
sql("INSERT INTO t VALUES ('1', array(struct('a'), struct('ab')))")
checkAnswer(spark.table("t"), Row("1", Seq(Row("a" + " " * 4), Row("ab" + " " * 3))))
checkColType(spark.table("t").schema(1),
ArrayType(new StructType().add("c", CharType(5))))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', array(null))")
checkAnswer(spark.table("t"), Row("1", Seq(null)))
sql("INSERT OVERWRITE t VALUES ('1', array(struct(null)))")
checkAnswer(spark.table("t"), Row("1", Seq(Row(null))))
}
}
test("char type values should be padded: nested in array of array") {
withTable("t") {
sql(s"CREATE TABLE t(i STRING, c ARRAY<ARRAY<CHAR(5)>>) USING $format")
sql("INSERT INTO t VALUES ('1', array(array('a', 'ab')))")
checkAnswer(spark.table("t"), Row("1", Seq(Seq("a" + " " * 4, "ab" + " " * 3))))
checkColType(spark.table("t").schema(1), ArrayType(ArrayType(CharType(5))))
sql("INSERT OVERWRITE t VALUES ('1', null)")
checkAnswer(spark.table("t"), Row("1", null))
sql("INSERT OVERWRITE t VALUES ('1', array(null))")
checkAnswer(spark.table("t"), Row("1", Seq(null)))
sql("INSERT OVERWRITE t VALUES ('1', array(array(null)))")
checkAnswer(spark.table("t"), Row("1", Seq(Seq(null))))
}
}
private def testTableWrite(f: String => Unit): Unit = {
withTable("t") { f("char") }
withTable("t") { f("varchar") }
}
test("length check for input string values: top-level columns") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c $typeName(5)) USING $format")
sql("INSERT INTO t VALUES (null)")
checkAnswer(spark.table("t"), Row(null))
val e = intercept[SparkException](sql("INSERT INTO t VALUES ('123456')"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: partitioned columns") {
// DS V2 doesn't support partitioned tables.
if (!conf.contains(SQLConf.DEFAULT_CATALOG.key)) {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(i INT, c $typeName(5)) USING $format PARTITIONED BY (c)")
sql("INSERT INTO t VALUES (1, null)")
checkAnswer(spark.table("t"), Row(1, null))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (1, '123456')"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
}
test("length check for input string values: nested in struct") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c STRUCT<c: $typeName(5)>) USING $format")
sql("INSERT INTO t SELECT struct(null)")
checkAnswer(spark.table("t"), Row(Row(null)))
val e = intercept[SparkException](sql("INSERT INTO t SELECT struct('123456')"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in array") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c ARRAY<$typeName(5)>) USING $format")
sql("INSERT INTO t VALUES (array(null))")
checkAnswer(spark.table("t"), Row(Seq(null)))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (array('a', '123456'))"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in map key") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c MAP<$typeName(5), STRING>) USING $format")
val e = intercept[SparkException](sql("INSERT INTO t VALUES (map('123456', 'a'))"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in map value") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c MAP<STRING, $typeName(5)>) USING $format")
sql("INSERT INTO t VALUES (map('a', null))")
checkAnswer(spark.table("t"), Row(Map("a" -> null)))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (map('a', '123456'))"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in both map key and value") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c MAP<$typeName(5), $typeName(5)>) USING $format")
val e1 = intercept[SparkException](sql("INSERT INTO t VALUES (map('123456', 'a'))"))
assert(e1.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
val e2 = intercept[SparkException](sql("INSERT INTO t VALUES (map('a', '123456'))"))
assert(e2.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in struct of array") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c STRUCT<c: ARRAY<$typeName(5)>>) USING $format")
sql("INSERT INTO t SELECT struct(array(null))")
checkAnswer(spark.table("t"), Row(Row(Seq(null))))
val e = intercept[SparkException](sql("INSERT INTO t SELECT struct(array('123456'))"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in array of struct") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c ARRAY<STRUCT<c: $typeName(5)>>) USING $format")
sql("INSERT INTO t VALUES (array(struct(null)))")
checkAnswer(spark.table("t"), Row(Seq(Row(null))))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (array(struct('123456')))"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: nested in array of array") {
testTableWrite { typeName =>
sql(s"CREATE TABLE t(c ARRAY<ARRAY<$typeName(5)>>) USING $format")
sql("INSERT INTO t VALUES (array(array(null)))")
checkAnswer(spark.table("t"), Row(Seq(Seq(null))))
val e = intercept[SparkException](sql("INSERT INTO t VALUES (array(array('123456')))"))
assert(e.getCause.getMessage.contains(s"Exceeds char/varchar type length limitation: 5"))
}
}
test("length check for input string values: with trailing spaces") {
withTable("t") {
sql(s"CREATE TABLE t(c1 CHAR(5), c2 VARCHAR(5)) USING $format")
sql("INSERT INTO t VALUES ('12 ', '12 ')")
sql("INSERT INTO t VALUES ('1234 ', '1234 ')")
checkAnswer(spark.table("t"), Seq(
Row("12" + " " * 3, "12 "),
Row("1234 ", "1234 ")))
}
}
test("length check for input string values: with implicit cast") {
withTable("t") {
sql(s"CREATE TABLE t(c1 CHAR(5), c2 VARCHAR(5)) USING $format")
sql("INSERT INTO t VALUES (1234, 1234)")
checkAnswer(spark.table("t"), Row("1234 ", "1234"))
val e1 = intercept[SparkException](sql("INSERT INTO t VALUES (123456, 1)"))
assert(e1.getCause.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
val e2 = intercept[SparkException](sql("INSERT INTO t VALUES (1, 123456)"))
assert(e2.getCause.getMessage.contains("Exceeds char/varchar type length limitation: 5"))
}
}
private def testConditions(df: DataFrame, conditions: Seq[(String, Boolean)]): Unit = {
checkAnswer(df.selectExpr(conditions.map(_._1): _*), Row.fromSeq(conditions.map(_._2)))
}
test("char type comparison: top-level columns") {
withTable("t") {
sql(s"CREATE TABLE t(c1 CHAR(2), c2 CHAR(5)) USING $format")
sql("INSERT INTO t VALUES ('a', 'a')")
testConditions(spark.table("t"), Seq(
("c1 = 'a'", true),
("'a' = c1", true),
("c1 = 'a '", true),
("c1 > 'a'", false),
("c1 IN ('a', 'b')", true),
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true),
("c1 <=> null", false)))
}
}
test("char type comparison: partitioned columns") {
withTable("t") {
sql(s"CREATE TABLE t(i INT, c1 CHAR(2), c2 CHAR(5)) USING $format PARTITIONED BY (c1, c2)")
sql("INSERT INTO t VALUES (1, 'a', 'a')")
testConditions(spark.table("t"), Seq(
("c1 = 'a'", true),
("'a' = c1", true),
("c1 = 'a '", true),
("c1 > 'a'", false),
("c1 IN ('a', 'b')", true),
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true),
("c1 <=> null", false)))
}
}
private def testNullConditions(df: DataFrame, conditions: Seq[String]): Unit = {
conditions.foreach { cond =>
checkAnswer(df.selectExpr(cond), Row(null))
}
}
test("SPARK-34233: char type comparison with null values") {
val conditions = Seq("c = null", "c IN ('e', null)", "c IN (null)")
withTable("t") {
sql(s"CREATE TABLE t(c CHAR(2)) USING $format")
sql("INSERT INTO t VALUES ('a')")
testNullConditions(spark.table("t"), conditions)
}
withTable("t") {
sql(s"CREATE TABLE t(i INT, c CHAR(2)) USING $format PARTITIONED BY (c)")
sql("INSERT INTO t VALUES (1, 'a')")
testNullConditions(spark.table("t"), conditions)
}
}
test("char type comparison: partition pruning") {
withTable("t") {
sql(s"CREATE TABLE t(i INT, c1 CHAR(2), c2 VARCHAR(5)) USING $format PARTITIONED BY (c1, c2)")
sql("INSERT INTO t VALUES (1, 'a', 'a')")
Seq(("c1 = 'a'", true),
("'a' = c1", true),
("c1 = 'a '", true),
("c1 > 'a'", false),
("c1 IN ('a', 'b')", true),
("c2 = 'a '", false),
("c2 = 'a'", true),
("c2 IN ('a', 'b')", true)).foreach { case (con, res) =>
val df = spark.table("t")
withClue(con) {
checkAnswer(df.where(con), df.where(res.toString))
}
}
}
}
test("char type comparison: join") {
withTable("t1", "t2") {
sql(s"CREATE TABLE t1(c CHAR(2)) USING $format")
sql(s"CREATE TABLE t2(c CHAR(5)) USING $format")
sql("INSERT INTO t1 VALUES ('a')")
sql("INSERT INTO t2 VALUES ('a')")
checkAnswer(sql("SELECT t1.c FROM t1 JOIN t2 ON t1.c = t2.c"), Row("a "))
}
}
test("char type comparison: nested in struct") {
withTable("t") {
sql(s"CREATE TABLE t(c1 STRUCT<c: CHAR(2)>, c2 STRUCT<c: CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES (struct('a'), struct('a'))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
test("char type comparison: nested in array") {
withTable("t") {
sql(s"CREATE TABLE t(c1 ARRAY<CHAR(2)>, c2 ARRAY<CHAR(5)>) USING $format")
sql("INSERT INTO t VALUES (array('a', 'b'), array('a', 'b'))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
test("char type comparison: nested in struct of array") {
withTable("t") {
sql("CREATE TABLE t(c1 STRUCT<a: ARRAY<CHAR(2)>>, c2 STRUCT<a: ARRAY<CHAR(5)>>) " +
s"USING $format")
sql("INSERT INTO t VALUES (struct(array('a', 'b')), struct(array('a', 'b')))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
test("char type comparison: nested in array of struct") {
withTable("t") {
sql("CREATE TABLE t(c1 ARRAY<STRUCT<c: CHAR(2)>>, c2 ARRAY<STRUCT<c: CHAR(5)>>) " +
s"USING $format")
sql("INSERT INTO t VALUES (array(struct('a')), array(struct('a')))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
test("char type comparison: nested in array of array") {
withTable("t") {
sql("CREATE TABLE t(c1 ARRAY<ARRAY<CHAR(2)>>, c2 ARRAY<ARRAY<CHAR(5)>>) " +
s"USING $format")
sql("INSERT INTO t VALUES (array(array('a')), array(array('a')))")
testConditions(spark.table("t"), Seq(
("c1 = c2", true),
("c1 < c2", false),
("c1 IN (c2)", true)))
}
}
test("SPARK-33892: DESCRIBE TABLE w/ char/varchar") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(5)) USING $format")
checkAnswer(sql("desc t").selectExpr("data_type").where("data_type like '%char%'"),
Seq(Row("char(5)"), Row("varchar(3)")))
}
}
test("SPARK-34003: fix char/varchar fails w/ both group by and order by ") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3), i INT) USING $format")
sql("INSERT INTO t VALUES ('c', 1)")
checkAnswer(sql("SELECT v, sum(i) FROM t GROUP BY v ORDER BY v"), Row("c", 1))
}
}
test("SPARK-34003: fix char/varchar fails w/ order by functions") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3), i INT) USING $format")
sql("INSERT INTO t VALUES ('c', 1)")
checkAnswer(sql("SELECT substr(v, 1, 2), sum(i) FROM t GROUP BY v ORDER BY substr(v, 1, 2)"),
Row("c", 1))
checkAnswer(sql("SELECT sum(i) FROM t GROUP BY v ORDER BY substr(v, 1, 2)"),
Row(1))
}
}
test("SPARK-34114: varchar type will strip tailing spaces to certain length at write time") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3)) USING $format")
sql("INSERT INTO t VALUES ('c ')")
checkAnswer(spark.table("t"), Row("c "))
}
}
test("SPARK-34114: varchar type will remain the value length with spaces at read time") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3)) USING $format")
sql("INSERT INTO t VALUES ('c ')")
checkAnswer(spark.table("t"), Row("c "))
}
}
test("SPARK-34833: right-padding applied correctly for correlated subqueries - join keys") {
withTable("t1", "t2") {
sql(s"CREATE TABLE t1(v VARCHAR(3), c CHAR(5)) USING $format")
sql(s"CREATE TABLE t2(v VARCHAR(5), c CHAR(8)) USING $format")
sql("INSERT INTO t1 VALUES ('c', 'b')")
sql("INSERT INTO t2 VALUES ('a', 'b')")
Seq("t1.c = t2.c", "t2.c = t1.c",
"t1.c = 'b'", "'b' = t1.c", "t1.c = 'b '", "'b ' = t1.c",
"t1.c = 'b '", "'b ' = t1.c").foreach { predicate =>
checkAnswer(sql(
s"""
|SELECT v FROM t1
|WHERE 'a' IN (SELECT v FROM t2 WHERE $predicate)
""".stripMargin),
Row("c"))
}
}
}
test("SPARK-34833: right-padding applied correctly for correlated subqueries - other preds") {
withTable("t") {
sql(s"CREATE TABLE t(c0 INT, c1 CHAR(5), c2 CHAR(7)) USING $format")
sql("INSERT INTO t VALUES (1, 'abc', 'abc')")
Seq("c1 = 'abc'", "'abc' = c1", "c1 = 'abc '", "'abc ' = c1",
"c1 = 'abc '", "'abc ' = c1", "c1 = c2", "c2 = c1",
"c1 IN ('xxx', 'abc', 'xxxxx')", "c1 IN ('xxx', 'abc ', 'xxxxx')",
"c1 IN ('xxx', 'abc ', 'xxxxx')",
"c1 IN (c2)", "c2 IN (c1)").foreach { predicate =>
checkAnswer(sql(
s"""
|SELECT c0 FROM t t1
|WHERE (
| SELECT count(*) AS c
| FROM t
| WHERE c0 = t1.c0 AND $predicate
|) > 0
""".stripMargin),
Row(1))
}
}
}
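  // Illustrative sketch (added, not from the original suite): a minimal restatement of the
  // CHAR read-back padding behavior exercised above -- a value shorter than the declared
  // length comes back right-padded with spaces to that length.
  test("illustrative sketch: char value is padded to the declared length on read") {
    withTable("t") {
      sql(s"CREATE TABLE t(c CHAR(5)) USING $format")
      sql("INSERT INTO t VALUES ('ab')")
      checkAnswer(spark.table("t"), Row("ab   "))
    }
  }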
}
// Some basic char/varchar tests which don't rely on a table implementation.
class BasicCharVarcharTestSuite extends QueryTest with SharedSparkSession {
import testImplicits._
test("user-specified schema in cast") {
def assertNoCharType(df: DataFrame): Unit = {
checkAnswer(df, Row("0"))
assert(df.schema.map(_.dataType) == Seq(StringType))
}
val logAppender = new LogAppender("The Spark cast operator does not support char/varchar" +
" type and simply treats them as string type. Please use string type directly to avoid" +
" confusion.")
withLogAppender(logAppender) {
assertNoCharType(spark.range(1).select($"id".cast("char(5)")))
assertNoCharType(spark.range(1).select($"id".cast(CharType(5))))
assertNoCharType(spark.range(1).selectExpr("CAST(id AS CHAR(5))"))
assertNoCharType(sql("SELECT CAST(id AS CHAR(5)) FROM range(1)"))
}
}
def failWithInvalidCharUsage[T](fn: => T): Unit = {
val e = intercept[AnalysisException](fn)
assert(e.getMessage contains "char/varchar type can only be used in the table schema")
}
test("invalidate char/varchar in functions") {
failWithInvalidCharUsage(sql("""SELECT from_json('{"a": "str"}', 'a CHAR(5)')"""))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
val df = sql("""SELECT from_json('{"a": "str"}', 'a CHAR(5)')""")
checkAnswer(df, Row(Row("str")))
val schema = df.schema.head.dataType.asInstanceOf[StructType]
assert(schema.map(_.dataType) == Seq(StringType))
}
}
test("invalidate char/varchar in SparkSession createDataframe") {
val df = spark.range(10).map(_.toString).toDF()
val schema = new StructType().add("id", CharType(5))
failWithInvalidCharUsage(spark.createDataFrame(df.collectAsList(), schema))
failWithInvalidCharUsage(spark.createDataFrame(df.rdd, schema))
failWithInvalidCharUsage(spark.createDataFrame(df.toJavaRDD, schema))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
val df1 = spark.createDataFrame(df.collectAsList(), schema)
checkAnswer(df1, df)
assert(df1.schema.head.dataType === StringType)
}
}
test("invalidate char/varchar in spark.read.schema") {
failWithInvalidCharUsage(spark.read.schema(new StructType().add("id", CharType(5))))
failWithInvalidCharUsage(spark.read.schema("id char(5)"))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
val ds = spark.range(10).map(_.toString)
val df1 = spark.read.schema(new StructType().add("id", CharType(5))).csv(ds)
assert(df1.schema.map(_.dataType) == Seq(StringType))
val df2 = spark.read.schema("id char(5)").csv(ds)
assert(df2.schema.map(_.dataType) == Seq(StringType))
def checkSchema(df: DataFrame): Unit = {
val schemas = df.queryExecution.analyzed.collect {
case l: LogicalRelation => l.relation.schema
case d: DataSourceV2Relation => d.table.schema()
}
assert(schemas.length == 1)
assert(schemas.head.map(_.dataType) == Seq(StringType))
}
// user-specified schema in DataFrameReader: DSV1
checkSchema(spark.read.schema(new StructType().add("id", CharType(5)))
.format(classOf[SimpleInsertSource].getName).load())
checkSchema(spark.read.schema("id char(5)")
.format(classOf[SimpleInsertSource].getName).load())
// user-specified schema in DataFrameReader: DSV2
checkSchema(spark.read.schema(new StructType().add("id", CharType(5)))
.format(classOf[SchemaRequiredDataSource].getName).load())
checkSchema(spark.read.schema("id char(5)")
.format(classOf[SchemaRequiredDataSource].getName).load())
}
}
test("invalidate char/varchar in udf's result type") {
failWithInvalidCharUsage(spark.udf.register("testchar", () => "B", VarcharType(1)))
failWithInvalidCharUsage(spark.udf.register("testchar2", (x: String) => x, VarcharType(1)))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
spark.udf.register("testchar", () => "B", VarcharType(1))
spark.udf.register("testchar2", (x: String) => x, VarcharType(1))
val df1 = spark.sql("select testchar()")
checkAnswer(df1, Row("B"))
assert(df1.schema.head.dataType === StringType)
val df2 = spark.sql("select testchar2('abc')")
checkAnswer(df2, Row("abc"))
assert(df2.schema.head.dataType === StringType)
}
}
test("invalidate char/varchar in spark.readStream.schema") {
failWithInvalidCharUsage(spark.readStream.schema(new StructType().add("id", CharType(5))))
failWithInvalidCharUsage(spark.readStream.schema("id char(5)"))
withSQLConf((SQLConf.LEGACY_CHAR_VARCHAR_AS_STRING.key, "true")) {
withTempPath { dir =>
spark.range(2).write.save(dir.toString)
val df1 = spark.readStream.schema(new StructType().add("id", CharType(5)))
.load(dir.toString)
assert(df1.schema.map(_.dataType) == Seq(StringType))
val df2 = spark.readStream.schema("id char(5)").load(dir.toString)
assert(df2.schema.map(_.dataType) == Seq(StringType))
}
}
}
}
class FileSourceCharVarcharTestSuite extends CharVarcharTestSuite with SharedSparkSession {
override def format: String = "parquet"
override protected def sparkConf: SparkConf = {
super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "parquet")
}
test("create table w/ location and fit length values") {
Seq("char", "varchar").foreach { typ =>
withTempPath { dir =>
withTable("t") {
sql("SELECT '12' as col").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col $typ(2)) using $format LOCATION '$dir'")
val df = sql("select * from t")
checkAnswer(sql("select * from t"), Row("12"))
}
}
}
}
test("create table w/ location and over length values") {
Seq("char", "varchar").foreach { typ =>
withTempPath { dir =>
withTable("t") {
sql("SELECT '123456' as col").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col $typ(2)) using $format LOCATION '$dir'")
checkAnswer(sql("select * from t"), Row("123456"))
}
}
}
}
test("alter table set location w/ fit length values") {
Seq("char", "varchar").foreach { typ =>
withTempPath { dir =>
withTable("t") {
sql("SELECT '12' as col").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col $typ(2)) using $format")
sql(s"ALTER TABLE t SET LOCATION '$dir'")
checkAnswer(spark.table("t"), Row("12"))
}
}
}
}
test("alter table set location w/ over length values") {
Seq("char", "varchar").foreach { typ =>
withTempPath { dir =>
withTable("t") {
sql("SELECT '123456' as col").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col $typ(2)) using $format")
sql(s"ALTER TABLE t SET LOCATION '$dir'")
checkAnswer(spark.table("t"), Row("123456"))
}
}
}
}
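  // Illustrative sketch (added, not from the original suite): unlike the external-location
  // tables above, which bypass length enforcement entirely, a plain INSERT into a managed
  // table is expected to hit the write-side length check and fail for over-length values.
  test("illustrative sketch: over-length insert into a managed table fails") {
    withTable("t") {
      sql(s"CREATE TABLE t(v VARCHAR(2)) USING $format")
      intercept[Exception](sql("INSERT INTO t VALUES ('123456')"))
    }
  }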
// TODO(SPARK-33875): Move these tests to super after DESCRIBE COLUMN v2 implemented
test("SPARK-33892: DESCRIBE COLUMN w/ char/varchar") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(5)) USING $format")
checkAnswer(sql("desc t v").selectExpr("info_value").where("info_value like '%char%'"),
Row("varchar(3)"))
checkAnswer(sql("desc t c").selectExpr("info_value").where("info_value like '%char%'"),
Row("char(5)"))
}
}
// TODO(SPARK-33898): Move these tests to super after SHOW CREATE TABLE for v2 implemented
test("SPARK-33892: SHOW CREATE TABLE w/ char/varchar") {
withTable("t") {
sql(s"CREATE TABLE t(v VARCHAR(3), c CHAR(5)) USING $format")
val rest = sql("SHOW CREATE TABLE t").head().getString(0)
assert(rest.contains("VARCHAR(3)"))
assert(rest.contains("CHAR(5)"))
}
}
test("SPARK-34114: should not trim right for read-side length check and char padding") {
Seq("char", "varchar").foreach { typ =>
withTempPath { dir =>
withTable("t") {
sql("SELECT '12 ' as col").write.format(format).save(dir.toString)
sql(s"CREATE TABLE t (col $typ(2)) using $format LOCATION '$dir'")
checkAnswer(spark.table("t"), Row("12 "))
}
}
}
}
}
class DSV2CharVarcharTestSuite extends CharVarcharTestSuite
with SharedSparkSession {
override def format: String = "foo"
protected override def sparkConf = {
super.sparkConf
.set("spark.sql.catalog.testcat", classOf[InMemoryPartitionTableCatalog].getName)
.set(SQLConf.DEFAULT_CATALOG.key, "testcat")
}
}
|
BryanCutler/spark
|
sql/core/src/test/scala/org/apache/spark/sql/CharVarcharTestSuite.scala
|
Scala
|
apache-2.0
| 36,053 |
package net.chwthewke.scala.protobuf
import scalaz.ReaderWriterState
package object plugin {
import interface._
type IndexedSeq[+X] = scala.collection.immutable.IndexedSeq[X]
type Process[X] = ReaderWriterState[CodeGeneratorRequest, Vector[String], Unit, X]
type ProcessW[W, X] = ReaderWriterState[CodeGeneratorRequest, W, Unit, X]
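// Added note (not in the original file): Process[X] threads the CodeGeneratorRequest as a
// read-only environment, accumulates a Vector[String] log through the writer channel, keeps
// no mutable state (Unit), and finally yields an X; ProcessW generalises the log type to W.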
object syntax extends ProcessSyntax with MessageContainerSyntax
val process: syntax.Process.type = syntax.Process
}
|
chwthewke/scala-protobuf
|
scala-protobuf-plugin-core/src/main/scala/net/chwthewke/scala/protobuf/plugin/package.scala
|
Scala
|
apache-2.0
| 467 |
/* Copyright (c) 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.xml.combinators
import java.io.{InputStream, InputStreamReader, FileInputStream}
import scala.xml.{Node, Elem, MetaData, NamespaceBinding, Text, ProcInstr,
Comment, TopScope, Null, XML, parsing, EntityRef, Utility, Atom}
import scala.io.Source
/**
 * This class encapsulates the state carried around
* when pickling or unpickling XML. This is an immutable data structure.
* Speaking from the point of view of unpickling, the store consists of a
* set of attributes not yet consumed, a set of nodes not yet consumed and
* a set of namespace bindings encountered so far.
*
* @author Iulian Dragos ([email protected])
*/
class LinearStore(ats: MetaData, nods: List[Node], bindings: NamespaceBinding)
extends XmlInputStore {
def attrs = ats
def nodes = nods
def ns = bindings
var skipNonElements = true
/**
* Set whitespace handling when looking for elements. Defaults to skipping whitespace,
* comments and processing instructions.
*/
def setSkipNonElements(v: Boolean): this.type = {
skipNonElements = v
this
}
/**
* Skips whitespace from the list of nodes. Whitespace is considered to be: empty (only
* space) text nodes, comments and processing instructions.
*/
private def doSkipWhitespace: List[Node] = {
def isWhiteSpace(n: Node): Boolean = n match {
case Text(str) => str.trim.isEmpty
case ProcInstr(_, _) | Comment(_) => true
case _ => false
}
if (!skipNonElements) nodes
else {
var n = nodes
while (n != Nil && isWhiteSpace(n.head)) {
n = n.tail
}
n
}
}
/**
* Accept the given element, or fail. Succeeds when the given element is the head of the node
 * list. Comments, processing instructions and whitespace are skipped if 'skipNonElements' is
 * set (the default).
*/
def acceptElem(Label: String, uri: String): (Option[Node], XmlInputStore) = {
val n = doSkipWhitespace
if (n.isEmpty)
(None, this)
else
n.head match {
case e @ Elem(_, Label, _, scope, _*) if (e.namespace == uri) =>
(Some(e), mkState(attrs, n.tail, ns))
case _ => (None, this)
}
}
/**
* Accept the given prefixed attribute, or fail. Succeeds when the given attribute exists
* (order does not matter). Returns a Seq[Node], since attributes may contain text nodes
* interspersed with entity references.
*/
def acceptAttr(label: String, uri: String): (Option[Seq[Node]], XmlInputStore) = {
if (attrs.isEmpty)
(None, this)
else
attrs(uri, ns, label) match {
case null => (None, this)
case contents =>
(Some(contents), mkState(attrs.remove(uri, ns, label), nodes, ns))
}
}
/**
* Accept the given unprefixed attribute, or fail. Succeeds when the given attribute exists
* (order does not matter). Returns a Seq[Node], since attributes may contain text nodes
* interspersed with entity references.
*/
def acceptAttr(label: String): (Option[Seq[Node]], XmlInputStore) = {
if (attrs.isEmpty)
(None, this)
else
attrs(label) match {
case null => (None, this)
case contents =>
(Some(contents), mkState(attrs.remove(label), nodes, ns))
}
}
/** Accept a text node. Fails if the head of the node list is not a text node. */
def acceptText: (Option[Text], XmlInputStore) = {
if (nodes.isEmpty)
(Some(Text("")), this)
else
nodes.head match {
case t: Text => (Some(t), mkState(attrs, nodes.tail, ns))
case _ => (None, this)
}
}
protected def mkState(attrs: MetaData, nodes: Seq[Node], ns: NamespaceBinding, level: Int) =
LinearStore(attrs, nodes, ns).setSkipNonElements(true)
override def toString =
"LinearStore(" + attrs + ", " + nodes.mkString("", ",", "") + ", " + ns + ")"
/** Return a text node out of the sequence of nodes (which might contain entity references). */
private def unescapeText(ns: Seq[Node]) = {
def unescape(sb: StringBuilder, ns: Seq[Node]): StringBuilder = ns match {
case Seq(Text(txt), nss @ _*) =>
sb.append(txt)
unescape(sb, nss)
case Seq(EntityRef(entName), nss @ _*) =>
Utility.unescape(entName, sb)
unescape(sb, nss)
case Seq(a: Atom[_], nss @ _*) =>
sb.append(a.text)
unescape(sb, nss)
case _ =>
sb
}
unescape(new StringBuilder, ns).toString
}
}
/**
* Convenience object for creating LinearStores
*
* @author Iulian Dragos
*/
object LinearStore {
/** Return an empty pickler state. */
def empty: LinearStore =
empty(TopScope)
/** Return an empty pickler state with a given namespace scope. */
def empty(ns: NamespaceBinding) =
LinearStore(Null, Nil, ns)
/** Create a LinearStore with the given state.*/
def apply(attrs: MetaData, nodes: Seq[Node], ns: NamespaceBinding) =
new LinearStore(attrs, nodes.toList, ns)
def apply(store: XmlStore): XmlInputStore =
apply(store.attrs, store.nodes, store.ns)
/** Create a LinearStore from an element. */
def fromElem(e: Elem) =
LinearStore(e.attributes, List(e), TopScope)
/** Create a LinearStore from the given InputStream. */
def fromInputStream(in: InputStream) = {
val e = XML.load(in)
fromElem(e)
}
/** Create a LinearStore from the given filename. */
def fromFile(f: String) = {
fromInputStream(new FileInputStream(f))
}
/** Create a LinearStore for the contents of the given element. */
def enterElem(e: Elem) =
LinearStore(e.attributes, e.child.toList, e.scope)
}
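// Hedged usage sketch (added, not part of the original sources): drives the LinearStore API
// above by hand on a tiny document. The element name "entry" and the attribute "id" are made
// up for illustration; only methods defined above (enterElem, acceptAttr, acceptText) are used.
object LinearStoreUsageExample {
  def main(args: Array[String]): Unit = {
    val elem = <entry id="42">hello</entry>
    // Enter the element: its attributes and children become the store's current state.
    val store = LinearStore.enterElem(elem)
    // Consume the unprefixed "id" attribute; attribute order does not matter.
    val (idValue, _) = store.acceptAttr("id")
    // Consume the text child from the original store (attributes are tracked separately from nodes).
    val (text, _) = store.acceptText
    println(idValue.map(_.map(_.text).mkString)) // Some(42)
    println(text.map(_.text))                    // Some(hello)
  }
}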
|
jeppenejsum/gdata-scala-client
|
src/com/google/xml/combinators/LinearStore.scala
|
Scala
|
apache-2.0
| 6,292 |
package builder.api_json
import builder.JsonUtil
import core.{ServiceFetcher, Util}
import lib.{Primitives, Text}
import play.api.libs.json._
/**
* Just parses json with minimal validation - build to provide a way to
* generate meaningful validation messages back to user. Basic flow
*
* JSON => InternalService => Service
*
*/
private[api_json] case class InternalServiceForm(
json: JsValue,
fetcher: ServiceFetcher
) {
lazy val apidoc = (json \ "apidoc").asOpt[JsValue].map { InternalApidocForm(_) }
lazy val name = JsonUtil.asOptString(json \ "name")
lazy val key = JsonUtil.asOptString(json \ "key")
lazy val namespace = JsonUtil.asOptString(json \ "namespace")
lazy val baseUrl = JsonUtil.asOptString(json \ "base_url")
lazy val basePath = JsonUtil.asOptString(json \ "base_path")
lazy val description = JsonUtil.asOptString(json \ "description")
lazy val info = (json \ "info").asOpt[JsValue].map { InternalInfoForm(_) }
lazy val imports: Seq[InternalImportForm] = {
(json \ "imports").asOpt[JsArray] match {
case None => Seq.empty
case Some(values) => {
values.value.flatMap { _.asOpt[JsObject].map { InternalImportForm(_) } }
}
}
}
lazy val unions: Seq[InternalUnionForm] = {
(json \ "unions").asOpt[JsValue] match {
case Some(unions: JsObject) => {
unions.fields.flatMap { v =>
v match {
case(key, value) => value.asOpt[JsObject].map(InternalUnionForm(key, _))
}
}
}
case _ => Seq.empty
}
}
lazy val models: Seq[InternalModelForm] = {
(json \ "models").asOpt[JsValue] match {
case Some(models: JsObject) => {
models.fields.flatMap { v =>
v match {
case(key, value) => value.asOpt[JsObject].map(InternalModelForm(key, _))
}
}
}
case _ => Seq.empty
}
}
lazy val enums: Seq[InternalEnumForm] = {
(json \ "enums").asOpt[JsValue] match {
case Some(enums: JsObject) => {
enums.fields.flatMap { v =>
v match {
case(key, value) => value.asOpt[JsObject].map(InternalEnumForm(key, _))
}
}
}
case _ => Seq.empty
}
}
lazy val headers: Seq[InternalHeaderForm] = InternalHeaderForm(json)
lazy val resources: Seq[InternalResourceForm] = {
(json \ "resources").asOpt[JsValue] match {
case None => Seq.empty
case Some(resources: JsObject) => {
resources.fields.flatMap { v =>
v match {
case(typeName, value) => {
value.asOpt[JsObject].map(InternalResourceForm(typeName, models, enums, unions, _))
}
}
}
}
case _ => Seq.empty
}
}
lazy val attributes: Seq[InternalAttributeForm] = InternalAttributeForm.attributesFromJson((json \ "attributes").asOpt[JsArray])
lazy val typeResolver = TypeResolver(
defaultNamespace = namespace,
RecursiveTypesProvider(this)
)
}
case class InternalImportForm(
uri: Option[String],
warnings: Seq[String]
)
case class InternalApidocForm(
version: Option[String]
)
case class InternalInfoForm(
contact: Option[InternalInfoContactForm],
license: Option[InternalInfoLicenseForm],
warnings: Seq[String]
)
case class InternalInfoContactForm(
name: Option[String],
email: Option[String],
url: Option[String]
)
case class InternalInfoLicenseForm(
name: Option[String],
url: Option[String]
)
case class InternalDeprecationForm(
description: Option[String]
)
case class InternalModelForm(
name: String,
plural: String,
description: Option[String],
deprecation: Option[InternalDeprecationForm],
fields: Seq[InternalFieldForm],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String]
)
case class InternalEnumForm(
name: String,
plural: String,
description: Option[String],
deprecation: Option[InternalDeprecationForm],
values: Seq[InternalEnumValueForm],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String]
)
case class InternalEnumValueForm(
name: Option[String],
description: Option[String],
deprecation: Option[InternalDeprecationForm],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String]
)
case class InternalUnionForm(
name: String,
plural: String,
discriminator: Option[String],
description: Option[String],
deprecation: Option[InternalDeprecationForm],
types: Seq[InternalUnionTypeForm],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String]
)
case class InternalUnionTypeForm(
datatype: Option[InternalDatatype] = None,
description: Option[String],
deprecation: Option[InternalDeprecationForm],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String]
)
case class InternalHeaderForm(
name: Option[String],
datatype: Option[InternalDatatype],
required: Boolean,
description: Option[String],
deprecation: Option[InternalDeprecationForm],
default: Option[String],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String]
)
case class InternalResourceForm(
datatype: InternalDatatype,
description: Option[String],
deprecation: Option[InternalDeprecationForm],
path: Option[String],
operations: Seq[InternalOperationForm],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String] = Seq.empty
)
case class InternalOperationForm(
method: Option[String],
path: Option[String],
description: Option[String],
deprecation: Option[InternalDeprecationForm],
namedPathParameters: Seq[String],
parameters: Seq[InternalParameterForm],
body: Option[InternalBodyForm],
responses: Seq[InternalResponseForm],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String] = Seq.empty
) {
lazy val label = "%s %s".format(method.getOrElse(""), path).trim
}
case class InternalFieldForm(
name: Option[String] = None,
datatype: Option[InternalDatatype] = None,
description: Option[String] = None,
deprecation: Option[InternalDeprecationForm],
required: Boolean = true,
default: Option[String] = None,
example: Option[String] = None,
minimum: Option[Long] = None,
maximum: Option[Long] = None,
attributes: Seq[InternalAttributeForm],
warnings: Seq[String] = Seq.empty
)
case class InternalAttributeForm(
name: Option[String] = None,
value: Option[JsObject] = None,
description: Option[String] = None,
deprecation: Option[InternalDeprecationForm],
warnings: Seq[String] = Seq.empty
)
case class InternalParameterForm(
name: Option[String] = None,
datatype: Option[InternalDatatype] = None,
location: Option[String] = None,
description: Option[String] = None,
deprecation: Option[InternalDeprecationForm],
required: Boolean,
default: Option[String] = None,
example: Option[String] = None,
minimum: Option[Long] = None,
maximum: Option[Long] = None,
warnings: Seq[String] = Seq.empty
)
case class InternalBodyForm(
datatype: Option[InternalDatatype] = None,
description: Option[String] = None,
deprecation: Option[InternalDeprecationForm],
attributes: Seq[InternalAttributeForm],
warnings: Seq[String]
)
case class InternalResponseForm(
code: String,
datatype: Option[InternalDatatype] = None,
headers: Seq[InternalHeaderForm] = Nil,
description: Option[String] = None,
deprecation: Option[InternalDeprecationForm] = None,
warnings: Seq[String] = Seq.empty
) {
lazy val datatypeLabel: Option[String] = datatype.map(_.label)
}
object InternalApidocForm {
def apply(value: JsValue): InternalApidocForm = {
InternalApidocForm(
version = JsonUtil.asOptString(value \ "version")
)
}
}
object InternalInfoForm {
def apply(
value: JsValue
): InternalInfoForm = {
InternalInfoForm(
contact = (value \ "contact").asOpt[JsValue].map { o =>
InternalInfoContactForm(
name = JsonUtil.asOptString(o \ "name"),
email = JsonUtil.asOptString(o \ "email"),
url = JsonUtil.asOptString(o \ "url")
)
},
license = (value \ "license").asOpt[JsValue].map { o =>
InternalInfoLicenseForm(
name = JsonUtil.asOptString(o \ "name"),
url = JsonUtil.asOptString(o \ "url")
)
},
warnings = JsonUtil.validate(
value,
optionalObjects = Seq("contact", "license"),
optionalStrings = Seq("description")
)
)
}
}
object InternalDeprecationForm {
def apply(value: JsValue): InternalDeprecationForm = {
InternalDeprecationForm(
description = JsonUtil.asOptString(value \ "description")
)
}
def fromJsValue(json: JsValue): Option[InternalDeprecationForm] = {
(json \ "deprecation").asOpt[JsValue].map(InternalDeprecationForm(_))
}
}
object InternalUnionForm {
def apply(name: String, value: JsObject): InternalUnionForm = {
val description = JsonUtil.asOptString(value \ "description")
val types = (value \ "types").asOpt[JsArray] match {
case None => Seq.empty
case Some(a: JsArray) => {
a.value.flatMap { value =>
value.asOpt[JsObject].map { json =>
val typeName = JsonUtil.asOptString(json \ "type").map(InternalDatatype(_))
InternalUnionTypeForm(
datatype = typeName,
description = JsonUtil.asOptString(json \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(json),
attributes = InternalAttributeForm.attributesFromJson((value \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
json,
strings = Seq("type"),
optionalStrings = Seq("description"),
optionalObjects = Seq("deprecation"),
optionalArraysOfObjects = Seq("attributes"),
prefix = Some(s"Union[$name] type[${typeName.getOrElse("")}]")
)
)
}
}
}
}
InternalUnionForm(
name = name,
plural = JsonUtil.asOptString(value \ "plural").getOrElse( Text.pluralize(name) ),
discriminator = JsonUtil.asOptString(value \ "discriminator"),
description = description,
deprecation = InternalDeprecationForm.fromJsValue(value),
types = types,
attributes = InternalAttributeForm.attributesFromJson((value \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
value,
optionalStrings = Seq("discriminator", "description", "plural"),
arraysOfObjects = Seq("types"),
optionalObjects = Seq("deprecation"),
optionalArraysOfObjects = Seq("attributes"),
prefix = Some(s"Union[$name]")
)
)
}
}
object InternalImportForm {
def apply(value: JsObject): InternalImportForm = {
InternalImportForm(
uri = JsonUtil.asOptString(value \ "uri"),
warnings = JsonUtil.validate(
value,
strings = Seq("uri"),
prefix = Some("Import")
)
)
}
}
object InternalModelForm {
def apply(name: String, value: JsObject): InternalModelForm = {
val description = JsonUtil.asOptString(value \ "description")
val plural: String = JsonUtil.asOptString(value \ "plural").getOrElse( Text.pluralize(name) )
val fields = (value \ "fields").asOpt[JsArray] match {
case None => Seq.empty
case Some(a: JsArray) => {
a.value.flatMap { _.asOpt[JsObject].map(InternalFieldForm(_)) }
}
}
InternalModelForm(
name = name,
plural = plural,
description = description,
deprecation = InternalDeprecationForm.fromJsValue(value),
fields = fields,
attributes = InternalAttributeForm.attributesFromJson((value \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
value,
optionalStrings = Seq("description", "plural"),
arraysOfObjects = Seq("fields"),
optionalArraysOfObjects = Seq("attributes"),
optionalObjects = Seq("deprecation"),
prefix = Some(s"Model[$name]")
)
)
}
}
object InternalEnumForm {
def apply(name: String, value: JsObject): InternalEnumForm = {
val description = JsonUtil.asOptString(value \ "description")
val values = (value \ "values").asOpt[JsArray] match {
case None => Seq.empty
case Some(a: JsArray) => {
a.value.flatMap { value =>
value.asOpt[JsObject].map { json =>
val valueName = JsonUtil.asOptString(json \ "name")
InternalEnumValueForm(
name = valueName,
description = JsonUtil.asOptString(json \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(json),
attributes = InternalAttributeForm.attributesFromJson((json \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
json,
strings = Seq("name"),
optionalStrings = Seq("description"),
optionalObjects = Seq("deprecation"),
optionalArraysOfObjects = Seq("attributes"),
prefix = Some(s"Enum[$name] value[${valueName.getOrElse("")}]")
)
)
}
}
}
}
InternalEnumForm(
name = name,
plural = JsonUtil.asOptString(value \ "plural").getOrElse( Text.pluralize(name) ),
description = description,
deprecation = InternalDeprecationForm.fromJsValue(value),
values = values,
attributes = InternalAttributeForm.attributesFromJson((value \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
value,
optionalStrings = Seq("name", "description", "plural"),
arraysOfObjects = Seq("values"),
optionalObjects = Seq("deprecation"),
optionalArraysOfObjects = Seq("attributes"),
prefix = Some(s"Enum[$name]")
)
)
}
}
object InternalHeaderForm {
def apply(json: JsValue): Seq[InternalHeaderForm] = {
(json \ "headers").asOpt[JsArray].map(_.value).getOrElse(Seq.empty).flatMap { el =>
el match {
case o: JsObject => {
val datatype = InternalDatatype(o)
val headerName = JsonUtil.asOptString(o \ "name")
Some(
InternalHeaderForm(
name = headerName,
datatype = datatype,
required = datatype.map(_.required).getOrElse(true),
description = JsonUtil.asOptString(o \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(o),
default = JsonUtil.asOptString(o \ "default"),
attributes = InternalAttributeForm.attributesFromJson((o \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
o,
strings = Seq("name", "type"),
optionalBooleans = Seq("required"),
optionalObjects = Seq("deprecation"),
optionalStrings = Seq("default", "description"),
optionalArraysOfObjects = Seq("attributes"),
prefix = Some(s"Header[${headerName.getOrElse("")}]".trim)
)
)
)
}
case _ => None
}
}
}
}
object InternalResourceForm {
def apply(
typeName: String,
models: Seq[InternalModelForm],
enums: Seq[InternalEnumForm],
unions: Seq[InternalUnionForm],
value: JsObject
): InternalResourceForm = {
val path: Option[String] = (value \ "path").asOpt[JsString] match {
case Some(v) => {
Some(v.value)
}
case None => {
enums.find(e => e.name == typeName) match {
case Some(enum) => Some("/" + enum.plural)
case None => {
models.find(m => m.name == typeName) match {
case Some(model) => Some("/" + model.plural)
case None => {
unions.find(u => u.name == typeName) match {
case Some(union) => Some("/" + union.plural)
case None => None
}
}
}
}
}
}
}
val operations = (value \ "operations").asOpt[JsArray] match {
case None => Seq.empty
case Some(a: JsArray) => {
a.value.flatMap { _.asOpt[JsObject].map(InternalOperationForm(path, _)) }
}
}
InternalResourceForm(
datatype = InternalDatatype(typeName),
description = JsonUtil.asOptString(value \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(value),
path = path,
operations = operations,
attributes = InternalAttributeForm.attributesFromJson((value \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
value,
optionalStrings = Seq("path", "description"),
optionalObjects = Seq("deprecation"),
arraysOfObjects = Seq("operations"),
optionalArraysOfObjects = Seq("attributes")
)
)
}
}
object InternalOperationForm {
private val NoContentResponse = InternalResponseForm(code = "204", datatype = Some(InternalDatatype("unit")))
def apply(resourcePath: Option[String], json: JsObject): InternalOperationForm = {
val operationPath = JsonUtil.asOptString(json \ "path")
val knownPath = Seq(resourcePath, operationPath).flatten.mkString("/")
val namedPathParameters = Util.namedParametersInPath(knownPath)
val parameters = (json \ "parameters").asOpt[JsArray] match {
case None => Seq.empty
case Some(a: JsArray) => {
a.value.flatMap { _.asOpt[JsObject].map(InternalParameterForm(_)) }
}
}
val responses: Seq[InternalResponseForm] = {
(json \ "responses").asOpt[JsObject] match {
case None => {
Seq(NoContentResponse)
}
case Some(responses: JsObject) => {
responses.fields.map {
case(code, value) => {
value match {
case o: JsObject => {
InternalResponseForm(code, o)
}
case other => {
InternalResponseForm(
code = code,
warnings = Seq("value must be an object")
)
}
}
}
}
}
}
}
val body = (json \ "body").asOpt[JsObject].map { o =>
InternalBodyForm(
datatype = JsonUtil.asOptString(o \ "type").map(InternalDatatype(_)),
description = JsonUtil.asOptString(o \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(o),
attributes = InternalAttributeForm.attributesFromJson((o \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
o,
strings = Seq("type"),
optionalStrings = Seq("description"),
optionalObjects = Seq("deprecation"),
optionalArraysOfObjects = Seq("attributes")
)
)
}
InternalOperationForm(
method = JsonUtil.asOptString(json \ "method").map(_.toUpperCase),
path = operationPath,
body = body,
description = JsonUtil.asOptString(json \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(json),
responses = responses,
namedPathParameters = namedPathParameters,
parameters = parameters,
attributes = InternalAttributeForm.attributesFromJson((json \ "attributes").asOpt[JsArray]),
warnings = JsonUtil.validate(
json,
strings = Seq("method"),
optionalStrings = Seq("description", "path"),
optionalArraysOfObjects = Seq("parameters", "attributes"),
optionalObjects = Seq("body", "responses", "deprecation")
)
)
}
}
object InternalResponseForm {
def apply(code: String, json: JsObject): InternalResponseForm = {
InternalResponseForm(
code = code,
datatype = JsonUtil.asOptString(json \ "type").map(InternalDatatype(_)),
headers = InternalHeaderForm(json),
description = JsonUtil.asOptString(json \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(json),
warnings = JsonUtil.validate(
json,
strings = Seq("type"),
optionalStrings = Seq("description"),
optionalArraysOfObjects = Seq("headers"),
optionalObjects = Seq("deprecation")
)
)
}
}
object InternalFieldForm {
def apply(json: JsObject): InternalFieldForm = {
val warnings = if (JsonUtil.hasKey(json, "enum") || JsonUtil.hasKey(json, "values")) {
Seq("Enumerations are now first class objects and must be defined in an explicit enum section")
} else {
Seq.empty
}
val datatype = InternalDatatype(json)
InternalFieldForm(
name = JsonUtil.asOptString(json \ "name"),
datatype = datatype,
description = JsonUtil.asOptString(json \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(json),
required = datatype.map(_.required).getOrElse(true),
default = JsonUtil.asOptString(json \ "default"),
minimum = JsonUtil.asOptLong(json \ "minimum"),
maximum = JsonUtil.asOptLong(json \ "maximum"),
example = JsonUtil.asOptString(json \ "example"),
attributes = InternalAttributeForm.attributesFromJson((json \ "attributes").asOpt[JsArray]),
warnings = warnings ++ JsonUtil.validate(
json,
strings = Seq("name", "type"),
optionalStrings = Seq("description", "example"),
optionalObjects = Seq("deprecation"),
optionalBooleans = Seq("required"),
optionalNumbers = Seq("minimum", "maximum"),
optionalArraysOfObjects = Seq("attributes"),
optionalAnys = Seq("default")
)
)
}
}
object InternalAttributeForm {
def apply(json: JsObject): InternalAttributeForm = {
InternalAttributeForm (
name = JsonUtil.asOptString(json \ "name"),
value = (json \ "value").asOpt[JsObject],
description = JsonUtil.asOptString(json \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(json),
warnings = JsonUtil.validate(
json,
strings = Seq("name"),
objects = Seq("value"),
optionalStrings = Seq("description")
)
)
}
def attributesFromJson(a: Option[JsArray]): Seq[InternalAttributeForm] = a match {
case None => Seq.empty
case Some(a: JsArray) => {
a.value.flatMap { _.asOpt[JsObject].map(InternalAttributeForm(_)) }
}
}
}
object InternalParameterForm {
def apply(json: JsObject): InternalParameterForm = {
val datatype = InternalDatatype(json)
InternalParameterForm(
name = JsonUtil.asOptString(json \ "name"),
datatype = datatype,
location = JsonUtil.asOptString(json \ "location"),
description = JsonUtil.asOptString(json \ "description"),
deprecation = InternalDeprecationForm.fromJsValue(json),
required = datatype.map(_.required).getOrElse(true),
default = JsonUtil.asOptString(json \ "default"),
minimum = JsonUtil.asOptLong(json \ "minimum"),
maximum = JsonUtil.asOptLong(json \ "maximum"),
example = JsonUtil.asOptString(json \ "example"),
warnings = JsonUtil.validate(
json,
strings = Seq("name", "type"),
optionalStrings = Seq("description", "example", "location"),
optionalObjects = Seq("deprecation"),
optionalBooleans = Seq("required"),
optionalNumbers = Seq("minimum", "maximum"),
optionalAnys = Seq("default")
)
)
}
}
sealed trait InternalDatatype {
def name: String
def required: Boolean
def label: String
protected def makeLabel(prefix: String = "", postfix: String = ""): String = {
prefix + name + postfix
}
}
private[api_json] object InternalDatatype {
case class List(name: String, required: Boolean) extends InternalDatatype {
override def label = makeLabel("[", "]")
}
case class Map(name: String, required: Boolean) extends InternalDatatype {
override def label = makeLabel("map[", "]")
}
case class Singleton(name: String, required: Boolean) extends InternalDatatype {
override def label = makeLabel()
}
private val ListRx = "^\\[(.*)\\]$".r
private val MapRx = "^map\\[(.*)\\]$".r
private val DefaultMapRx = "^map$".r
def apply(value: String): InternalDatatype = {
value match {
case ListRx(name) => InternalDatatype.List(formatName(name), true)
case MapRx(name) => InternalDatatype.Map(formatName(name), true)
case DefaultMapRx() => InternalDatatype.Map(Primitives.String.toString, true)
case _ => InternalDatatype.Singleton(formatName(value), true)
}
}
/**
* Make primitive datatype names case insensitive to user
* input. e.g. accept both 'UUID' and 'uuid' as the uuid type.
*/
private def formatName(name: String): String = {
Primitives(name) match {
case None => name
case Some(p) => p.toString
}
}
def apply(json: JsObject): Option[InternalDatatype] = {
JsonUtil.asOptString(json \ "type").map(InternalDatatype(_)).map { dt =>
JsonUtil.asOptBoolean(json \ "required") match {
case None => {
dt
}
case Some(true) => {
// User explicitly marked this required
dt match {
case InternalDatatype.List(name, _) => InternalDatatype.List(formatName(name), true)
case InternalDatatype.Map(name, _) => InternalDatatype.Map(formatName(name), true)
case InternalDatatype.Singleton(name, _) => InternalDatatype.Singleton(formatName(name), true)
}
}
case Some(false) => {
// User explicitly marked this optional
dt match {
case InternalDatatype.List(name, _) => InternalDatatype.List(formatName(name), false)
case InternalDatatype.Map(name, _) => InternalDatatype.Map(formatName(name), false)
case InternalDatatype.Singleton(name, _) => InternalDatatype.Singleton(formatName(name), false)
}
}
}
}
}
}
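// Hedged sketch (added, not part of the original file): examples of how the InternalDatatype
// parser above resolves type labels. The expected outputs follow from the ListRx/MapRx/
// DefaultMapRx cases and from formatName, assuming Primitives resolves primitive names
// case-insensitively to their lowercase canonical form, as the comment on formatName states.
object InternalDatatypeExamples {
  def main(args: Array[String]): Unit = {
    println(InternalDatatype("string").label)      // string
    println(InternalDatatype("UUID").label)        // uuid
    println(InternalDatatype("[long]").label)      // [long]
    println(InternalDatatype("map[string]").label) // map[string]
    println(InternalDatatype("map").label)         // map[string] (value type defaults to string)
  }
}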
|
movio/apidoc
|
core/src/main/scala/core/builder/api_json/InternalServiceForm.scala
|
Scala
|
mit
| 26,037 |
package org.jetbrains.plugins.cbt.runner.internal
import com.intellij.execution.configurations.{ConfigurationFactory, ConfigurationType, RunConfiguration}
import com.intellij.openapi.module.Module
import com.intellij.openapi.project.Project
import org.jetbrains.plugins.cbt.runner.CbtProcessListener
class CbtBuildConfigurationFactory(task: String,
useDirect: Boolean,
module: Module,
options: Seq[String],
typez: ConfigurationType,
listener: CbtProcessListener) extends ConfigurationFactory(typez) {
override def createTemplateConfiguration(project: Project): RunConfiguration =
new CbtBuildConfiguration(task, useDirect, module, options, project, listener, this)
}
|
triplequote/intellij-scala
|
cbt/src/org/jetbrains/plugins/cbt/runner/internal/CbtBuildConfigurationFactory.scala
|
Scala
|
apache-2.0
| 850 |
/**
* Copyright (c) 2015, Cloudera, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.sparkts.stats
import breeze.linalg._
import com.cloudera.sparkts.models.ARModel
import org.apache.commons.math3.random.MersenneTwister
import org.scalatest.FunSuite
class AugmentedDickeyFullerSuite extends FunSuite {
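  // Added note: these are smoke tests -- they only assert that the ADF statistic and p-value
  // come back as finite (non-NaN) numbers for a highly persistent AR(1) sample (phi = 0.95)
  // and for an i.i.d. sample, and print both values for manual inspection.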
test("non-stationary AR model") {
val rand = new MersenneTwister(10L)
val arModel = new ARModel(0.0, .95)
val sample = arModel.sample(500, rand)
val (adfStat, pValue) = TimeSeriesStatisticalTests.adftest(sample, 1)
assert(!java.lang.Double.isNaN(adfStat))
assert(!java.lang.Double.isNaN(pValue))
println("adfStat: " + adfStat)
println("pValue: " + pValue)
}
test("iid samples") {
val rand = new MersenneTwister(11L)
val iidSample = Array.fill(500)(rand.nextDouble())
val (adfStat, pValue) = TimeSeriesStatisticalTests.adftest(new DenseVector(iidSample), 1)
assert(!java.lang.Double.isNaN(adfStat))
assert(!java.lang.Double.isNaN(pValue))
println("adfStat: " + adfStat)
println("pValue: " + pValue)
}
}
|
SeelozInc/spark-timeseries
|
src/test/scala/com/cloudera/sparkts/stats/AugmentedDickeyFullerSuite.scala
|
Scala
|
apache-2.0
| 1,582 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
import com.intellij.psi.impl.java.stubs.PsiClassStub
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
/**
* @author ilyas
*/
trait ScTemplateDefinitionStub extends PsiClassStub[ScTemplateDefinition] with ScMemberOrLocal {
def javaQualifiedName: String
def isDotty: Boolean
def isPackageObject: Boolean
def isVisibleInJava: Boolean
def isScriptFileClass: Boolean
def isImplicitObject: Boolean
def isImplicitClass: Boolean
def additionalJavaNames: Array[String]
def javaName: String
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/psi/stubs/ScTemplateDefinitionStub.scala
|
Scala
|
apache-2.0
| 624 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.expressions
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.types.Row
import org.apache.flink.api.java.typeutils.RowTypeInfo
import org.apache.flink.table.api.scala._
import org.apache.flink.table.expressions.utils.ExpressionTestBase
import org.junit.{Ignore, Test}
/**
* Tests that can only be checked manually as they are non-deterministic.
*/
class NonDeterministicTests extends ExpressionTestBase {
@Ignore
@Test
def testCurrentDate(): Unit = {
testAllApis(
currentDate(),
"currentDate()",
"CURRENT_DATE",
"PLEASE CHECK MANUALLY")
}
@Ignore
@Test
def testCurrentTime(): Unit = {
testAllApis(
currentTime(),
"currentTime()",
"CURRENT_TIME",
"PLEASE CHECK MANUALLY")
}
@Ignore
@Test
def testCurrentTimestamp(): Unit = {
testAllApis(
currentTimestamp(),
"currentTimestamp()",
"CURRENT_TIMESTAMP",
"PLEASE CHECK MANUALLY")
}
@Ignore
@Test
def testLocalTimestamp(): Unit = {
testAllApis(
localTimestamp(),
"localTimestamp()",
"LOCALTIMESTAMP",
"PLEASE CHECK MANUALLY")
}
@Ignore
@Test
def testLocalTime(): Unit = {
testAllApis(
localTime(),
"localTime()",
"LOCALTIME",
"PLEASE CHECK MANUALLY")
}
@Ignore
@Test
def testUUID(): Unit = {
testAllApis(
uuid(),
"uuid()",
"UUID()",
"PLEASE CHECK MANUALLY")
}
// ----------------------------------------------------------------------------------------------
override def testData: Row = new Row(0)
override def typeInfo: TypeInformation[Any] =
new RowTypeInfo().asInstanceOf[TypeInformation[Any]]
}
|
hequn8128/flink
|
flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/expressions/NonDeterministicTests.scala
|
Scala
|
apache-2.0
| 2,555 |
package controller
import java.io.File
import java.nio.file.{Path, Files, Paths}
import console.model.World
import me.mtrupkin.console.control.{Border, Composite}
import me.mtrupkin.console.controller.ControllerStateMachine
import me.mtrupkin.console.layout.{Pos, Layout, Orientation}
import me.mtrupkin.console.screen.ConsoleKey
import me.mtrupkin.widget.{IndexListWidget, ListWidget}
import model.Saves
/**
* Created by mtrupkin on 11/29/2014.
*/
trait SaveGame { self: ControllerStateMachine =>
class SaveGameController(val world: World) extends ControllerState {
val window = new Composite(name = "window", layoutFlow = Orientation.VERTICAL) {
override def keyPressed(key: ConsoleKey) {
import scala.swing.event.Key._
key.keyValue match {
case Escape => revertState()
case _ => super.keyPressed(key)
}
}
}
val listWidget = new IndexListWidget("New Save" :: Saves.names, slot)
val listBorder = new Composite(name = "list-border", border = Border.DOUBLE)
listBorder.layout = Some(Layout(None, Pos.CENTER))
listBorder.addControl(listWidget)
window.addControl(listBorder)
override def update(elapsed: Int): Unit = {}
def slot(i: Int): Unit = {
if (i == 0) {
newSave()
}
}
def newSave(): Unit = {
val name = s"slot-${Saves.names().length}.sav"
Saves.saveGame(name, world)
revertState()
}
}
}
|
mtrupkin/console-lib
|
src/main/scala/controller/SaveGame.scala
|
Scala
|
mit
| 1,454 |
package co.blocke.scalajack
package yaml
package primitives
import TestUtil._
import munit._
import munit.internal.console
import java.lang.{Boolean => JBoolean, Byte => JByte, Double => JDouble, Float => JFloat, Integer => JInt, Long => JLong, Short => JShort}
import java.math.{BigDecimal => JBigDecimal, BigInteger => JBigInteger}
class JavaPrim() extends FunSuite:
val sj = ScalaJack(YamlFlavor())
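  // Added note: the assertions below split the rendered YAML into lines and compare the
  // resulting *sets* (via Set.diff) rather than the raw strings, because the emitter does
  // not guarantee a stable key order; only the set of emitted lines is checked.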
test("BigDecimal must work") {
describe(
"---------------------------------\\n: Java Primitive Tests (YAML) :\\n---------------------------------", Console.BLUE
)
describe("+++ Positive Tests +++")
val inst = SampleJBigDecimal(
JBigDecimal.ZERO,
JBigDecimal.ONE,
JBigDecimal.TEN,
new JBigDecimal(
"0.1499999999999999944488848768742172978818416595458984375"
),
JBigDecimal.ZERO
)
val yaml = sj.render(inst)
val comparison = """bd5: !!float '0'
|bd1: !!float '0'
|bd3: !!float '10'
|bd4: 0.1499999999999999944488848768742172978818416595458984375
|bd2: !!float '1'""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet) )
assertEquals(inst, sj.read[SampleJBigDecimal](yaml))
}
test("BigInteger must work") {
val inst = SampleJBigInteger(
JBigInteger.ZERO,
JBigInteger.ONE,
JBigInteger.TEN,
new JBigInteger("-90182736451928374653345"),
new JBigInteger("90182736451928374653345"),
new JBigInteger("0"),
JBigInteger.ZERO
)
val yaml = sj.render(inst)
val comparison = """bi6: 0
|bi2: 1
|bi5: 90182736451928374653345
|bi1: 0
|bi7: 0
|bi3: 10
|bi4: -90182736451928374653345""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet) )
assertEquals(inst, sj.read[SampleJBigInteger](yaml))
}
test("Boolean must work") {
val inst =
SampleJBoolean(JBoolean.TRUE, JBoolean.FALSE, true, false, null)
val yaml = sj.render(inst)
val comparison = """bool5: null
|bool3: true
|bool4: false
|bool2: false
|bool1: true""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet) )
assertEquals(inst, sj.read[SampleJBoolean](yaml))
}
test("Byte must work") {
val inst = SampleJByte(
JByte.MAX_VALUE,
JByte.MIN_VALUE,
0.asInstanceOf[Byte],
64.asInstanceOf[Byte],
null
)
val yaml = sj.render(inst)
val comparison = """b5: null
|b1: 127
|b3: 0
|b2: -128
|b4: 64""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet))
assertEquals(inst, sj.read[SampleJByte](yaml))
}
test("Char must work") {
val inst = SampleJChar('Z', '\u20A0', null)
val yaml = sj.render(inst)
val comparison = """c1: Z
|c2: ₠
|c3: null""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet))
assertEquals(inst, sj.read[SampleJChar](yaml))
}
test("Double must work") {
val inst = SampleJDouble(
JDouble.MAX_VALUE,
JDouble.MIN_VALUE,
0.0,
-123.4567,
null
)
val yaml = sj.render(inst)
val comparison =
"""d5: null
|d3: 0.0
|d4: -123.4567
|d2: !!float '4.9E-324'
|d1: !!float '1.7976931348623157E308'""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet))
assertEquals(inst, sj.read[SampleJDouble](yaml))
}
test("Float must work") {
val inst = SampleJFloat(
JFloat.MAX_VALUE,
JFloat.MIN_VALUE,
0.0F,
-123.4567F,
null
)
val yaml = sj.render(inst)
val comparison = """f4: -123.4567
|f5: null
|f3: 0.0
|f2: !!float '1.4E-45'
|f1: !!float '3.4028235E38'""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet) )
assertEquals(inst, sj.read[SampleJFloat](yaml))
}
test("Int must work") {
val inst = SampleJInt(JInt.MAX_VALUE, JInt.MIN_VALUE, 0, 123, null)
val yaml = sj.render(inst)
val comparison = """i2: -2147483648
|i4: 123
|i3: 0
|i1: 2147483647
|i5: null""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet) )
assertEquals(inst, sj.read[SampleJInt](yaml))
}
test("Long must work") {
val inst = SampleJLong(JLong.MAX_VALUE, JLong.MIN_VALUE, 0L, 123L, null)
val yaml = sj.render(inst)
val comparison = """l2: -9223372036854775808
|l1: 9223372036854775807
|l4: 123
|l3: 0
|l5: null""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet) )
assertEquals(inst, sj.read[SampleJLong](yaml))
}
test("Number must work") {
val inst = SampleJNumber(
JByte.valueOf("-128"),
JByte.valueOf("127"),
JShort.valueOf("-32768"),
JShort.valueOf("32767"),
JInt.valueOf("-2147483648"),
JInt.valueOf("2147483647"),
JLong.valueOf("-9223372036854775808"),
JLong.valueOf("9223372036854755807"),
null, //new JBigInteger("9923372036854755810"),
JByte.valueOf("0"),
JFloat.valueOf("3.4e-038"),
JFloat.valueOf("3.4e+038"),
JDouble.valueOf("1.7e-308"),
JDouble.valueOf("1.7e+308"),
null, //new JBigDecimal("1.8e+308"),
JFloat.valueOf("0.0"),
null
)
val yaml = sj.render(inst)
val comparison = """n10: 0
|n4: 32767
|n8: 9223372036854755807
|n16: 0.0
|n1: -128
|n3: -32768
|n15: null
|n14: !!float '1.7E308'
|n12: !!float '3.4E38'
|n13: !!float '1.7E-308'
|n6: 2147483647
|n5: -2147483648
|n9: null
|n7: -9223372036854775808
|n11: !!float '3.4E-38'
|n2: 127
|n17: null""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet) )
assertEquals(inst, sj.read[SampleJNumber](yaml))
}
test("Short must work") {
val inst = SampleJShort(
JShort.MAX_VALUE,
JShort.MIN_VALUE,
0.asInstanceOf[Short],
123.asInstanceOf[Short],
null
)
val yaml = sj.render(inst)
val comparison = """s4: 123
|s1: 32767
|s5: null
|s2: -32768
|s3: 0""".stripMargin
assertEquals(Set.empty[String], yaml.asInstanceOf[String].split("\\n").toSet.diff(comparison.split("\\n").toSet) )
assertEquals(inst, sj.read[SampleJShort](yaml))
}
test("BigDecimal must break") {
describe("--- Negative Tests ---")
val yaml =
"""bd1: 0
|bd2: 1
|bd3: 10
|bd4: [a,b,c]
|bd5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 3: Expected a Number value here: +SEQ"){
sj.read[SampleBigDecimal](yaml)
}
}
test("BigInteger must break") {
val yaml =
"""bi1: [a,b]
|bi2: 1
|bi3: 10
|bi4: -90182736451928374653345
|bi5: 90182736451928374653345
|bi6: 0
|bi7: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 0: Expected a Number value here: +SEQ"){
sj.read[SampleJBigInteger](yaml)
}
}
test("Boolean must break") {
val yaml =
"""bool1: true
|bool2: false
|bool3: true
|bool4: [a,b]
|bool5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 3: Expected a Boolean value here: +SEQ"){
sj.read[SampleJBoolean](yaml)
}
}
test("Byte must break") {
val yaml =
"""b1: 127
|b2: -128
|b3: false
|b4: 64
|b5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 2: Expected a Number value here: =VAL :false"){
sj.read[SampleJByte](yaml)
}
}
test("Char must break") {
val yaml =
"""c1: "Z"
|c2: [a,b]
|c3: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 1: Expected a String here: +SEQ"){
sj.read[SampleJChar](yaml)
}
val yaml2 =
"""c1: "Z"
|c2:
|c3: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 1: Tried to read a Character but empty string found"){
sj.read[SampleJChar](yaml2)
}
}
test("Double must break") {
val yaml =
"""d1: 1.7976931348623157E308
|d2: 4.9E-324
|d3: fred
|d4: -123.4567
|d5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 2: Expected a Number value here: =VAL :fred"){
sj.read[SampleJDouble](yaml)
}
}
test("Float must break") {
val yaml =
"""f1: 3.4028235E38
|f2: fred
|f3: 0.0
|f4: -123.4567
|f5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 1: Expected a Number value here: =VAL :fred"){
sj.read[SampleJFloat](yaml)
}
}
test("Int must break") {
val yaml =
"""i1: 2147483647
|i2: -2147483648
|i3: false
|i4: 123
|i5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 2: Expected a Number value here: =VAL :false"){
sj.read[SampleJInt](yaml)
}
val yaml2 =
"""i1: 2147483647
|i2: -2147483648
|i3: 0.3
|i4: 123
|i5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[java.lang.NumberFormatException]("For input string: \"0.3\""){
sj.read[SampleJInt](yaml2)
}
}
test("Long must break") {
val yaml =
"""l1: 9223372036854775807
|l2: -9223372036854775808
|l3: [a,b]
|l4: 123
|l5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 2: Expected a Number value here: +SEQ"){
sj.read[SampleJLong](yaml)
}
val yaml2 =
"""l1: 9223372036854775807
|l2: -9223372036854775808
|l3: 0.3
|l4: 123
|l5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[java.lang.NumberFormatException]("For input string: \"0.3\""){
sj.read[SampleJLong](yaml2)
}
}
test("Number must break") {
val yaml =
"""n1: -128
|n2: 127
|n3: [a,b]
|n4: 32767
|n5: -2147483648
|n6: 2147483647
|n7: -9223372036854775808
|n8: 9223372036854755807
|n9: 9923372036854755810
|n10: 0
|n11: 3.4E-38
|n12: 3.4E38
|n13: 1.7E-308
|n14: 1.7E308
|n15: 1.8E+308
|n16: 0.0
|n17: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 2: Expected a Number value here: +SEQ"){
sj.read[SampleJNumber](yaml)
}
}
test("Short must break") {
val yaml =
"""s1: false
|s2: -32768
|s3: 0
|s4: 123
|s5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[ScalaJackError]("Line 0: Expected a Number value here: =VAL :false"){
sj.read[SampleJShort](yaml)
}
val yaml2 =
"""s1: 2.3
|s2: -32768
|s3: 0
|s4: 123
|s5: null""".stripMargin.asInstanceOf[YAML]
interceptMessage[java.lang.NumberFormatException]("For input string: \"2.3\""){
sj.read[SampleJShort](yaml2)
}
}
|
gzoller/ScalaJack
|
core/src/test/scala/co.blocke.scalajack/yaml/primitives/JavaPrim.scala
|
Scala
|
mit
| 12,763 |
package com.blogspot.ramannanda.scala.algorithms.medium
object MatrixFlip {
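  // Added note on the greedy strategy implemented below: (1) flip every row whose leading
  // bit is 0, since the most significant bit contributes more than all lower bits combined;
  // (2) flip every column in which zeros outnumber ones; (3) interpret each row as a binary
  // number and sum the rows.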
def matrixScore(A: Array[Array[Int]]): Int = {
for (i <- A.indices) {
if (A(i)(0) == 0) {
for (j <- A(i).indices) {
A(i)(j) = 1 - A(i)(j)
}
}
}
for (j <- A(0).indices) {
var zeros = 0
for (i <- A.indices) {
if (A(i)(j) == 0) {
zeros += 1
}
}
if (zeros > A.length / 2.0) {
for (i <- A.indices) {
A(i)(j) = 1 - A(i)(j)
}
}
}
var result = 0
for (i <- A.indices) {
for (j <- A(i).indices) {
result += A(i)(j).toString.toInt << (A(i).length - j - 1)
}
}
result
}
def main(args: Array[String]): Unit = {
println(s"Result is ${matrixScore(Array(Array(0, 0, 1, 1), Array(1, 0, 1, 0), Array(1, 1, 0, 0)))}")
}
}
|
ramannanda9/algorithms-in-scala
|
src/main/scala/com/blogspot/ramannanda/scala/algorithms/medium/MatrixFlip.scala
|
Scala
|
gpl-3.0
| 865 |
package org.jetbrains.plugins.scala.lang.psi.api.base.patterns
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
/**
* @author Alexander Podkhalyuzin
* Patterns, introduced by case classes or extractors
*/
trait ScConstructorPattern extends ScPattern {
def args: ScPatternArgumentList = findChildByClassScala(classOf[ScPatternArgumentList])
def ref: ScStableCodeReferenceElement = findChildByClassScala(classOf[ScStableCodeReferenceElement])
}
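// Illustrative aside (added, not part of the plugin sources): the kind of user code a
// ScConstructorPattern PSI node stands for -- a pattern formed by applying a case-class
// constructor (or any extractor with an unapply) to sub-patterns.
object ConstructorPatternExample {
  case class Point(x: Int, y: Int)

  def describe(p: Point): String = p match {
    case Point(0, 0) => "origin"               // constructor pattern with literal arguments
    case Point(x, 0) => s"on the x-axis at $x" // constructor pattern binding x
    case Point(_, _) => "somewhere else"
  }

  def main(args: Array[String]): Unit =
    println(describe(Point(3, 0))) // on the x-axis at 3
}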
|
gtache/intellij-lsp
|
intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/api/base/patterns/ScConstructorPattern.scala
|
Scala
|
apache-2.0
| 481 |
package dao.alertwatcherpacs
import scala.concurrent.Future
import javax.inject.Inject
import play.api.db.slick.DatabaseConfigProvider
import play.api.db.slick.HasDatabaseConfigProvider
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import slick.driver.JdbcProfile
import slick.jdbc.GetResult
import models.alertwatcherpacs.Alert
import play.db.NamedDatabase
import org.joda.time.DateTime
import org.joda.time.LocalDate
import java.sql.Timestamp
import com.github.tototoshi.slick.PostgresJodaSupport._
import play.api.Logger
trait IAlertWatcherPACSDao extends BaseDao[Alert]{
def findAll(): Future[Seq[Alert]]
def findById(id: Long): Future[Option[Alert]]
def findBySiteId(siteid: String): Future[Seq[Alert]]
def remove(id: Long): Future[Int]
def insert(p: Alert): Future[Unit]
def update(p2: Alert): Future[Unit]
}
class AlertWatcherPACSDao @Inject()(@NamedDatabase("AlertWatcherPACS") protected val dbConfigProvider: DatabaseConfigProvider)
extends HasDatabaseConfigProvider[JdbcProfile] with IAlertWatcherPACSDao {
// import driver.api._
import com.typesafe.slick.driver.ms.SQLServerDriver.api._
class AlertTable(tag: Tag)
extends Table[Alert](tag, models.alertwatcherpacs.AlertDef.toTable) {
def id = column[Long]("ID", O.PrimaryKey)
def siteid = column[String]("SiteID")
def sitename = column[String]("SiteName")
def alertlevel = column[Int]("AlertLevel")
def receivedtime = column[LocalDate]("ReceivedTime")
def occurredtime = column[DateTime]("OccurredTime")
def alertsource = column[String]("AlertSource")
def alertcomment = column[String]("AlertComment")
def alertmessage = column[String]("AlertMessage")
def alertfilename = column[Option[String]]("AlertFilename")
def alertcode = column[Option[String]]("AlertCode")
def comment = column[Option[String]]("Comment")
def actionid = column[Int]("ActionID")
def zoneid = column[Int]("ZoneID")
def subzoneid = column[Int]("SubZoneID")
// def lastmodifiedtime = column[DateTime]("LastModifiedTime")
def lastmodifier = column[String]("LastModifier")
// def modifiedtimestamp = column[Timestamp]("modifiedtimestamp")
def registeredhost = column[String]("RegisteredHost")
def zonename = column[String]("ZoneName")
def subzonename = column[String]("SubZoneName")
def equipmentname = column[String]("EquipmentName")
def * = (
id,
siteid,
sitename,
alertlevel,
receivedtime,
occurredtime,
alertsource,
alertcomment,
alertmessage,
alertfilename,
alertcode,
      comment,
actionid,
zoneid,
subzoneid,
// lastmodifiedtime,
// lastmodifier,
// modifiedtimestamp: Timestamp,
registeredhost,
zonename,
subzonename,
equipmentname
) <> (Alert.tupled, Alert.unapply _)
}
lazy val sourcefilename = new Exception().getStackTrace.head.getFileName
override def toTable = {
Logger.info("AlertWatcherPACSDao.scala toTable called.")
TableQuery[AlertTable]
}
private val Alerts = toTable()
override def findAll(): Future[Seq[Alert]] = {
db.run(Alerts.take(1000).result)
}
override def findById(id: Long): Future[Option[Alert]] = {
db.run(Alerts.filter( _.id === id).result.headOption)
}
override def findBySiteId(siteid: String): Future[Seq[Alert]] = {
db.run(Alerts.filter( _.siteid === siteid).take(1000).result)
}
  override def remove(id: Long): Future[Int] = {
    db.run(Alerts.filter(_.id === id).delete)
  }
override def insert(p: Alert): Future[Unit] = {
db.run(Alerts += p).map { _ => () }
}
  override def update(p2: Alert): Future[Unit] = {
    db.run(
      Alerts.filter(_.id === p2.id).update(p2)
    ).map { _ => () }
  }
}
|
tnddn/iv-web
|
portal/rest-portal/app/dao/alertwatcherpacs/AlertWatcherPACSDao.scala
|
Scala
|
apache-2.0
| 4,074 |
/*
* Copyright (c) 2014 Paul Bernard
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Spectrum Finance is based in part on:
* QuantLib. http://quantlib.org/
*
*/
package org.quantintel.ql.time
/**
* @author Paul Bernard
*/
object DateGeneration extends Enumeration {
type DateGeneration = Value
val BACKWARD = Value(1) // Backward from termination date to effective date
val FORWARD = Value(2) // forward from effective date to termination date
val ZERO = Value(3) // no intermediate dates between eff date and term date
val THIRD_WEDNESDAY = Value(4) // all dates but eff date and term date are taken to be on the 3rd wed.
val TWENTIETH = Value(5) // all dates but the eff date are taken to be the 20th of their month.
                                 // Termination date is also modified
val TWENTIETH_IMM = Value(6) // all dates but the eff date are taken to be the 20th of an IMM
// month. The termination data is also modified.
val OLD_CDS = Value(7) // Identical to 20th IMM with unrestricted date ends and long/short stub
// coupon period.
val CDS = Value(8) // credit derivatives standard rule
def valueOf(market: Int) : DateGeneration = market match {
case 1 => BACKWARD
case 2 => FORWARD
case 3 => ZERO
case 4 => THIRD_WEDNESDAY
case 5 => TWENTIETH
case 6 => TWENTIETH_IMM
case 7 => OLD_CDS
case 8 => CDS
case _ => throw new Exception("value must be 1 through 8")
}
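  // e.g. valueOf(2) == FORWARD; any value outside 1 through 8 throws an Exception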
}
|
pmularien/spectrum-old
|
financial/src/main/scala/org/quantintel/ql/time/DateGeneration.scala
|
Scala
|
apache-2.0
| 2,030 |
package org.bitcoins.core.protocol.script
import org.bitcoins.core.crypto.ECPublicKey
import org.bitcoins.core.script.constant.{
BytesToPushOntoStack,
OP_0,
ScriptConstant
}
import org.bitcoins.core.util.{BitcoinSLogger, TestUtil}
import org.scalatest.{FlatSpec, MustMatchers}
/**
* Created by chris on 3/8/16.
*/
class P2SHScriptSignatureTest extends FlatSpec with MustMatchers {
private def logger = BitcoinSLogger.logger
"P2SHScriptSignature" must "find the public keys embedded inside of the redeemScript" in {
val rawP2SHScriptSig = TestUtil.rawP2shInputScript2Of2
val p2shScriptSig: P2SHScriptSignature =
ScriptSignature(rawP2SHScriptSig) match {
case x: P2SHScriptSignature => x
case y => throw new RuntimeException("Must be p2sh script sig: " + y)
}
p2shScriptSig.publicKeys must be(
Seq(
ECPublicKey(
"0369d26ebd086523384a0f89f293d4c327a65fa73332d8efd1097cb35231295b83"),
ECPublicKey(
"02480863e5c4a4e9763f5380c44fcfe6a3b7787397076cf9ea1049303a9d34f721")
))
}
it must "return a p2sh scriptSig with no serialized redeemScript" in {
val p2shScriptSig = TestUtil.p2shInputScript2Of2 match {
case s: P2SHScriptSignature => s
case _ => throw new RuntimeException("Should be p2sh scriptSig")
}
p2shScriptSig.scriptSignatureNoRedeemScript.asm must be(
Seq(
OP_0,
BytesToPushOntoStack(71),
ScriptConstant(
"304402207d764cb90c9fd84b74d33a47cf3a0ffead9ded98333776becd6acd32c4426dac02203905a0d064e7f53d07793e86136571b6e4f700c1cfb888174e84d78638335b8101"),
BytesToPushOntoStack(72),
ScriptConstant(
"3045022100906aaca39f022acd8b7a38fd2f92aca9e9f35cfeaee69a6f13e1d083ae18222602204c9ed96fc6c4de56fd85c679fc59c16ee1ccc80c42563b86174e1a506fc007c801")
))
}
}
|
bitcoin-s/bitcoin-s-core
|
core-test/src/test/scala/org/bitcoins/core/protocol/script/P2SHScriptSignatureTest.scala
|
Scala
|
mit
| 1,905 |
package net.atos.entng.rbs.test.integration
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.entcore.test.appregistry.Role
import net.minidev.json.{ JSONValue, JSONObject }
import scala.collection.JavaConverters._
import java.util.concurrent.TimeUnit
import io.gatling.http.request.StringBody
object RbsScenario {
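  // Builds the JSON body for a single booking slot, e.g.
  // {"slots" : [{"start_date" : 1914298200, "end_date" : 1914309000}]}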
  def getSlotAsStringBody(slot: (Long, Long)): StringBody =
    StringBody("""{"slots" : [{"start_date" : """ + slot._1 + """, "end_date" : """ + slot._2 + "}]}")
// Slots to create concurrent bookings
val startDate = 1914298200L // Unix timestamp in seconds, corresponding to 2030-08-30 05:30:00
val firstSlot = (startDate, startDate + TimeUnit.SECONDS.convert(3, TimeUnit.HOURS))
val secondSlot = (firstSlot._2, firstSlot._2 + TimeUnit.SECONDS.convert(3, TimeUnit.HOURS))
val thirdSlot = (firstSlot._1 - TimeUnit.SECONDS.convert(2, TimeUnit.HOURS),
secondSlot._2 + TimeUnit.SECONDS.convert(1, TimeUnit.HOURS))
val concurrentSlot = (firstSlot._1 + TimeUnit.SECONDS.convert(10, TimeUnit.MINUTES),
firstSlot._2 + TimeUnit.SECONDS.convert(1, TimeUnit.HOURS))
val nonConcurrentSlot = (concurrentSlot._2, concurrentSlot._2 + TimeUnit.SECONDS.convert(1, TimeUnit.HOURS))
val secondNonConcurrentSlot = (concurrentSlot._1 - TimeUnit.SECONDS.convert(1, TimeUnit.HOURS), concurrentSlot._1)
// Dates to create a periodic booking
val pSlotStartDate = 1919755800L // 2030-11-01 (friday) 10:30:00
val pSlotEndDate = pSlotStartDate + TimeUnit.SECONDS.convert(7, TimeUnit.HOURS)
val pLastSlotEndDate = 1923409800L // 2030-12-13 17:30:00
val scnCreateTeachers = exec(http("Login - admin user")
.post("""/auth/login""")
.formParam("""callBack""", """http%3A%2F%2Flocalhost%3A8080%2Fadmin""")
.formParam("""email""", """tom.mate""")
.formParam("""password""", """password""")
.check(status.is(302)))
.exec(http("Create manual teacher")
.post("""/directory/api/user""")
.formParam("""classId""", """${classId}""")
.formParam("""lastname""", "DROUILLACrbs")
.formParam("""firstname""", """Aurelie""")
.formParam("""type""", """Teacher""")
.check(status.is(200)))
.pause(1)
.exec(http("Create manual teacher")
.post("""/directory/api/user""")
.formParam("""classId""", """${classId}""")
.formParam("""lastname""", "PIRESrbs")
.formParam("""firstname""", """Rachelle""")
.formParam("""type""", """Teacher""")
.check(status.is(200)))
.pause(1)
.exec(http("Create manual teacher")
.post("""/directory/api/user""")
.formParam("""classId""", """${classId}""")
.formParam("""lastname""", "BAILLYrbs")
.formParam("""firstname""", """Catherine""")
.formParam("""type""", """Teacher""")
.check(status.is(200)))
.pause(1)
.exec(http("Create manual teacher")
.post("""/directory/api/user""")
.formParam("""classId""", """${classId}""")
.formParam("""lastname""", "DAUDIERrbs")
.formParam("""firstname""", """Remi""")
.formParam("""type""", """Teacher""")
.check(status.is(200)))
.pause(1)
.exec(http("List teachers in class")
.get("""/directory/api/personnes?id=${classId}&type=Teacher""")
.check(status.is(200), jsonPath("$.status").is("ok"),
jsonPath("$.result").find.transformOption(_.map(res => {
val json = JSONValue.parse(res).asInstanceOf[JSONObject]
json.values.asScala.foldLeft[List[(String, String)]](Nil) { (acc, c) =>
val user = c.asInstanceOf[JSONObject]
user.get("lastName").asInstanceOf[String] match {
case "DROUILLACrbs" | "PIRESrbs" | "BAILLYrbs" | "DAUDIERrbs" if user.get("code") != null =>
(user.get("lastName").asInstanceOf[String], user.get("userId").asInstanceOf[String]) :: acc
case _ => acc
}
}.toMap
})).saveAs("createdTeacherIds")))
.exec { session =>
val uIds = session("createdTeacherIds").as[Map[String, String]]
session.set("teacherDrouillacId", uIds.get("DROUILLACrbs").get)
.set("teacherPiresId", uIds.get("PIRESrbs").get)
.set("teacherBaillyId", uIds.get("BAILLYrbs").get)
.set("teacherDaudierId", uIds.get("DAUDIERrbs").get)
.set("now", System.currentTimeMillis())
}
.exec(http("Teacher details")
.get("""/directory/api/details?id=${teacherDrouillacId}""")
.check(status.is(200), jsonPath("$.status").is("ok"),
jsonPath("$.result.*.login").find.saveAs("teacherDrouillacLogin"),
jsonPath("$.result.*.code").find.saveAs("teacherDrouillacCode")))
.exec(http("Teacher details")
.get("""/directory/api/details?id=${teacherPiresId}""")
.check(status.is(200), jsonPath("$.status").is("ok"),
jsonPath("$.result.*.login").find.saveAs("teacherPiresLogin"),
jsonPath("$.result.*.code").find.saveAs("teacherPiresCode")))
.exec(http("Teacher details")
.get("""/directory/api/details?id=${teacherBaillyId}""")
.check(status.is(200), jsonPath("$.status").is("ok"),
jsonPath("$.result.*.login").find.saveAs("teacherBaillyLogin"),
jsonPath("$.result.*.code").find.saveAs("teacherBaillyCode")))
.exec(http("Teacher details")
.get("""/directory/api/details?id=${teacherDaudierId}""")
.check(status.is(200), jsonPath("$.status").is("ok"),
jsonPath("$.result.*.login").find.saveAs("teacherDaudierLogin"),
jsonPath("$.result.*.code").find.saveAs("teacherDaudierCode")))
.exec(http("Activate teacher account")
.post("""/auth/activation""")
.formParam("""login""", """${teacherDrouillacLogin}""")
.formParam("""activationCode""", """${teacherDrouillacCode}""")
.formParam("""password""", """blipblop""")
.formParam("""confirmPassword""", """blipblop""")
.formParam("""acceptCGU""", """true""")
.check(status.is(302)))
.exec(http("Activate teacher account")
.post("""/auth/activation""")
.formParam("""login""", """${teacherPiresLogin}""")
.formParam("""activationCode""", """${teacherPiresCode}""")
.formParam("""password""", """blipblop""")
.formParam("""confirmPassword""", """blipblop""")
.formParam("""acceptCGU""", """true""")
.check(status.is(302)))
.exec(http("Activate teacher account")
.post("""/auth/activation""")
.formParam("""login""", """${teacherBaillyLogin}""")
.formParam("""activationCode""", """${teacherBaillyCode}""")
.formParam("""password""", """blipblop""")
.formParam("""confirmPassword""", """blipblop""")
.formParam("""acceptCGU""", """true""")
.check(status.is(302)))
.exec(http("Activate teacher account")
.post("""/auth/activation""")
.formParam("""login""", """${teacherDaudierLogin}""")
.formParam("""activationCode""", """${teacherDaudierCode}""")
.formParam("""password""", """blipblop""")
.formParam("""confirmPassword""", """blipblop""")
.formParam("""acceptCGU""", """true""")
.check(status.is(302)))
.exec(http("Add ADML function to teacher DAUDIERrbs")
.post("""/directory/user/function/${teacherDaudierId}""")
.header("Content-Type", "application/json")
.body(StringBody("""{"functionCode": "ADMIN_LOCAL", "scope": ["${schoolId}"], "inherit":"sc"}"""))
.check(status.is(200)))
val scnCreateBookings =
Role.createAndSetRole("Réservation de ressources")
.exec(http("Login - teacher")
.post("""/auth/login""")
.formParam("""email""", """${teacherDrouillacLogin}""")
.formParam("""password""", """blipblop""")
.check(status.is(302)))
// ResourceType
.exec(http("Create type")
.post("/rbs/type")
.body(StringBody("""{"name" : "type created", "color" : "#FF8500", "validation" : true, "school_id" : "${schoolId}"}"""))
.check(status.is(200),
jsonPath("$.id").find.saveAs("typeId")))
// Resource
.exec(http("Create resource")
.post("/rbs/type/${typeId}/resource")
.body(StringBody("""{"name" : "resource created",
"description" : "resource created desc",
"periodic_booking" : true,
"validation" : true,
"is_available" : true }"""))
.check(status.is(200),
jsonPath("$.id").find.saveAs("resourceId")))
.exec(http("Get resource")
.get("/rbs/resource/${resourceId}")
.check(status.is(200),
jsonPath("$.id").find.is("${resourceId}"),
jsonPath("$.name").find.is("resource created"),
jsonPath("$.description").find.is("resource created desc"),
jsonPath("$.periodic_booking").find.is("true"),
jsonPath("$.is_available").find.is("true"),
jsonPath("$.type_id").find.is("${typeId}")))
.exec(http("Update resource")
.put("/rbs/resource/${resourceId}")
.body(StringBody("""{"name" : "resource updated",
"description" : "resource updated desc",
"type_id" : ${typeId},
"validation" : true,
"is_available" : true,
"was_available" : true }"""))
.check(status.is(200)))
.exec(http("Get updated resource")
.get("/rbs/resource/${resourceId}")
.check(status.is(200),
jsonPath("$.id").find.is("${resourceId}"),
jsonPath("$.name").find.is("resource updated"),
jsonPath("$.description").find.is("resource updated desc"),
jsonPath("$.periodic_booking").find.is("true"),
jsonPath("$.is_available").find.is("true"),
jsonPath("$.type_id").find.is("${typeId}")))
.exec(http("List resources")
.get("/rbs/resources")
.check(status.is(200),
jsonPath("$[0].id").find.is("${resourceId}")))
// .exec(http("Share rights 'rbs.read' and 'rbs.contrib' for created type")
// .put("/rbs/share/json/${typeId}")
// .bodyPart(StringBodyPart("userId", "${teacherPiresId}"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|updateBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|updatePeriodicBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|createPeriodicBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|createBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-ResourceController|get"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-ResourceTypeController|getResourceType"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|listBookingsByResource"))
// .check(status.is(200)))
// .exec(http("Share rights 'rbs.read' and 'rbs.contrib' for created type")
// .put("/rbs/share/json/${typeId}")
// .bodyPart(StringBodyPart("userId", "${teacherBaillyId}"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|updateBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|updatePeriodicBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|createPeriodicBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|createBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-ResourceController|get"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-ResourceTypeController|getResourceType"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|listBookingsByResource"))
// .check(status.is(200)))
.exec(http("Logout - teacher")
.get("""/auth/logout""")
.check(status.is(302)))
// 1. Concurrent bookings
// Other teachers create bookings that overlap each other
.exec(http("Login - teacher 2")
.post("""/auth/login""")
.formParam("""email""", """${teacherPiresLogin}""")
.formParam("""password""", """blipblop""")
.check(status.is(302)))
// .exec(http("Create booking")
// .post("/rbs/resource/${resourceId}/booking")
// .body(getSlotAsStringBody(firstSlot))
// .check(status.is(200),
// jsonPath("$.id").find.saveAs("firstBookingId")))
// .exec(http("Create booking")
// .post("/rbs/resource/${resourceId}/booking")
// .body(getSlotAsStringBody(secondSlot))
// .check(status.is(200),
// jsonPath("$.id").find.saveAs("secondBookingId")))
// .exec(http("Create booking")
// .post("/rbs/resource/${resourceId}/booking")
// .body(getSlotAsStringBody(thirdSlot))
// .check(status.is(200),
// jsonPath("$.id").find.saveAs("thirdBookingId")))
.exec(http("Logout - teacher 2")
.get("""/auth/logout""")
.check(status.is(302)))
.exec(http("Login - teacher 3")
.post("""/auth/login""")
.formParam("""email""", """${teacherBaillyLogin}""")
.formParam("""password""", """blipblop""")
.check(status.is(302)))
// .exec(http("Create booking")
// .post("/rbs/resource/${resourceId}/booking")
// .body(getSlotAsStringBody(concurrentSlot))
// .check(status.is(200),
// jsonPath("$.id").find.saveAs("concurrentBookingId")))
// // Create 2 bookings that do not overlap with the previous bookings
// .exec(http("Create booking")
// .post("/rbs/resource/${resourceId}/booking")
// .body(getSlotAsStringBody(nonConcurrentSlot))
// .check(status.is(200),
// jsonPath("$.id").find.saveAs("nonConcurrentBookingId")))
// .exec(http("Create booking")
// .post("/rbs/resource/${resourceId}/booking")
// .body(getSlotAsStringBody(secondNonConcurrentSlot))
// .check(status.is(200),
// jsonPath("$.id").find.saveAs("secondNonConcurrentBookingId")))
.exec(http("Logout - teacher 3")
.get("""/auth/logout""")
.check(status.is(302)))
.exec(http("Login 1 - teacher")
.post("""/auth/login""")
.formParam("""email""", """${teacherDrouillacLogin}""")
.formParam("""password""", """blipblop""")
.check(status.is(302)))
    /* Validate a booking, and check that:
     * concurrent bookings have been refused,
     * and non-concurrent bookings still have status "created"
*/
// .exec(http("Validate booking")
// .put("/rbs/resource/${resourceId}/booking/${concurrentBookingId}/process")
// .body(StringBody("""{"status": 2}"""))
// .check(status.is(200),
// jsonPath("$.id").is("${concurrentBookingId}"),
// jsonPath("$.status").is("2")))
// .exec(http("List bookings and check their status")
// .get("/rbs/resource/${resourceId}/bookings")
// .check(status.is(200),
// jsonPath("$[?(@.id == ${concurrentBookingId})].status").is("2"),
// jsonPath("$[?(@.id == ${firstBookingId})].status").is("3"),
// jsonPath("$[?(@.id == ${secondBookingId})].status").is("3"),
// jsonPath("$[?(@.id == ${thirdBookingId})].status").is("3"),
// jsonPath("$[?(@.id == ${nonConcurrentBookingId})].status").is("1"),
// jsonPath("$[?(@.id == ${secondNonConcurrentBookingId})].status").is("1")))
// .exec(http("Try creating a conflicting booking")
// .post("/rbs/resource/${resourceId}/booking")
// .body(getSlotAsStringBody(firstSlot))
// .check(status.is(409)))
// 2a. Create a periodic booking (with field 'occurrences' supplied) and check that slots' start and end dates are correct
.exec(http("Create periodic booking")
.post("/rbs/resource/${resourceId}/booking/periodic")
.body(StringBody("""{"booking_reason":"Résa périodique", "slots":[{"start_date" : """ + pSlotStartDate + """, "end_date" : """ + pSlotEndDate + """}],
"days":[false, true, false, false, true, true, false],
"periodicity":2,
"occurrences":10
}"""))
.check(status.is(200),
jsonPath("$[?(@.status != 1)]").notExists,
jsonPath("$..id").findAll.saveAs("slotsIds")))
.exec(http("List bookings and check their dates")
.get("/rbs/resource/${resourceId}/bookings")
.check(status.is(200),
jsonPath("$[?(@.id == ${slotsIds(0)})].start_date").is("2030-11-01T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(0)})].end_date").is("2030-11-01T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(1)})].start_date").is("2030-11-11T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(1)})].end_date").is("2030-11-11T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(2)})].start_date").is("2030-11-14T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(2)})].end_date").is("2030-11-14T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(3)})].start_date").is("2030-11-15T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(3)})].end_date").is("2030-11-15T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(4)})].start_date").is("2030-11-25T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(4)})].end_date").is("2030-11-25T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(5)})].start_date").is("2030-11-28T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(5)})].end_date").is("2030-11-28T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(6)})].start_date").is("2030-11-29T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(6)})].end_date").is("2030-11-29T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(7)})].start_date").is("2030-12-09T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(7)})].end_date").is("2030-12-09T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(8)})].start_date").is("2030-12-12T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(8)})].end_date").is("2030-12-12T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(9)})].start_date").is("2030-12-13T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(9)})].end_date").is("2030-12-13T17:30:00.000")))
// 2b. Create a periodic booking (with field 'periodic_end_date' supplied) and check that slots' start and end dates are correct
.exec(http("Create periodic booking")
.post("/rbs/resource/${resourceId}/booking/periodic")
.body(StringBody("""{"booking_reason":"Résa périodique", "slots":[{"start_date" : """ + pSlotStartDate + """, "end_date" : """ + pSlotEndDate + """}],
"days":[false, true, false, false, true, true, false],
"periodicity":2,
"periodic_end_date": """ + pLastSlotEndDate + """
}"""))
.check(status.is(200),
jsonPath("$[?(@.status != 1)]").notExists,
jsonPath("$..id").findAll.saveAs("slotsIds")))
.exec(http("List bookings and check their dates")
.get("/rbs/resource/${resourceId}/bookings")
.check(status.is(200),
jsonPath("$[?(@.id == ${slotsIds(0)})].start_date").is("2030-11-01T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(0)})].end_date").is("2030-11-01T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(1)})].start_date").is("2030-11-11T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(1)})].end_date").is("2030-11-11T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(2)})].start_date").is("2030-11-14T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(2)})].end_date").is("2030-11-14T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(3)})].start_date").is("2030-11-15T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(3)})].end_date").is("2030-11-15T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(4)})].start_date").is("2030-11-25T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(4)})].end_date").is("2030-11-25T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(5)})].start_date").is("2030-11-28T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(5)})].end_date").is("2030-11-28T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(6)})].start_date").is("2030-11-29T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(6)})].end_date").is("2030-11-29T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(7)})].start_date").is("2030-12-09T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(7)})].end_date").is("2030-12-09T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(8)})].start_date").is("2030-12-12T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(8)})].end_date").is("2030-12-12T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(9)})].start_date").is("2030-12-13T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(9)})].end_date").is("2030-12-13T17:30:00.000")))
val scnAdml = exec(http("Login - ADML")
.post("""/auth/login""")
.formParam("""email""", """${teacherDaudierLogin}""")
.formParam("""password""", """blipblop""")
.check(status.is(302)))
// ResourceType
.exec(http("ADML creates type")
.post("/rbs/type")
.body(StringBody("""{"name" : "type created by ADML", "color" : "#FF8500", "validation" : true, "school_id" : "${schoolId}"}"""))
.check(status.is(200),
jsonPath("$.id").find.saveAs("admlTypeId")))
.exec(http("ADML lists types")
.get("/rbs/types")
.check(status.is(200),
// the returned list should contain typeId and admlTypeId
jsonPath("$[?(@.id == ${typeId})].name").is("type created"),
jsonPath("$[?(@.id == ${admlTypeId})].name").is("type created by ADML")))
// Resource
.exec(http("ADML creates resource")
.post("/rbs/type/${admlTypeId}/resource")
.body(StringBody("""{"name" : "resource created by ADML",
"description" : "resource created by ADML description",
"periodic_booking" : true,
"is_available" : true }"""))
.check(status.is(200),
jsonPath("$.id").find.saveAs("admlResourceId")))
.exec(http("ADML creates resource in teacher's type")
.post("/rbs/type/${typeId}/resource")
.body(StringBody("""{"name" : "resource created by ADML in teacher type",
"description" : "resource created by ADML in teacher type description",
"periodic_booking" : true,
"validation" : true,
"is_available" : true }"""))
.check(status.is(200),
jsonPath("$.id").find.saveAs("admlResourceIdInTeacherType")))
.exec(http("ADML gets resource created by himself")
.get("/rbs/resource/${admlResourceIdInTeacherType}")
.check(status.is(200),
jsonPath("$.id").find.is("${admlResourceIdInTeacherType}"),
jsonPath("$.name").find.is("resource created by ADML in teacher type"),
jsonPath("$.description").find.is("resource created by ADML in teacher type description"),
jsonPath("$.periodic_booking").find.is("true"),
jsonPath("$.is_available").find.is("true"),
jsonPath("$.type_id").find.is("${typeId}")))
.exec(http("ADML gets resource created by a teacher")
.get("/rbs/resource/${resourceId}")
.check(status.is(200),
jsonPath("$.id").find.is("${resourceId}"),
jsonPath("$.name").find.is("resource updated"),
jsonPath("$.description").find.is("resource updated desc"),
jsonPath("$.periodic_booking").find.is("true"),
jsonPath("$.is_available").find.is("true"),
jsonPath("$.type_id").find.is("${typeId}")))
.exec(http("ADML updates resource created by a teacher")
.put("/rbs/resource/${resourceId}")
.body(StringBody("""{"name" : "resource created by teacher and updated by adml",
"description" : "resource created by teacher and updated by adml description",
"type_id" : ${typeId},
"is_available" : true,
"validation" : true,
"was_available" : true }"""))
.check(status.is(200)))
.exec(http("ADML gets updated resource")
.get("/rbs/resource/${resourceId}")
.check(status.is(200),
jsonPath("$.id").find.is("${resourceId}"),
jsonPath("$.name").find.is("resource created by teacher and updated by adml"),
jsonPath("$.description").find.is("resource created by teacher and updated by adml description"),
jsonPath("$.periodic_booking").find.is("true"),
jsonPath("$.is_available").find.is("true"),
jsonPath("$.type_id").find.is("${typeId}")))
.exec(http("ADML lists resources")
.get("/rbs/resources")
.check(status.is(200),
// the returned list should contain resourceId, admlResourceId and admlResourceIdInTeacherType
jsonPath("$[?(@.id == ${resourceId})].name").is("resource created by teacher and updated by adml"),
jsonPath("$[?(@.id == ${admlResourceId})].name").is("resource created by ADML"),
jsonPath("$[?(@.id == ${admlResourceIdInTeacherType})].name").is("resource created by ADML in teacher type")))
// .exec(http("ADML shares rights 'rbs.read' and 'rbs.contrib' for created type")
// .put("/rbs/share/json/${admlTypeId}")
// .bodyPart(StringBodyPart("userId", "${teacherPiresId}"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|updateBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|updatePeriodicBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|createPeriodicBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|createBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-ResourceController|get"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-ResourceTypeController|getResourceType"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|listBookingsByResource"))
// .check(status.is(200)))
// .exec(http("ADML shares rights 'rbs.read' and 'rbs.contrib' for created type")
// .put("/rbs/share/json/${admlTypeId}")
// .bodyPart(StringBodyPart("userId", "${teacherBaillyId}"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|updateBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|updatePeriodicBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|createPeriodicBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|createBooking"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-ResourceController|get"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-ResourceTypeController|getResourceType"))
// .bodyPart(StringBodyPart("actions", "net-atos-entng-rbs-controllers-BookingController|listBookingsByResource"))
// .check(status.is(200)))
// Validate/refuse bookings
// .exec(http("ADML validates booking created by teacher")
// .put("/rbs/resource/${resourceId}/booking/${nonConcurrentBookingId}/process")
// .body(StringBody("""{"status": 2}"""))
// .check(status.is(200),
// jsonPath("$.id").is("${nonConcurrentBookingId}"),
// jsonPath("$.status").is("2")))
// .exec(http("ADML refuses booking created by teacher")
// .put("/rbs/resource/${resourceId}/booking/${secondNonConcurrentBookingId}/process")
// .body(StringBody("""{"status": 3}"""))
// .check(status.is(200),
// jsonPath("$.id").is("${secondNonConcurrentBookingId}"),
// jsonPath("$.status").is("3")))
// .exec(http("ADML deletes booking created by teacher")
// .delete("/rbs/resource/${resourceId}/booking/${secondNonConcurrentBookingId}")
// .check(status.is(204)))
.exec(http("ADML creates booking")
.post("/rbs/resource/${resourceId}/booking")
.body(getSlotAsStringBody(secondNonConcurrentSlot))
.check(status.is(200),
jsonPath("$..id").find.saveAs("admlBookingId")))
.exec(http("ADML lists bookings")
.get("/rbs/resource/${resourceId}/bookings")
.check(status.is(200),
jsonPath("$[?(@.id == ${admlBookingId})].status").is("1")))
.exec(http("ADML updates booking")
.put("/rbs/resource/${resourceId}/booking/${admlBookingId}")
.body(StringBody("""{"slots" : [{"start_date" : """ + pSlotStartDate + """,
"end_date" : """ + pSlotEndDate + """
}]}"""))
.check(status.is(200)))
.exec(http("ADML creates periodic booking")
.post("/rbs/resource/${admlResourceIdInTeacherType}/booking/periodic")
.body(StringBody("""{"booking_reason":"Résa périodique", "slots":[{"start_date" : """ + pSlotStartDate + """, "end_date" : """ + pSlotEndDate + """}],
"days":[false, true, false, false, true, true, false],
"periodicity":2,
"occurrences":10
}"""))
.check(status.is(200),
jsonPath("$[?(@.status != 1)]").notExists,
jsonPath("$..id").findAll.saveAs("slotsIds")))
.exec(http("List bookings and check their dates")
.get("/rbs/resource/${admlResourceIdInTeacherType}/bookings")
.check(status.is(200),
jsonPath("$[?(@.id == ${slotsIds(0)})].start_date").is("2030-11-01T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(0)})].end_date").is("2030-11-01T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(1)})].start_date").is("2030-11-11T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(1)})].end_date").is("2030-11-11T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(2)})].start_date").is("2030-11-14T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(2)})].end_date").is("2030-11-14T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(3)})].start_date").is("2030-11-15T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(3)})].end_date").is("2030-11-15T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(4)})].start_date").is("2030-11-25T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(4)})].end_date").is("2030-11-25T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(5)})].start_date").is("2030-11-28T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(5)})].end_date").is("2030-11-28T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(6)})].start_date").is("2030-11-29T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(6)})].end_date").is("2030-11-29T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(7)})].start_date").is("2030-12-09T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(7)})].end_date").is("2030-12-09T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(8)})].start_date").is("2030-12-12T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(8)})].end_date").is("2030-12-12T17:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(9)})].start_date").is("2030-12-13T10:30:00.000"),
jsonPath("$[?(@.id == ${slotsIds(9)})].end_date").is("2030-12-13T17:30:00.000")))
// Deletes
.exec(http("Delete Resource")
.delete("/rbs/resource/${resourceId}")
.check(status.is(200)))
.exec(http("Get Resource deleted")
.get("/rbs/resource/${resourceId}")
.check(status.is(401)))
// .exec(http("Delete Type")
// .delete("/rbs/type/${typeId}")
// .check(status.is(204)))
// .exec(http("Get Type deleted")
// .get("/rbs/type/${typeId}")
// .check(status.is(401)))
.exec(http("Logout - ADML")
.get("""/auth/logout""")
.check(status.is(302)))
}
|
docdoku/rbs
|
src/test/scala/net/atos/entng/rbs/test/integration/RbsScenario.scala
|
Scala
|
agpl-3.0
| 32,422 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa
import java.math.{MathContext, RoundingMode}
import org.calrissian.mango.types.LexiTypeEncoders
import org.geotools.data.DataAccessFactory.Param
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
/**
* In these lexiEncode and -Decode functions, a double is encoded or decoded into a lexical
* representation to be used in the rowID in the Accumulo key.
*
* In the lexiEncodeDoubleToString function, the double is truncated via floor() to four significant digits,
* giving a scientific notation of #.###E0. This is to ensure that there is flexibility in the
* precision of the bounding box given when querying for chunks. Prior to rounding the resolution,
* it was found that even that slightest change in bounding box caused the resolution to be calculated
* differently many digits after the decimal point, leading to a different lexical representation of the
* resolution. This meant that no chunks would be returned out of Accumulo, since they did not match
* the resolution calculated upon ingest.
*
* Now, there is greater flexibility in specifying a bounding box and calculating a resolution because
* we save only four digits after the decimal point.
*/
package object raster {
object RasterParams {
val writeMemoryParam = new Param("writeMemory", classOf[Integer], "The memory allocation to use for writing records", false)
}
val rasterSftName: String = ""
// Raster CQ MetaData SFT
val rasterSft = {
val sft = SimpleFeatureTypes.createType("RasterIndexEntry", "*geom:Geometry:srid=4326,dtg:Date")
sft.setSchemaVersion(8) // TODO GEOMESA-1278 this should be read out of the actual data we're reading and not be constant
sft.setDtgField("dtg")
sft
}
// geom field name
val rasterSftGeomName = "geom"
// dtg field name
val rasterSftDtgName = "dtg"
// defaults
val defaultResolution = 1.0
val rasterMajcMaxVers = "1"
val rasterMincMaxVers = "1"
val rasterScanMaxVers = "1"
val rasterSplitThresh = "512M"
// Sets the rounding mode to use floor() in order to minimize effects from round-off at higher precisions
val roundingMode = RoundingMode.FLOOR
// Sets the scale for the floor() function and thus determines where the truncation occurs
val significantDigits = 4
// Defines the rules for rounding using the above
val mc = new MathContext(significantDigits, roundingMode)
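  // Worked example (informal): with 4 significant digits and FLOOR rounding,
  // lexiEncodeDoubleToString(123.456789) truncates the value to 123.4 before encoding, and
  // lexiDecodeStringToDouble applies the same truncation, so the round trip also yields 123.4.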
/**
* The double, number, is truncated to a certain number of significant digits and then lexiEncoded into
   * a string representation.
* @param number, the Double to be lexiEncoded
*/
def lexiEncodeDoubleToString(number: Double): String = {
val truncatedRes = BigDecimal(number).round(mc).toDouble
LexiTypeEncoders.LEXI_TYPES.encode(truncatedRes)
}
/**
* The string representation of a double, str, is decoded to its original Double representation
* and then truncated to a certain number of significant digits to remain consistent with the lexiEncode function.
* @param str, the String representation of the Double
*/
def lexiDecodeStringToDouble(str: String): Double = {
val number = LexiTypeEncoders.LEXI_TYPES.decode("double", str).asInstanceOf[Double]
BigDecimal(number).round(mc).toDouble
}
def lexiEncodeIntToString(number: Int): String = LexiTypeEncoders.LEXI_TYPES.encode(number)
def lexiDecodeStringToInt(str: String): Int = LexiTypeEncoders.LEXI_TYPES.decode("integer", str).asInstanceOf[Int]
}
|
nagavallia/geomesa
|
geomesa-accumulo/geomesa-accumulo-raster/src/main/scala/org/locationtech/geomesa/raster/package.scala
|
Scala
|
apache-2.0
| 4,036 |
/*
* Copyright 2017 by Simba Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.spark.sql.simba
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.{Encoder, Row, Dataset => SQLDataset}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.simba.execution.QueryExecution
import org.apache.spark.sql.simba.expression._
import org.apache.spark.sql.simba.index.IndexType
import org.apache.spark.sql.simba.plans._
import org.apache.spark.sql.simba.spatial.Point
import org.apache.spark.sql.simba.util.LiteralUtil
import org.apache.spark.storage.StorageLevel
/**
* Created by dongx on 3/7/17.
*/
class Dataset[T] private[simba] (@transient val simbaSession: SimbaSession,
@transient override val queryExecution: QueryExecution,
encoder: Encoder[T])
extends SQLDataset[T](simbaSession, queryExecution.logical, encoder) {
def this(simbaSession: SimbaSession, logicalPlan: LogicalPlan, encoder: Encoder[T]) = {
this(simbaSession, {
val qe = simbaSession.sessionState.executePlan(logicalPlan)
qe
}, encoder)
}
/**
* Spatial operation, range query.
* {{{
* point.range(Array("x", "y"), Array(10, 10), Array(20, 20))
* point.filter($"x" >= 10 && $"x" <= 20 && $"y" >= 10 && $"y" <= 20)
* }}}
*/
def range(keys: Array[String], point1: Array[Double], point2: Array[Double]): DataFrame = withPlan {
val attrs = getAttributes(keys)
attrs.foreach(attr => assert(attr != null, "column not found"))
Filter(InRange(PointWrapper(attrs),
LiteralUtil(new Point(point1)),
LiteralUtil(new Point(point2))), logicalPlan)
}
/**
* Spatial operation, range query
* {{{
* point.range(p, Array(10, 10), Array(20, 20))
* }}}
*/
def range(key: String, point1: Array[Double], point2: Array[Double]): DataFrame = withPlan {
val attrs = getAttributes(Array(key))
assert(attrs.head != null, "column not found")
Filter(InRange(attrs.head,
LiteralUtil(new Point(point1)),
LiteralUtil(new Point(point2))), logicalPlan)
}
/**
* Spatial operation knn
   * Find the k nearest neighbors of a given point
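   * An illustrative call (mirroring the `range` examples above):
   * {{{
   *   point.knn(Array("x", "y"), Array(10, 10), 5)
   * }}}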
*/
def knn(keys: Array[String], point: Array[Double], k: Int): DataFrame = withPlan{
val attrs = getAttributes(keys)
attrs.foreach(attr => assert(attr != null, "column not found"))
Filter(InKNN(PointWrapper(attrs),
LiteralUtil(new Point(point)), LiteralUtil(k)), logicalPlan)
}
def knn(key: String, point: Array[Double], k: Int): DataFrame = withPlan{
val attrs = getAttributes(Array(key))
assert(attrs.head != null, "column not found")
Filter(InKNN(attrs.head,
LiteralUtil(new Point(point)), LiteralUtil(k)), logicalPlan)
}
/**
* Spatial operation circle range query
* {{{
* point.circleRange(Array("x", "y"), Array(10, 10), 5)
* point.filter(($"x" - 10) * ($"x" - 10) + ($"y" - 10) * ($"y" - 10) <= 5 * 5)
* }}}
*/
def circleRange(keys: Array[String], point: Array[Double], r: Double): DataFrame = withPlan {
val attrs = getAttributes(keys)
attrs.foreach(attr => assert(attr != null, "column not found"))
Filter(InCircleRange(PointWrapper(attrs),
LiteralUtil(new Point(point)),
LiteralUtil(r)), logicalPlan)
}
/**
* Spatial operation circle range query
* {{{
   *   point.circleRange(p, Array(10, 10), 5)
* }}}
*/
def circleRange(key: String, point: Array[Double], r: Double): DataFrame = withPlan {
val attrs = getAttributes(Array(key))
assert(attrs.head != null, "column not found")
Filter(InCircleRange(attrs.head,
LiteralUtil(new Point(point)),
LiteralUtil(r)), logicalPlan)
}
/**
* Spatial operation DistanceJoin
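   * An illustrative call (dataset and column names are only examples):
   * {{{
   *   left.distanceJoin(right, Array("x", "y"), Array("x", "y"), 5.0)
   * }}}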
*/
def distanceJoin(right: Dataset[_], leftKeys: Array[String],
rightKeys: Array[String], r: Double) : DataFrame = withPlan {
val leftAttrs = getAttributes(leftKeys)
val rightAttrs = getAttributes(rightKeys, right.queryExecution.analyzed.output)
SpatialJoin(this.logicalPlan, right.logicalPlan, DistanceJoin,
Some(InCircleRange(PointWrapper(rightAttrs),
PointWrapper(leftAttrs),
LiteralUtil(r))))
}
def distanceJoin(right: Dataset[_], leftKey: String,
rightKey: String, r: Double) : DataFrame = withPlan {
val leftAttrs = getAttributes(Array(leftKey))
val rightAttrs = getAttributes(Array(rightKey), right.queryExecution.analyzed.output)
SpatialJoin(this.logicalPlan, right.logicalPlan, DistanceJoin,
Some(InCircleRange(rightAttrs.head,
leftAttrs.head,
LiteralUtil(r))))
}
/**
* Spatial operation KNNJoin
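   * An illustrative call (dataset and column names are only examples):
   * {{{
   *   left.knnJoin(right, Array("x", "y"), Array("x", "y"), 3)
   * }}}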
*/
def knnJoin(right: Dataset[_], leftKeys: Array[String],
rightKeys: Array[String], k : Int) : DataFrame = withPlan {
val leftAttrs = getAttributes(leftKeys)
val rightAttrs = getAttributes(rightKeys, right.queryExecution.analyzed.output)
SpatialJoin(this.logicalPlan, right.logicalPlan, KNNJoin,
Some(InKNN(PointWrapper(rightAttrs),
PointWrapper(leftAttrs), LiteralUtil(k))))
}
def knnJoin(right: Dataset[_], leftKey: String,
rightKey: String, k : Int) : DataFrame = withPlan {
val leftAttrs = getAttributes(Array(leftKey))
val rightAttrs = getAttributes(Array(rightKey), right.queryExecution.analyzed.output)
SpatialJoin(this.logicalPlan, right.logicalPlan, KNNJoin,
Some(InKNN(rightAttrs.head,
leftAttrs.head, LiteralUtil(k))))
}
/////////////////////////////////////////////////////////////////////////////
// Index operations
/////////////////////////////////////////////////////////////////////////////
/**
* @group extended
*/
def index(indexType: IndexType, indexName: String, column: Array[String]): this.type = {
simbaSession.sessionState.indexManager.createIndexQuery(this, indexType, indexName, getAttributes(column).toList)
this
}
/**
* @group extended
*/
def setStorageLevel(indexName: String, level: StorageLevel): this.type = {
simbaSession.sessionState.indexManager.setStorageLevel(this, indexName, level)
this
}
/**
* @group extended
*/
def dropIndex(blocking: Boolean): this.type = {
simbaSession.sessionState.indexManager.tryDropIndexQuery(this, blocking)
this
}
/**
* @group extended
*/
def dropIndex(): this.type = dropIndex(blocking = false)
/**
* @group extended
*/
def dropIndexByName(indexName : String) : this.type = {
simbaSession.sessionState.indexManager.tryDropIndexByNameQuery(this, indexName, blocking = false)
this
}
/**
* @group extended
*/
def persistIndex(indexName: String, fileName: String): this.type = {
simbaSession.sessionState.indexManager.persistIndex(this.simbaSession, indexName, fileName)
this
}
/**
* @group extended
*/
def loadIndex(indexName: String, fileName: String): this.type = {
simbaSession.sessionState.indexManager.loadIndex(this.simbaSession, indexName, fileName)
this
}
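  // Resolves column names against the analyzed plan's output; unknown names yield null,
  // which the public operators above guard against with `assert(attr != null, ...)`.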
private def getAttributes(keys: Array[String], attrs: Seq[Attribute] = this.queryExecution.analyzed.output)
: Array[Attribute] = {
keys.map(key => {
val temp = attrs.indexWhere(_.name == key)
if (temp >= 0) attrs(temp)
else null
})
}
@inline private def withPlan(logicalPlan: => LogicalPlan): DataFrame = {
Dataset.ofRows(simbaSession, logicalPlan)
}
}
private[simba] object Dataset {
def apply[T: Encoder](simbaSession: SimbaSession, logicalPlan: LogicalPlan): Dataset[T] = {
new Dataset(simbaSession, logicalPlan, implicitly[Encoder[T]])
}
def ofRows(simbaSession: SimbaSession, logicalPlan: LogicalPlan): DataFrame = {
val qe = simbaSession.sessionState.executePlan(logicalPlan)
qe.assertAnalyzed()
new Dataset[Row](simbaSession, qe, RowEncoder(qe.analyzed.schema))
}
}
|
InitialDLab/Simba
|
src/main/scala/org/apache/spark/sql/simba/DataSet.scala
|
Scala
|
apache-2.0
| 8,578 |
/**********************************************************************************************************************
* This file is part of Scrupal, a Scalable Reactive Web Application Framework for Content Management *
* *
* Copyright (c) 2015, Reactific Software LLC. All Rights Reserved. *
* *
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed *
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for *
* the specific language governing permissions and limitations under the License. *
**********************************************************************************************************************/
package scrupal.core.nodes
import java.net.URL
import java.time.Instant
import akka.http.scaladsl.model.{MediaTypes, MediaType}
import scalatags.Text.all._
import scrupal.api._
import scala.concurrent.Future
/** Link Node
* This node type contains a URL to a resource and generates a link to it as an HTML anchor tag
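  * (e.g. a node pointing at "http://example.com" with description "Example" renders
  * roughly `<a href="http://example.com">Example</a>`)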
*/
case class AnchorNode(
name: String,
description : String,
url : URL,
modified : Option[Instant] = Some(Instant.now),
created : Option[Instant] = Some(Instant.now)
) extends Node {
override val mediaType : MediaType = MediaTypes.`text/html`
def apply(context: Context) : Future[Response] = Future.successful {
HtmlResponse(Html.renderContents(Seq(a(href := url.toString, description))), Successful)
}
}
|
scrupal/scrupal
|
scrupal-core/src/main/scala/scrupal/core/nodes/AnchorNode.scala
|
Scala
|
apache-2.0
| 2,433 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools.nsc
import settings.MutableSettings
/** A compatibility stub.
*/
class Settings(errorFn: String => Unit) extends MutableSettings(errorFn) {
def this() = this(Console.println)
override def withErrorFn(errorFn: String => Unit): Settings = {
val settings = new Settings(errorFn)
copyInto(settings)
settings
}
}
|
martijnhoekstra/scala
|
src/compiler/scala/tools/nsc/Settings.scala
|
Scala
|
apache-2.0
| 640 |
import language.implicitConversions
given Conversion[Int, String] = _.toString
object a:
val s: String = 1 // OK
object b:
import language.implicitConversions as _
val s: String = 2 // error
object c:
import language.implicitConversions
val s: String = 3 // OK again
|
dotty-staging/dotty
|
tests/neg-custom-args/feature-shadowing.scala
|
Scala
|
apache-2.0
| 304 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.exceptions.TestFailedException
import Matchers._
class ShouldSameInstanceAsSpec extends Spec {
  object `The be theSameInstanceAs syntax` {
val string = "Hi"
val obj: AnyRef = string
val otherString = new String("Hi")
def `should do nothing if the object is the same instance as another object` {
string should be theSameInstanceAs (string)
obj should be theSameInstanceAs (string)
string should be theSameInstanceAs (obj)
}
def `should do nothing if the object is not the same instance as another object, when used with not` {
otherString should not { be theSameInstanceAs (string) }
otherString should not be theSameInstanceAs (string)
}
    def `should do nothing if the object is the same instance as another object, when used in a logical-and expression` {
obj should ((be theSameInstanceAs (string)) and (be theSameInstanceAs (string)))
obj should (be theSameInstanceAs (string) and (be theSameInstanceAs (string)))
obj should (be theSameInstanceAs (string) and be theSameInstanceAs (string))
}
def `should do nothing if the object is the same instance as another object, when used in a logical-or expression` {
obj should ((be theSameInstanceAs (otherString)) or (be theSameInstanceAs (string)))
obj should (be theSameInstanceAs (otherString) or (be theSameInstanceAs (string)))
obj should (be theSameInstanceAs (otherString) or be theSameInstanceAs (string))
obj should ((be theSameInstanceAs (string)) or (be theSameInstanceAs (otherString)))
obj should (be theSameInstanceAs (string) or (be theSameInstanceAs (otherString)))
obj should (be theSameInstanceAs (string) or be theSameInstanceAs (otherString))
}
def `should do nothing if the object is not the same instance as another object, when used in a logical-and expression with not` {
obj should (not (be theSameInstanceAs (otherString)) and not (be theSameInstanceAs (otherString)))
obj should ((not be theSameInstanceAs (otherString)) and (not be theSameInstanceAs (otherString)))
obj should (not be theSameInstanceAs (otherString) and not be theSameInstanceAs (otherString))
}
def `should do nothing if the object is not the same instance as another object, when used in a logical-or expression with not` {
obj should (not (be theSameInstanceAs (string)) or not (be theSameInstanceAs (otherString)))
obj should ((not be theSameInstanceAs (string)) or (not be theSameInstanceAs (otherString)))
obj should (not be theSameInstanceAs (string) or not be theSameInstanceAs (otherString))
obj should (not (be theSameInstanceAs (otherString)) or not (be theSameInstanceAs (string)))
obj should ((not be theSameInstanceAs (otherString)) or (not be theSameInstanceAs (string)))
obj should (not be theSameInstanceAs (otherString) or not be theSameInstanceAs (string))
}
def `should throw TestFailedException if the object is not the same instance as another object` {
val caught1 = intercept[TestFailedException] {
otherString should be theSameInstanceAs (string)
}
assert(caught1.getMessage === "\\"Hi\\" was not the same instance as \\"Hi\\"")
}
def `should throw TestFailedException if the object is the same instance as another object, when used with not` {
val caught1 = intercept[TestFailedException] {
obj should not { be theSameInstanceAs (string) }
}
assert(caught1.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\"")
val caught2 = intercept[TestFailedException] {
obj should not be theSameInstanceAs (string)
}
assert(caught2.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\"")
}
def `should throw TestFailedException if the object is not the same instance as another object, when used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
obj should ((be theSameInstanceAs (string)) and (be theSameInstanceAs (otherString)))
}
assert(caught1.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\", but \\"Hi\\" was not the same instance as \\"Hi\\"")
val caught2 = intercept[TestFailedException] {
obj should (be theSameInstanceAs (string) and (be theSameInstanceAs (otherString)))
}
assert(caught2.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\", but \\"Hi\\" was not the same instance as \\"Hi\\"")
val caught3 = intercept[TestFailedException] {
obj should (be theSameInstanceAs (string) and be theSameInstanceAs (otherString))
}
assert(caught3.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\", but \\"Hi\\" was not the same instance as \\"Hi\\"")
}
def `should throw TestFailedException if the object is not the same instance as another object, when used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
obj should ((be theSameInstanceAs (otherString)) or (be theSameInstanceAs (otherString)))
}
assert(caught1.getMessage === "\\"Hi\\" was not the same instance as \\"Hi\\", and \\"Hi\\" was not the same instance as \\"Hi\\"")
val caught2 = intercept[TestFailedException] {
obj should (be theSameInstanceAs (otherString) or (be theSameInstanceAs (otherString)))
}
assert(caught2.getMessage === "\\"Hi\\" was not the same instance as \\"Hi\\", and \\"Hi\\" was not the same instance as \\"Hi\\"")
val caught3 = intercept[TestFailedException] {
obj should (be theSameInstanceAs (otherString) or be theSameInstanceAs (otherString))
}
assert(caught3.getMessage === "\\"Hi\\" was not the same instance as \\"Hi\\", and \\"Hi\\" was not the same instance as \\"Hi\\"")
}
def `should throw TestFailedException if the object is the same instance as another object, when used in a logical-and expression with not` {
val caught1 = intercept[TestFailedException] {
obj should (not (be theSameInstanceAs (otherString)) and not (be theSameInstanceAs (string)))
}
assert(caught1.getMessage === "\\"Hi\\" was not the same instance as \\"Hi\\", but \\"Hi\\" was the same instance as \\"Hi\\"")
val caught2 = intercept[TestFailedException] {
obj should ((not be theSameInstanceAs (otherString)) and (not be theSameInstanceAs (string)))
}
assert(caught2.getMessage === "\\"Hi\\" was not the same instance as \\"Hi\\", but \\"Hi\\" was the same instance as \\"Hi\\"")
val caught3 = intercept[TestFailedException] {
obj should (not be theSameInstanceAs (otherString) and not be theSameInstanceAs (string))
}
assert(caught3.getMessage === "\\"Hi\\" was not the same instance as \\"Hi\\", but \\"Hi\\" was the same instance as \\"Hi\\"")
// Check that the error message "short circuits"
val caught7 = intercept[TestFailedException] {
obj should (not (be theSameInstanceAs (string)) and not (be theSameInstanceAs (otherString)))
}
assert(caught7.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\"")
}
    def `should throw TestFailedException if the object is the same instance as another object, when used in a logical-or expression with not` {
val caught1 = intercept[TestFailedException] {
obj should (not (be theSameInstanceAs (string)) or not (be theSameInstanceAs (string)))
}
assert(caught1.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\", and \\"Hi\\" was the same instance as \\"Hi\\"")
val caught2 = intercept[TestFailedException] {
obj should ((not be theSameInstanceAs (string)) or (not be theSameInstanceAs (string)))
}
assert(caught2.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\", and \\"Hi\\" was the same instance as \\"Hi\\"")
val caught3 = intercept[TestFailedException] {
obj should (not be theSameInstanceAs (string) or not be theSameInstanceAs (string))
}
assert(caught3.getMessage === "\\"Hi\\" was the same instance as \\"Hi\\", and \\"Hi\\" was the same instance as \\"Hi\\"")
}
}
}
|
travisbrown/scalatest
|
src/test/scala/org/scalatest/ShouldSameInstanceAsSpec.scala
|
Scala
|
apache-2.0
| 8,709 |
package io.cumulus.persistence.stores.orderings
import enumeratum.{Enum, EnumEntry}
import io.cumulus.persistence.query.QueryOrderingDirection.{ASC, DESC}
import io.cumulus.persistence.query.{QueryOrdering, QueryOrderingDirection, SqlOrdering}
import io.cumulus.persistence.stores.SessionStore._
import scala.collection.immutable
sealed abstract class SessionOrderingType(sql: String, direction: QueryOrderingDirection) extends EnumEntry {
def toSqlOrdering: SqlOrdering =
SqlOrdering(sql, direction)
}
object SessionOrderingType extends Enum[SessionOrderingType] {
// Note: since we use LocalDateTime, lexicographical (string) order sorts the dates chronologically
case object OrderByLastActivityAsc extends SessionOrderingType(s"$metadataField ->> 'lastActivity'", ASC)
case object OrderByLastActivityDesc extends SessionOrderingType(s"$metadataField ->> 'lastActivity'", DESC)
case object OrderBySinceAsc extends SessionOrderingType(s"$metadataField ->> 'since'", ASC)
case object OrderBySinceDesc extends SessionOrderingType(s"$metadataField ->> 'since'", DESC)
override val values: immutable.IndexedSeq[SessionOrderingType] = findValues
}
case class SessionOrdering(
orders: Seq[SessionOrderingType]
) extends QueryOrdering {
val orderings: Seq[SqlOrdering] =
orders.map(_.toSqlOrdering)
}
object SessionOrdering {
val empty: SessionOrdering =
SessionOrdering(Seq.empty)
def of(orders: SessionOrderingType*): SessionOrdering =
SessionOrdering(orders)
}
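// A minimal usage sketch (not part of the original repository), kept in the surrounding package;
// the SqlOrdering shown in the comment presumes `metadataField` resolves to "metadata".
object SessionOrderingExample {
  import SessionOrderingType._
  // Most recently active sessions first, with the oldest creation date as a tie-breaker.
  val ordering: SessionOrdering = SessionOrdering.of(OrderByLastActivityDesc, OrderBySinceAsc)
  // Each entry expands to a SqlOrdering over the JSON metadata column,
  // e.g. SqlOrdering("metadata ->> 'lastActivity'", DESC).
  val sql: Seq[SqlOrdering] = ordering.orderings
}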
|
Cumulus-Cloud/cumulus
|
server/cumulus-core/src/main/scala/io/cumulus/persistence/stores/orderings/SessionOrdering.scala
|
Scala
|
mit
| 1,516 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.concurrent.TimeUnit._
import org.apache.spark.{SparkEnv, TaskContext}
import org.apache.spark.executor.TaskMetrics
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, CodeGenerator, ExprCode}
import org.apache.spark.sql.catalyst.plans.physical._
import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_MILLIS
import org.apache.spark.sql.execution.metric.SQLMetrics
/**
* Performs (external) sorting.
*
* @param global when true performs a global sort of all partitions by shuffling the data first
* if necessary.
* @param testSpillFrequency Method for configuring periodic spilling in unit tests. If set, will
* spill every `frequency` records.
*/
case class SortExec(
sortOrder: Seq[SortOrder],
global: Boolean,
child: SparkPlan,
testSpillFrequency: Int = 0)
extends UnaryExecNode with BlockingOperatorWithCodegen {
override def output: Seq[Attribute] = child.output
override def outputOrdering: Seq[SortOrder] = sortOrder
// The sort is performed locally within each partition, so the
// child operator's partitioning is retained.
override def outputPartitioning: Partitioning = child.outputPartitioning
override def requiredChildDistribution: Seq[Distribution] =
if (global) OrderedDistribution(sortOrder) :: Nil else UnspecifiedDistribution :: Nil
private val enableRadixSort = sqlContext.conf.enableRadixSort
override lazy val metrics = Map(
"sortTime" -> SQLMetrics.createTimingMetric(sparkContext, "sort time"),
"peakMemory" -> SQLMetrics.createSizeMetric(sparkContext, "peak memory"),
"spillSize" -> SQLMetrics.createSizeMetric(sparkContext, "spill size"))
private[sql] var rowSorter: UnsafeExternalRowSorter = _
/**
* This method is invoked only once for each SortExec instance to initialize an
* UnsafeExternalRowSorter; both `plan.execute` and code generation use it.
* In the code-generation path this function is called from outside the class, so it
* must be public.
*/
def createSorter(): UnsafeExternalRowSorter = {
val ordering = newOrdering(sortOrder, output)
// The comparator for comparing prefix
val boundSortExpression = BindReferences.bindReference(sortOrder.head, output)
val prefixComparator = SortPrefixUtils.getPrefixComparator(boundSortExpression)
val canUseRadixSort = enableRadixSort && sortOrder.length == 1 &&
SortPrefixUtils.canSortFullyWithPrefix(boundSortExpression)
// The generator for prefix
val prefixExpr = SortPrefix(boundSortExpression)
val prefixProjection = UnsafeProjection.create(Seq(prefixExpr))
val prefixComputer = new UnsafeExternalRowSorter.PrefixComputer {
private val result = new UnsafeExternalRowSorter.PrefixComputer.Prefix
override def computePrefix(row: InternalRow):
UnsafeExternalRowSorter.PrefixComputer.Prefix = {
val prefix = prefixProjection.apply(row)
result.isNull = prefix.isNullAt(0)
result.value = if (result.isNull) prefixExpr.nullValue else prefix.getLong(0)
result
}
}
val pageSize = SparkEnv.get.memoryManager.pageSizeBytes
rowSorter = UnsafeExternalRowSorter.create(
schema, ordering, prefixComparator, prefixComputer, pageSize, canUseRadixSort)
if (testSpillFrequency > 0) {
rowSorter.setTestSpillFrequency(testSpillFrequency)
}
rowSorter
}
protected override def doExecute(): RDD[InternalRow] = {
val peakMemory = longMetric("peakMemory")
val spillSize = longMetric("spillSize")
val sortTime = longMetric("sortTime")
child.execute().mapPartitionsInternal { iter =>
val sorter = createSorter()
val metrics = TaskContext.get().taskMetrics()
// Remember the task's spill size before executing this operator so that we can
// figure out how many bytes this operator spilled.
val spillSizeBefore = metrics.memoryBytesSpilled
val sortedIterator = sorter.sort(iter.asInstanceOf[Iterator[UnsafeRow]])
sortTime += NANOSECONDS.toMillis(sorter.getSortTimeNanos)
peakMemory += sorter.getPeakMemoryUsage
spillSize += metrics.memoryBytesSpilled - spillSizeBefore
metrics.incPeakExecutionMemory(sorter.getPeakMemoryUsage)
sortedIterator
}
}
override def usedInputs: AttributeSet = AttributeSet(Seq.empty)
override def inputRDDs(): Seq[RDD[InternalRow]] = {
child.asInstanceOf[CodegenSupport].inputRDDs()
}
// Name of sorter variable used in codegen.
private var sorterVariable: String = _
override protected def doProduce(ctx: CodegenContext): String = {
val needToSort =
ctx.addMutableState(CodeGenerator.JAVA_BOOLEAN, "needToSort", v => s"$v = true;")
// Initialize the class member variables. This includes the instance of the Sorter and
// the iterator to return sorted rows.
val thisPlan = ctx.addReferenceObj("plan", this)
// Inline mutable state since not many Sort operations in a task
sorterVariable = ctx.addMutableState(classOf[UnsafeExternalRowSorter].getName, "sorter",
v => s"$v = $thisPlan.createSorter();", forceInline = true)
val metrics = ctx.addMutableState(classOf[TaskMetrics].getName, "metrics",
v => s"$v = org.apache.spark.TaskContext.get().taskMetrics();", forceInline = true)
val sortedIterator = ctx.addMutableState("scala.collection.Iterator<UnsafeRow>", "sortedIter",
forceInline = true)
val addToSorter = ctx.freshName("addToSorter")
val addToSorterFuncName = ctx.addNewFunction(addToSorter,
s"""
| private void $addToSorter() throws java.io.IOException {
| ${child.asInstanceOf[CodegenSupport].produce(ctx, this)}
| }
""".stripMargin.trim)
val outputRow = ctx.freshName("outputRow")
val peakMemory = metricTerm(ctx, "peakMemory")
val spillSize = metricTerm(ctx, "spillSize")
val spillSizeBefore = ctx.freshName("spillSizeBefore")
val sortTime = metricTerm(ctx, "sortTime")
s"""
| if ($needToSort) {
| long $spillSizeBefore = $metrics.memoryBytesSpilled();
| $addToSorterFuncName();
| $sortedIterator = $sorterVariable.sort();
| $sortTime.add($sorterVariable.getSortTimeNanos() / $NANOS_PER_MILLIS);
| $peakMemory.add($sorterVariable.getPeakMemoryUsage());
| $spillSize.add($metrics.memoryBytesSpilled() - $spillSizeBefore);
| $metrics.incPeakExecutionMemory($sorterVariable.getPeakMemoryUsage());
| $needToSort = false;
| }
|
| while ($limitNotReachedCond $sortedIterator.hasNext()) {
| UnsafeRow $outputRow = (UnsafeRow)$sortedIterator.next();
| ${consume(ctx, null, outputRow)}
| if (shouldStop()) return;
| }
""".stripMargin.trim
}
override def doConsume(ctx: CodegenContext, input: Seq[ExprCode], row: ExprCode): String = {
s"""
|${row.code}
|$sorterVariable.insertRow((UnsafeRow)${row.value});
""".stripMargin
}
/**
* In SortExec, we override cleanupResources to close the UnsafeExternalRowSorter.
*/
override protected[sql] def cleanupResources(): Unit = {
if (rowSorter != null) {
// rowSorter may be null here: for example, when the current task has an empty
// iterator, a downstream physical node (like SortMergeJoinExec) can trigger
// cleanupResources before rowSorter is initialized in createSorter.
rowSorter.cleanupResources()
}
super.cleanupResources()
}
}
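// A hedged usage sketch (not from the Spark sources): SortExec normally enters a physical plan
// through the DataFrame API rather than being constructed directly; `spark` is an assumed SparkSession.
object SortExecUsageSketch {
  import org.apache.spark.sql.SparkSession
  def demo(spark: SparkSession): Unit = {
    import spark.implicits._
    val df = Seq(3, 1, 2).toDF("value")
    // orderBy requires a global sort: SortExec(global = true), typically preceded by a range-partitioning shuffle.
    println(df.orderBy("value").queryExecution.executedPlan)
    // sortWithinPartitions only needs a local sort: SortExec(global = false), no shuffle required.
    println(df.sortWithinPartitions("value").queryExecution.executedPlan)
  }
}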
|
caneGuy/spark
|
sql/core/src/main/scala/org/apache/spark/sql/execution/SortExec.scala
|
Scala
|
apache-2.0
| 8,574 |
/**
* Copyright 2014 Andrea Esposito <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unipi.thesis.andrea.esposito.onjag.core
/**
* Created by Andrea Esposito <[email protected]> on 27/02/14.
*
* Defines when a protocol has to be check-pointed
*/
trait CheckpointCondition {
def condition(p: Protocol, superstep: Int): Boolean
}
/**
* Checkpoints the protocol each ''interval'' supersteps
* @param interval
*/
class SuperstepIntervalCondition(interval: Int) extends CheckpointCondition {
def condition(p: Protocol, superstep: Int): Boolean = superstep % interval == 0
}
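// An illustrative sketch (not part of the original project): a custom policy only has to
// implement `condition`; the class below and its parameters are hypothetical.
/**
 * Checkpoints the protocol only once ''startAt'' has been reached, then every ''interval'' supersteps.
 */
class DelayedIntervalCondition(startAt: Int, interval: Int) extends CheckpointCondition {
  def condition(p: Protocol, superstep: Int): Boolean =
    superstep >= startAt && (superstep - startAt) % interval == 0
}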
|
roy20021/ONJAG
|
src/it/unipi/thesis/andrea/esposito/onjag/core/CheckpointCondition.scala
|
Scala
|
apache-2.0
| 1,126 |
package shield.routing
import org.specs2.mutable.Specification
import shield.implicits.HttpImplicits._
import spray.http.HttpHeaders.RawHeader
import spray.http.{Uri, HttpMethods, HttpHeaders, HttpRequest}
class HttpImplicitsSpec extends Specification {
"HttpImplicits" should {
"Use the latest X-Forwarded-For header" in {
val request = HttpRequest().withHeaders(
HttpHeaders.`X-Forwarded-For`("1.1.1.1"),
HttpHeaders.`Remote-Address`("2.2.2.2")
)
getClientAddress(request.withTrustXForwardedFor(1)) must be equalTo "1.1.1.1"
}
"Handle X-Forwarded-For headers are not formed properly" in {
val request = HttpRequest().withHeaders(
RawHeader("X-Forwarded-For", "1111"),
HttpHeaders.`Remote-Address`("2.2.2.2")
)
getClientAddress(request.withTrustXForwardedFor(1)) must be equalTo "1111"
}
"Handle Remote-Address headers are not formed properly" in {
val request = HttpRequest().withHeaders(
RawHeader("X-Forwarded-For", "1111"),
RawHeader("Remote-Address", "2222")
)
getClientAddress(request.withTrustXForwardedFor(1)) must be equalTo "1111"
}
"Use \\"Remote-Address\\" if no \\"X-Forwarded-For\\" is found" in {
val request = HttpRequest().withHeaders(
HttpHeaders.`Remote-Address`("2.2.2.2")
)
getClientAddress(request.withTrustXForwardedFor(1)) must be equalTo "2.2.2.2"
}
"Use \\"Remote-Address\\" header if trustProxies is set to 0" in {
val request = HttpRequest().withHeaders(
HttpHeaders.`X-Forwarded-For`("1.1.1.1"),
HttpHeaders.`Remote-Address`("2.2.2.2")
)
getClientAddress(request.withTrustXForwardedFor(0)) must be equalTo "2.2.2.2"
}
"Default to \\"127.0.0.1\\" if neither \\"X-Forwarded-For\\" and \\"Remote-Address\\" are not present" in {
val request = HttpRequest()
getClientAddress(request.withTrustXForwardedFor(0)) must be equalTo "127.0.0.1"
}
"Use previous proxies located in the \\"X-Forwarded-For\\" header if the trustProxies is set to" in {
val request = HttpRequest().withHeaders(
RawHeader("X-Forwarded-For", "1.1.1.1,2.2.2.2,3.3.3.3,4.4.4.4"),
HttpHeaders.`Remote-Address`("7.7.7.7")
)
getClientAddress(request.withTrustXForwardedFor(2)) must be equalTo "3.3.3.3"
}
"Using a negative trustProxie value results in an exception" in {
val request = HttpRequest().withHeaders(
HttpHeaders.`X-Forwarded-For`("1.1.1.1"),
HttpHeaders.`Remote-Address`("2.2.2.2")
)
getClientAddress(request.withTrustXForwardedFor(-1)) must throwA[Exception]
}
"Use the last \\"X-Forwarded-For\\" value if an input is larger than the list" in {
val request = HttpRequest().withHeaders(
RawHeader("X-Forwarded-For", "1111,2.2.2.2,3333,4.4.4.4"),
HttpHeaders.`Remote-Address`("7.7.7.7")
)
getClientAddress(request.withTrustXForwardedFor(45)) must be equalTo "1111"
}
"Rewrite a path using the X-Forwarded-Proto header" in {
val request = HttpRequest(HttpMethods.GET, Uri("http://example.com/test")).withHeaders(
RawHeader("X-Forwarded-For", "1111,2.2.2.2,3333,4.4.4.4"),
HttpHeaders.`Remote-Address`("7.7.7.7"),
RawHeader("X-Forwarded-Proto", "https")
)
request.withTrustXForwardedProto(true).uri must be equalTo "https://example.com/test"
}
"Leave the URI untouched if trust-x-forwarded-proto is false" in {
val request = HttpRequest(HttpMethods.GET, Uri("http://example.com/test")).withHeaders(
RawHeader("X-Forwarded-For", "1111,2.2.2.2,3333,4.4.4.4"),
HttpHeaders.`Remote-Address`("7.7.7.7"),
RawHeader("X-Forwarded-Proto", "https")
)
request.withTrustXForwardedProto(false).uri must be equalTo "http://example.com/test"
}
"Use original request if an invalid \\"X-Forwarded-Proto\\" header is received" in {
val request = HttpRequest(HttpMethods.GET, Uri("http://example.com/test")).withHeaders(
RawHeader("X-Forwarded-For", "1111,2.2.2.2,3333,4.4.4.4"),
HttpHeaders.`Remote-Address`("7.7.7.7"),
RawHeader("X-Forwarded-Proto", "3")
)
request.withTrustXForwardedProto(true).uri must be equalTo "http://example.com/test"
}
val uris = Set[String](
"http://example.com/test.json",
"http://example.com/test.xml",
"http://example.com/test.",
"http://example.com/test.kkkkkkkwwwwwwwwnnnnnnnnlskdfj",
"http://example.com/test"
)
import shield.implicits.StringImplicits._
val extensions = Set[String](
"json",
"xml",
"kkkkkkkwwwwwwwwnnnnnnnnlskdfj",
"ext",
"",
"txt",
"php"
).map(t => t.mustStartWith("."))
for(uri <- uris) {
s"Correctly strip extensions from request $uri" in {
HttpRequest(HttpMethods.GET, Uri(uri)).withStrippedExtensions(extensions).uri must be equalTo "http://example.com/test"
HttpRequest(HttpMethods.GET, Uri(uri)).withStrippedExtensions(Set[String]()).uri must be equalTo uri
}
}
"Not remove segments from a path, only extensions" in {
HttpRequest(HttpMethods.GET, Uri("http://example.com/test.foo.bar/fizz/buzz")).withStrippedExtensions(extensions).uri must be equalTo "http://example.com/test.foo.bar/fizz/buzz"
HttpRequest(HttpMethods.GET, Uri("http://example.com/test.foo.bar/fizz/buzz.ext")).withStrippedExtensions(extensions).uri must be equalTo "http://example.com/test.foo.bar/fizz/buzz"
HttpRequest(HttpMethods.GET, Uri("http://example.com/test.foo.bar/fizz/buzz.")).withStrippedExtensions(extensions).uri must be equalTo "http://example.com/test.foo.bar/fizz/buzz"
}
"Not remove file extensions from query parameters" in {
HttpRequest(HttpMethods.GET, Uri("http://example.com/handler?file=foo.txt")).withStrippedExtensions(extensions).uri must be equalTo "http://example.com/handler?file=foo.txt"
HttpRequest(HttpMethods.GET, Uri("http://example.com/handler.php?file=foo.txt")).withStrippedExtensions(extensions).uri must be equalTo "http://example.com/handler?file=foo.txt"
}
"Keep remote-address unchanged" in {
val request = HttpRequest().withHeaders(
HttpHeaders.`X-Forwarded-For`("1.1.1.1"),
HttpHeaders.`Remote-Address`("2.2.2.2")
)
getRemoteAddress(request.withTrustXForwardedFor(1)) must be equalTo "2.2.2.2"
}
}
def getHeader(request: HttpRequest, header: String) : Option[String] = {
request.headers.find(_.lowercaseName == header).map(_.value)
}
def getRemoteAddress(request: HttpRequest) : String = {
getHeader(request, "remote-address").get
}
def getClientAddress(request: HttpRequest) : String = {
getHeader(request, "client-address").get
}
}
|
RetailMeNot/shield
|
src/test/scala/shield/routing/HttpImplicitsSpec.scala
|
Scala
|
mit
| 6,838 |
import scala.quoted._
import scala.quoted.staging._
object Test {
def main(args: Array[String]): Unit = {
given Toolbox = Toolbox.make(getClass.getClassLoader)
withQuoteContext('[List])
def list(using QuoteContext) = bound('{List(1, 2, 3)})
println(withQuoteContext(list.show))
println(run(list))
def opt(using QuoteContext) = bound('{Option(4)})
println(withQuoteContext(opt.show))
println(run(opt))
def map(using QuoteContext) = bound('{Map(4 -> 1)})
println(withQuoteContext(map.show))
println(run(map))
}
def bound[T: Type, S[_]: Type](x: Expr[S[T]])(using QuoteContext): Expr[S[T]] = '{
val y = $x
y
}
}
|
som-snytt/dotty
|
tests/run-staging/i5965b.scala
|
Scala
|
apache-2.0
| 675 |
package io.vamp.model.notification
import java.time.OffsetDateTime
import io.vamp.common.notification.Notification
import io.vamp.model.artifact.{ Deployment, DeploymentCluster }
import io.vamp.model.event.Event
object SlaEvent {
def slaTags(deployment: Deployment, cluster: DeploymentCluster): Set[String] = {
("sla" :: s"deployment${Event.tagDelimiter}${deployment.name}" :: s"cluster${Event.tagDelimiter}${cluster.name}" :: Nil).toSet
}
}
trait SlaEvent {
def deployment: Deployment
def cluster: DeploymentCluster
def timestamp: OffsetDateTime
def value: AnyRef = None
def tags: Set[String] = Set()
}
object Escalate {
def tags: Set[String] = Set(s"sla${Event.tagDelimiter}escalate")
}
case class Escalate(deployment: Deployment, cluster: DeploymentCluster, timestamp: OffsetDateTime = OffsetDateTime.now()) extends Notification with SlaEvent {
override def tags: Set[String] = Escalate.tags ++ SlaEvent.slaTags(deployment, cluster)
}
object DeEscalate {
def tags: Set[String] = Set(s"sla${Event.tagDelimiter}deescalate")
}
case class DeEscalate(deployment: Deployment, cluster: DeploymentCluster, timestamp: OffsetDateTime = OffsetDateTime.now()) extends Notification with SlaEvent {
override def tags: Set[String] = DeEscalate.tags ++ SlaEvent.slaTags(deployment, cluster)
}
|
magneticio/vamp
|
model/src/main/scala/io/vamp/model/notification/SlaEvent.scala
|
Scala
|
apache-2.0
| 1,318 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.io._
import java.nio.ByteBuffer
import java.util.UUID
import java.util.concurrent.Semaphore
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.{mock, times, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.scalatest.PrivateMethodTester
import org.apache.spark.{SparkFunSuite, TaskContext}
import org.apache.spark.network._
import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer}
import org.apache.spark.network.shuffle.{BlockFetchingListener, DownloadFileManager}
import org.apache.spark.network.util.LimitedInputStream
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.util.Utils
class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodTester {
private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
// Some of the tests are quite tricky because we are testing the cleanup behavior
// in the presence of faults.
/** Creates a mock [[BlockTransferService]] that returns data from the given map. */
private def createMockTransfer(data: Map[BlockId, ManagedBuffer]): BlockTransferService = {
val transfer = mock(classOf[BlockTransferService])
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any())).thenAnswer(
(invocation: InvocationOnMock) => {
val blocks = invocation.getArguments()(3).asInstanceOf[Array[String]]
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
for (blockId <- blocks) {
if (data.contains(BlockId(blockId))) {
listener.onBlockFetchSuccess(blockId, data(BlockId(blockId)))
} else {
listener.onBlockFetchFailure(blockId, new BlockNotFoundException(blockId))
}
}
})
transfer
}
// Create a mock managed buffer for testing
def createMockManagedBuffer(size: Int = 1): ManagedBuffer = {
val mockManagedBuffer = mock(classOf[ManagedBuffer])
val in = mock(classOf[InputStream])
when(in.read(any())).thenReturn(1)
when(in.read(any(), any(), any())).thenReturn(1)
when(mockManagedBuffer.createInputStream()).thenReturn(in)
when(mockManagedBuffer.size()).thenReturn(size)
mockManagedBuffer
}
test("successful 3 local reads + 2 remote reads") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
// Make sure blockManager.getBlockData would return the blocks
val localBlocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer())
localBlocks.foreach { case (blockId, buf) =>
doReturn(buf).when(blockManager).getBlockData(meq(blockId))
}
// Make sure remote blocks would return
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val remoteBlocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 3, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 4, 0) -> createMockManagedBuffer())
val transfer = createMockTransfer(remoteBlocks)
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(localBmId, localBlocks.keys.map(blockId => (blockId, 1.asInstanceOf[Long])).toSeq),
(remoteBmId, remoteBlocks.keys.map(blockId => (blockId, 1.asInstanceOf[Long])).toSeq)
).toIterator
val taskContext = TaskContext.empty()
val metrics = taskContext.taskMetrics.createTempShuffleReadMetrics()
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => in,
48 * 1024 * 1024,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
false,
metrics)
// 3 local blocks fetched in initialization
verify(blockManager, times(3)).getBlockData(any())
for (i <- 0 until 5) {
assert(iterator.hasNext, s"iterator should have 5 elements but actually has $i elements")
val (blockId, inputStream) = iterator.next()
// Make sure we release buffers when a wrapped input stream is closed.
val mockBuf = localBlocks.getOrElse(blockId, remoteBlocks(blockId))
// Note: ShuffleBlockFetcherIterator wraps input streams in a BufferReleasingInputStream
val wrappedInputStream = inputStream.asInstanceOf[BufferReleasingInputStream]
verify(mockBuf, times(0)).release()
val delegateAccess = PrivateMethod[InputStream]('delegate)
verify(wrappedInputStream.invokePrivate(delegateAccess()), times(0)).close()
wrappedInputStream.close()
verify(mockBuf, times(1)).release()
verify(wrappedInputStream.invokePrivate(delegateAccess()), times(1)).close()
wrappedInputStream.close() // close should be idempotent
verify(mockBuf, times(1)).release()
verify(wrappedInputStream.invokePrivate(delegateAccess()), times(1)).close()
}
// 3 local blocks, and 2 remote blocks
// (but from the same block manager so one call to fetchBlocks)
verify(blockManager, times(3)).getBlockData(any())
verify(transfer, times(1)).fetchBlocks(any(), any(), any(), any(), any(), any())
}
test("release current unexhausted buffer in case the task completes early") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
// Make sure remote blocks would return
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val blocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer())
// Semaphore to coordinate event sequence in two different threads.
val sem = new Semaphore(0)
val transfer = mock(classOf[BlockTransferService])
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
.thenAnswer((invocation: InvocationOnMock) => {
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
Future {
// Return the first two blocks, and wait till task completion before returning the 3rd one
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 1, 0).toString, blocks(ShuffleBlockId(0, 1, 0)))
sem.acquire()
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 2, 0).toString, blocks(ShuffleBlockId(0, 2, 0)))
}
})
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(remoteBmId, blocks.keys.map(blockId => (blockId, 1.asInstanceOf[Long])).toSeq)).toIterator
val taskContext = TaskContext.empty()
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => in,
48 * 1024 * 1024,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
false,
taskContext.taskMetrics.createTempShuffleReadMetrics())
verify(blocks(ShuffleBlockId(0, 0, 0)), times(0)).release()
iterator.next()._2.close() // close() first block's input stream
verify(blocks(ShuffleBlockId(0, 0, 0)), times(1)).release()
// Get the 2nd block but do not exhaust the iterator
val subIter = iterator.next()._2
// Complete the task; then the 2nd block buffer should be exhausted
verify(blocks(ShuffleBlockId(0, 1, 0)), times(0)).release()
taskContext.markTaskCompleted(None)
verify(blocks(ShuffleBlockId(0, 1, 0)), times(1)).release()
// The 3rd block should not be retained because the iterator is already in zombie state
sem.release()
verify(blocks(ShuffleBlockId(0, 2, 0)), times(0)).retain()
verify(blocks(ShuffleBlockId(0, 2, 0)), times(0)).release()
}
test("iterator is all consumed if task completes early") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
// Make sure remote blocks would return
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val blocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer())
// Semaphore to coordinate event sequence in two different threads.
val sem = new Semaphore(0)
val transfer = mock(classOf[BlockTransferService])
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
.thenAnswer((invocation: InvocationOnMock) => {
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
Future {
// Return the first two blocks, and wait till task completion before returning the last
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 1, 0).toString, blocks(ShuffleBlockId(0, 1, 0)))
sem.acquire()
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 2, 0).toString, blocks(ShuffleBlockId(0, 2, 0)))
}
})
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(remoteBmId, blocks.keys.map(blockId => (blockId, 1.asInstanceOf[Long])).toSeq)).toIterator
val taskContext = TaskContext.empty()
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => in,
48 * 1024 * 1024,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
false,
taskContext.taskMetrics.createTempShuffleReadMetrics())
assert(iterator.hasNext)
iterator.next()
taskContext.markTaskCompleted(None)
sem.release()
assert(iterator.hasNext === false)
}
test("fail all blocks if any of the remote request fails") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
// Make sure remote blocks would return
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val blocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer()
)
// Semaphore to coordinate event sequence in two different threads.
val sem = new Semaphore(0)
val transfer = mock(classOf[BlockTransferService])
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
.thenAnswer((invocation: InvocationOnMock) => {
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
Future {
// Return the first block, and then fail.
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
listener.onBlockFetchFailure(
ShuffleBlockId(0, 1, 0).toString, new BlockNotFoundException("blah"))
listener.onBlockFetchFailure(
ShuffleBlockId(0, 2, 0).toString, new BlockNotFoundException("blah"))
sem.release()
}
})
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(remoteBmId, blocks.keys.map(blockId => (blockId, 1.asInstanceOf[Long])).toSeq)).toIterator
val taskContext = TaskContext.empty()
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => in,
48 * 1024 * 1024,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
false,
taskContext.taskMetrics.createTempShuffleReadMetrics())
// Continue only after the mock calls onBlockFetchFailure
sem.acquire()
// The first block should be returned without an exception, and the last two should throw
// FetchFailedExceptions (due to failure)
iterator.next()
intercept[FetchFailedException] { iterator.next() }
intercept[FetchFailedException] { iterator.next() }
}
private def mockCorruptBuffer(size: Long = 1L, corruptAt: Int = 0): ManagedBuffer = {
val corruptStream = new CorruptStream(corruptAt)
val corruptBuffer = mock(classOf[ManagedBuffer])
when(corruptBuffer.size()).thenReturn(size)
when(corruptBuffer.createInputStream()).thenReturn(corruptStream)
corruptBuffer
}
private class CorruptStream(corruptAt: Long = 0L) extends InputStream {
var pos = 0
var closed = false
override def read(): Int = {
if (pos >= corruptAt) {
throw new IOException("corrupt")
} else {
pos += 1
pos
}
}
override def read(dest: Array[Byte], off: Int, len: Int): Int = {
super.read(dest, off, len)
}
override def close(): Unit = { closed = true }
}
test("retry corrupt blocks") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
// Make sure remote blocks would return
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val blocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer()
)
// Semaphore to coordinate event sequence in two different threads.
val sem = new Semaphore(0)
val corruptLocalBuffer = new FileSegmentManagedBuffer(null, new File("a"), 0, 100)
val transfer = mock(classOf[BlockTransferService])
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
.thenAnswer((invocation: InvocationOnMock) => {
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
Future {
// Return the first block, and then fail.
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 1, 0).toString, mockCorruptBuffer())
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 2, 0).toString, corruptLocalBuffer)
sem.release()
}
})
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(remoteBmId, blocks.keys.map(blockId => (blockId, 1.asInstanceOf[Long])).toSeq)).toIterator
val taskContext = TaskContext.empty()
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => new LimitedInputStream(in, 100),
48 * 1024 * 1024,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
true,
taskContext.taskMetrics.createTempShuffleReadMetrics())
// Continue only after the mock calls onBlockFetchFailure
sem.acquire()
// The first block should be returned without an exception
val (id1, _) = iterator.next()
assert(id1 === ShuffleBlockId(0, 0, 0))
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
.thenAnswer((invocation: InvocationOnMock) => {
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
Future {
// Return the first block, and then fail.
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 1, 0).toString, mockCorruptBuffer())
sem.release()
}
})
// The next block is the corrupt local block (the second, remote block was corrupt and is retried)
intercept[FetchFailedException] { iterator.next() }
sem.acquire()
intercept[FetchFailedException] { iterator.next() }
}
test("big blocks are also checked for corruption") {
val streamLength = 10000L
val blockManager = mock(classOf[BlockManager])
val localBlockManagerId = BlockManagerId("local-client", "local-client", 1)
doReturn(localBlockManagerId).when(blockManager).blockManagerId
// This stream will throw IOException when the first byte is read
val corruptBuffer1 = mockCorruptBuffer(streamLength, 0)
val blockManagerId1 = BlockManagerId("remote-client-1", "remote-client-1", 1)
val shuffleBlockId1 = ShuffleBlockId(0, 1, 0)
val blockLengths1 = Seq[Tuple2[BlockId, Long]](
shuffleBlockId1 -> corruptBuffer1.size()
)
val streamNotCorruptTill = 8 * 1024
// This stream will throw exception after streamNotCorruptTill bytes are read
val corruptBuffer2 = mockCorruptBuffer(streamLength, streamNotCorruptTill)
val blockManagerId2 = BlockManagerId("remote-client-2", "remote-client-2", 2)
val shuffleBlockId2 = ShuffleBlockId(0, 2, 0)
val blockLengths2 = Seq[Tuple2[BlockId, Long]](
shuffleBlockId2 -> corruptBuffer2.size()
)
val transfer = createMockTransfer(
Map(shuffleBlockId1 -> corruptBuffer1, shuffleBlockId2 -> corruptBuffer2))
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(blockManagerId1, blockLengths1),
(blockManagerId2, blockLengths2)
).toIterator
val taskContext = TaskContext.empty()
val maxBytesInFlight = 3 * 1024
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => new LimitedInputStream(in, streamLength),
maxBytesInFlight,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
true,
taskContext.taskMetrics.createTempShuffleReadMetrics())
// We'll get back the block which has corruption after maxBytesInFlight/3 because the other
// block will detect corruption on first fetch, and then get added to the queue again for
// a retry
val (id, st) = iterator.next()
assert(id === shuffleBlockId2)
// The other block will throw a FetchFailedException
intercept[FetchFailedException] {
iterator.next()
}
// Following will succeed as it reads part of the stream which is not corrupt. This will read
// maxBytesInFlight/3 bytes from the portion copied into memory, and remaining from the
// underlying stream
new DataInputStream(st).readFully(
new Array[Byte](streamNotCorruptTill), 0, streamNotCorruptTill)
// Following will fail as it reads the remaining part of the stream which is corrupt
intercept[FetchFailedException] { st.read() }
// Buffers are mocked and they return the original input corrupt streams
assert(corruptBuffer1.createInputStream().asInstanceOf[CorruptStream].closed)
assert(corruptBuffer2.createInputStream().asInstanceOf[CorruptStream].closed)
}
test("ensure big blocks available as a concatenated stream can be read") {
val tmpDir = Utils.createTempDir()
val tmpFile = new File(tmpDir, "someFile.txt")
val os = new FileOutputStream(tmpFile)
val buf = ByteBuffer.allocate(10000)
for (i <- 1 to 2500) {
buf.putInt(i)
}
os.write(buf.array())
os.close()
val managedBuffer = new FileSegmentManagedBuffer(null, tmpFile, 0, 10000)
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
doReturn(managedBuffer).when(blockManager).getBlockData(ShuffleBlockId(0, 0, 0))
val localBlockLengths = Seq[Tuple2[BlockId, Long]](
ShuffleBlockId(0, 0, 0) -> 10000
)
val transfer = createMockTransfer(Map(ShuffleBlockId(0, 0, 0) -> managedBuffer))
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(localBmId, localBlockLengths)
).toIterator
val taskContext = TaskContext.empty()
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => new LimitedInputStream(in, 10000),
2048,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
true,
taskContext.taskMetrics.createTempShuffleReadMetrics())
val (id, st) = iterator.next()
// Check that the test setup is correct -- make sure we have a concatenated stream.
assert (st.asInstanceOf[BufferReleasingInputStream].delegate.isInstanceOf[SequenceInputStream])
val dst = new DataInputStream(st)
for (i <- 1 to 2500) {
assert(i === dst.readInt())
}
assert(dst.read() === -1)
dst.close()
}
test("retry corrupt blocks (disabled)") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
// Make sure remote blocks would return
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val blocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer()
)
// Semaphore to coordinate event sequence in two different threads.
val sem = new Semaphore(0)
val transfer = mock(classOf[BlockTransferService])
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
.thenAnswer((invocation: InvocationOnMock) => {
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
Future {
// Return the first block, and then fail.
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 1, 0).toString, mockCorruptBuffer())
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 2, 0).toString, mockCorruptBuffer())
sem.release()
}
})
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(remoteBmId, blocks.keys.map(blockId => (blockId, 1.asInstanceOf[Long])).toSeq)).toIterator
val taskContext = TaskContext.empty()
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => new LimitedInputStream(in, 100),
48 * 1024 * 1024,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
false,
taskContext.taskMetrics.createTempShuffleReadMetrics())
// Continue only after the mock calls onBlockFetchFailure
sem.acquire()
// The first block should be returned without an exception
val (id1, _) = iterator.next()
assert(id1 === ShuffleBlockId(0, 0, 0))
val (id2, _) = iterator.next()
assert(id2 === ShuffleBlockId(0, 1, 0))
val (id3, _) = iterator.next()
assert(id3 === ShuffleBlockId(0, 2, 0))
}
test("Blocks should be shuffled to disk when size of the request is above the" +
" threshold(maxReqSizeShuffleToMem).") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
val diskBlockManager = mock(classOf[DiskBlockManager])
val tmpDir = Utils.createTempDir()
doReturn{
val blockId = TempLocalBlockId(UUID.randomUUID())
(blockId, new File(tmpDir, blockId.name))
}.when(diskBlockManager).createTempLocalBlock()
doReturn(diskBlockManager).when(blockManager).diskBlockManager
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val remoteBlocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer())
val transfer = mock(classOf[BlockTransferService])
var tempFileManager: DownloadFileManager = null
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
.thenAnswer((invocation: InvocationOnMock) => {
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
tempFileManager = invocation.getArguments()(5).asInstanceOf[DownloadFileManager]
Future {
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 0, 0).toString, remoteBlocks(ShuffleBlockId(0, 0, 0)))
}
})
def fetchShuffleBlock(
blocksByAddress: Iterator[(BlockManagerId, Seq[(BlockId, Long)])]): Unit = {
// Set `maxBytesInFlight` and `maxReqsInFlight` to `Int.MaxValue`, so that during the
// construction of `ShuffleBlockFetcherIterator`, all requests to fetch remote shuffle blocks
// are issued. The `maxReqSizeShuffleToMem` is hard-coded as 200 here.
val taskContext = TaskContext.empty()
new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => in,
maxBytesInFlight = Int.MaxValue,
maxReqsInFlight = Int.MaxValue,
maxBlocksInFlightPerAddress = Int.MaxValue,
maxReqSizeShuffleToMem = 200,
detectCorrupt = true,
false,
taskContext.taskMetrics.createTempShuffleReadMetrics())
}
val blocksByAddress1 = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(remoteBmId, remoteBlocks.keys.map(blockId => (blockId, 100L)).toSeq)).toIterator
fetchShuffleBlock(blocksByAddress1)
// `maxReqSizeShuffleToMem` is 200, which is greater than the block size 100, so don't fetch
// shuffle block to disk.
assert(tempFileManager == null)
val blocksByAddress2 = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(remoteBmId, remoteBlocks.keys.map(blockId => (blockId, 300L)).toSeq)).toIterator
fetchShuffleBlock(blocksByAddress2)
// `maxReqSizeShuffleToMem` is 200, which is smaller than the block size 300, so fetch
// shuffle block to disk.
assert(tempFileManager != null)
}
test("fail zero-size blocks") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
// Make sure remote blocks would return
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val blocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer()
)
val transfer = createMockTransfer(blocks.mapValues(_ => createMockManagedBuffer(0)))
val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long)])](
(remoteBmId, blocks.keys.map(blockId => (blockId, 1.asInstanceOf[Long])).toSeq))
val taskContext = TaskContext.empty()
val iterator = new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress.toIterator,
(_, in) => in,
48 * 1024 * 1024,
Int.MaxValue,
Int.MaxValue,
Int.MaxValue,
true,
false,
taskContext.taskMetrics.createTempShuffleReadMetrics())
// All blocks fetched return zero length and should trigger a receive-side error:
val e = intercept[FetchFailedException] { iterator.next() }
assert(e.getMessage.contains("Received a zero-size buffer"))
}
}
|
aosagie/spark
|
core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
|
Scala
|
apache-2.0
| 28,621 |
package com.eevolution.context.dictionary.domain.model
import ai.x.play.json.Jsonx
import com.eevolution.context.dictionary.api.{ActiveEnabled, DomainModel, Identifiable, Traceable}
import org.joda.time.DateTime
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/e-Evolution
* Created by [email protected] , www.e-evolution.com
*/
/**
* Browse Entity
* @param browseId Browse ID
* @param tenantId Tenant ID
* @param organizationId Organization ID
* @param created Created
* @param createdBy Created By
* @param isActive Is Active
* @param updated Updated
* @param updatedBy Updated By
* @param description Description
* @param entityType Entity Type
* @param help Help
* @param name Name
* @param value Value
* @param viewId View ID
* @param whereClause Where Clause
* @param processing Processing
* @param processId Process ID
* @param isBetaFunctionality Is Beta Functionality
* @param accessLevel Access Level
* @param copyFrom Copy From
* @param isDeleteable Is Deleteable
* @param isCollapsibleByDefault Is Collapsible By Default
* @param isSelectedByDefault Is Selected By Default
* @param isExecutedQueryByDefault Is Executed Query By Default
* @param windowId Window ID
* @param isShowTotal Is Show Total
* @param uuid UUID
*/
case class Browse(browseId: Int,
tenantId: Int,
organizationId: Int,
created: DateTime = DateTime.now,
createdBy: Int,
isActive: Boolean = true,
updated: DateTime = DateTime.now,
updatedBy: Int,
description: Option[String],
entityType: String,
help: Option[String],
name: String,
value: Option[String],
viewId: Int,
whereClause: Option[String],
processing: Option[Boolean],
processId: Option[Int],
isBetaFunctionality: Boolean = false,
accessLevel: Int = 4,
copyFrom: Option[Boolean],
isDeleteable: Boolean = false,
isCollapsibleByDefault: Boolean = false,
isSelectedByDefault: Boolean = false,
isExecutedQueryByDefault: Boolean = false,
windowId: Option[Int],
isShowTotal: Boolean = false,
uuid: String
) extends DomainModel
with ActiveEnabled
with Identifiable
with Traceable {
override type ActiveEnabled = this.type
override type Identifiable = this.type
override type Traceable = this.type
override def Id: Int = browseId
override val entityName: String = "AD_Browse"
override val identifier: String = "AD_Browse_ID"
}
object Browse {
implicit lazy val jsonFormat = Jsonx.formatCaseClass[Browse]
def create(browseId: Int,
tenantId: Int,
organizationId: Int,
created: DateTime,
createdBy: Int,
isActive: Boolean,
updated: DateTime,
updatedBy: Int,
description: String,
entityType: String,
help: String,
name: String,
value: String,
viewId: Int,
whereClause: String,
processing: Boolean,
processId: Int,
isBetaFunctionality: Boolean,
accessLevel: Int,
copyFrom: Boolean,
isDeleteable: Boolean,
isCollapsibleByDefault: Boolean,
isSelectedByDefault: Boolean,
isExecutedQueryByDefault: Boolean,
windowId: Int,
isShowTotal: Boolean,
uuid: String) = Browse(browseId, tenantId, organizationId, created, createdBy, isActive, updated,
updatedBy, None, entityType, None , name, None, viewId, None, None,
None, isBetaFunctionality, accessLevel, None, isDeleteable, isCollapsibleByDefault,
isSelectedByDefault, isExecutedQueryByDefault, None, isShowTotal, uuid)
}
|
adempiere/ADReactiveSystem
|
dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/model/Browse.scala
|
Scala
|
gpl-3.0
| 4,976 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.tools.data.downloader.consumer
sealed trait ConsumeState
case class SuccessState(fails: Int) extends ConsumeState // normal state
case class LightFailure(fails: Int, successesInRow: Int) extends ConsumeState // a few errors
case class HeavyFailure(successesInRow: Int) extends ConsumeState // a lot of errors
/**
* State machine of consume states
*
* @see [[ConsumeState]]
*/
object ConsumeStateHandler {
val successToLightFailureThreshold = 2
val lightToHeavyFailureThreshold = 2
val lightFailureToSuccessThreshold = 15
val heavyFailureToLightFailureThreshold = 15
/**
* Determines the next consume state after a failure event
* @param state current consume state
* @return next consume state after failure event
*/
def nextFailure(state: ConsumeState) = state match {
case SuccessState(fails) if fails == successToLightFailureThreshold => LightFailure(0, 0)
case SuccessState(fails) => SuccessState(fails = fails + 1) // still in success
case LightFailure(fails, successesInRow) if fails == lightToHeavyFailureThreshold => HeavyFailure(0)
case LightFailure(fails, successesInRow) => LightFailure(fails = fails + 1, successesInRow = 0)
case HeavyFailure(successesInRow) => HeavyFailure(successesInRow = 0)
}
/**
* Determines the next consume state after a success event
* @param state current consume state
* @return next consume state after success event
*/
def nextSuccess(state: ConsumeState) = state match {
case s@SuccessState(_) => s
case s@LightFailure(fails, successesInRow) if successesInRow == lightFailureToSuccessThreshold => SuccessState(0)
case s@LightFailure(fails, successesInRow) => LightFailure(fails = 0, successesInRow = successesInRow + 1)
case HeavyFailure(successesInRow) if successesInRow == heavyFailureToLightFailureThreshold => LightFailure(0, 0)
case HeavyFailure(successesInRow) => HeavyFailure(successesInRow = successesInRow + 1)
}
}
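// A small illustrative walk-through (not part of the original sources) of how the state machine
// above advances; the hypothetical object below just folds events through the handler.
object ConsumeStateHandlerExample {
  def demo(): Unit = {
    // Three consecutive failures push a fresh SuccessState into LightFailure
    // (the transition fires once `fails` reaches successToLightFailureThreshold).
    val afterFailures = (1 to 3).foldLeft[ConsumeState](SuccessState(0)) {
      (state, _) => ConsumeStateHandler.nextFailure(state)
    }
    println(afterFailures) // LightFailure(0,0)
    // A long enough run of successes (lightFailureToSuccessThreshold in a row) recovers to SuccessState.
    val recovered = (1 to 20).foldLeft(afterFailures) {
      (state, _) => ConsumeStateHandler.nextSuccess(state)
    }
    println(recovered) // SuccessState(0)
  }
}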
|
nruppin/CM-Well
|
server/cmwell-data-tools/src/main/scala/cmwell/tools/data/downloader/consumer/ConsumeStateHandler.scala
|
Scala
|
apache-2.0
| 2,653 |
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion.expression
case class ParameterFields(
id: String,
description: Option[String] = None
)(implicit val pv: ParameterValues)
|
realstraw/hyperion
|
core/src/main/scala/com/krux/hyperion/expression/ParameterFields.scala
|
Scala
|
bsd-3-clause
| 382 |
package models.daos.traits.core
import play.api.libs.json.JsValue
import scala.concurrent.Future
import scala.util.Try
import play.api.libs.json.OWrites
import play.modules.reactivemongo.json.collection.JSONCollection
import play.api.libs.json.Reads
import play.api.libs.json.JsObject
import play.api.libs.json.Writes
import play.api.libs.json.Json
trait CrudDAO[T] {
def create(entity: T)(implicit tjs: OWrites[T]): Future[Try[String]]
def read(id: String)(implicit readsT : Reads[T]): Future[Option[T]]
def readsText(selector: JsObject)(implicit readsT: Reads[T]): Future[List[T]]
def delete(id: String): Future[Try[Unit]]
def update(id: String, updates: JsValue): Future[Try[Unit]]
def findAll()(implicit readsT : Reads[T]): Future[List[T]]
def findProjection(selector: JsObject, projection: JsObject)(implicit readsT: Reads[T]): Future[List[T]]
def findSimpleProjection(selector: JsObject, projection: JsObject)(implicit readsT: Reads[T]): Future[Option[T]]
}
|
carlosFattor/DoceTentacaoScala
|
app/models/daos/traits/core/CrudDAO.scala
|
Scala
|
apache-2.0
| 1,000 |
package fr.hmil.roshttp
import scala.scalajs.js
private object CrossPlatformUtils {
def encodeURIComponent(query: String): String =
js.URIUtils.encodeURIComponent(query)
def decodeURIComponent(query: String): String =
js.URIUtils.decodeURIComponent(query)
}
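// A tiny illustrative sketch (not part of the original library); since the object is package-private,
// this hypothetical caller must live inside fr.hmil.roshttp.
private object CrossPlatformUtilsExample {
  // Reserved characters are percent-encoded; decoding reverses it.
  val encoded: String = CrossPlatformUtils.encodeURIComponent("a b&c") // "a%20b%26c"
  val decoded: String = CrossPlatformUtils.decodeURIComponent(encoded) // "a b&c"
}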
|
hmil/RosHTTP
|
js/src/main/scala/fr/hmil/roshttp/CrossPlatformUtils.scala
|
Scala
|
mit
| 275 |
trait ValueDiscard[@specialized U] {
def u: U
}
/* Was:
scalac-hash v2.11.5 -Ywarn-value-discard test/files/pos/t9020.scala
test/files/pos/t9020.scala:2: warning: discarded non-Unit value
def u: U
^
one warning found
*/
trait DiscardThis {
import collection.mutable.ListBuffer
val b = ListBuffer.empty[String]
def add(s: String): Unit = b += s
}
|
loskutov/intellij-scala
|
testdata/scalacTests/pos/t9020.scala
|
Scala
|
apache-2.0
| 363 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package integration.security
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.testkit.{ImplicitSender, TestKit}
import org.apache.toree.kernel.protocol.v5._
import org.apache.toree.communication.security.{Hmac, SignatureCheckerActor}
import com.typesafe.config.ConfigFactory
import org.scalatest.{BeforeAndAfter, FunSpecLike, Matchers}
import play.api.libs.json.Json
object SignatureCheckerActorSpecForIntegration {
val config = """
akka {
loglevel = "WARNING"
}"""
}
class SignatureCheckerActorSpecForIntegration extends TestKit(
ActorSystem(
"SignatureCheckerActorSpec",
ConfigFactory.parseString(SignatureCheckerActorSpecForIntegration.config)
)
) with ImplicitSender with FunSpecLike with Matchers with BeforeAndAfter
{
private val sigKey = "12345"
private val signature =
"1c4859a7606fd93eb5f73c3d9642f9bc860453ba42063961a00d02ed820147b5"
private val goodMessage =
KernelMessage(
null, signature,
Header("a", "b", "c", "d", "e"),
ParentHeader("f", "g", "h", "i", "j"),
Metadata(),
"<STRING>"
)
private val badMessage =
KernelMessage(
null, "wrong signature",
Header("a", "b", "c", "d", "e"),
ParentHeader("f", "g", "h", "i", "j"),
Metadata(),
"<STRING>"
)
private var signatureChecker: ActorRef = _
before {
val hmac = Hmac(sigKey)
signatureChecker =
system.actorOf(Props(classOf[SignatureCheckerActor], hmac))
}
after {
signatureChecker = null
}
describe("SignatureCheckerActor") {
describe("#receive") {
it("should return true if the kernel message is valid") {
val blob =
Json.stringify(Json.toJson(goodMessage.header)) ::
Json.stringify(Json.toJson(goodMessage.parentHeader)) ::
Json.stringify(Json.toJson(goodMessage.metadata)) ::
goodMessage.contentString ::
Nil
signatureChecker ! ((goodMessage.signature, blob))
expectMsg(true)
}
it("should return false if the kernel message is invalid") {
val blob =
Json.stringify(Json.toJson(badMessage.header)) ::
Json.stringify(Json.toJson(badMessage.parentHeader)) ::
Json.stringify(Json.toJson(badMessage.metadata)) ::
badMessage.contentString ::
Nil
signatureChecker ! ((badMessage.signature, blob))
expectMsg(false)
}
}
}
}
|
Myllyenko/incubator-toree
|
communication/src/test/scala/integration/security/SignatureCheckerActorSpecForIntegration.scala
|
Scala
|
apache-2.0
| 3,243 |
import org.specs2.mutable._
import org.specs2.runner._
import org.junit.runner._
import play.api.test._
import play.api.test.Helpers._
/**
* Add your integration spec here.
* An integration test will fire up a whole Play application in a real (or headless) browser.
*/
@RunWith(classOf[JUnitRunner])
class IntegrationSpec extends Specification {
"Application" should {
"work from within a browser" in new WithBrowser {
browser.goTo("http://localhost:" + port)
browser.pageSource must contain("The lightning tracker application is ready.")
}
}
}
|
JavaPosseRoundup/lightning
|
test/IntegrationSpec.scala
|
Scala
|
apache-2.0
| 577 |
import sbt._
object GlobalLegacyPlugin extends sbt.Plugin {
import sbt.Keys._
val globalLegacyPluginSetting = SettingKey[String]("A top level setting declared by a legacy plugin")
val useGlobalLegacyPluginSetting = globalLegacyPluginSetting in Global
}
|
som-snytt/xsbt
|
sbt/src/sbt-test/project/global-settings/global/plugins/B.scala
|
Scala
|
bsd-3-clause
| 260 |
package xml.claim
import app.ConfigProperties._
import controllers.mappings.Mappings
import models.DayMonthYear
import models.domain._
import org.joda.time.format.DateTimeFormat
import org.joda.time.{Months, DateTime}
import play.api.Logger
import xml.XMLComponent
import scala.xml.NodeSeq
/**
* Generates the XML representing the assisted decisions.
*
* @author Jorge Migueis/Peter Whitehead
*/
object AssistedDecision extends XMLComponent {
val FIFTEENYEARS9MONTHS = 15 * 12 + 9
val noDecisionSoFar: AssistedDecisionDetails = new AssistedDecisionDetails
val blankDecisionShowTable = decisionModel("None", "None,show table")
def createAssistedDecisionDetails(claim: Claim): Claim = {
val isDecisionMade = (assisted: AssistedDecisionDetails) => assisted.reason != ""
val fnList = Array[(Claim) => AssistedDecisionDetails](isTooYoung _, dateOfClaim _, caringHours _, isInReceiptOfBenefit _, isAFIP _, yesEEAGuardWork _, isInResidency _, isInEducation _, isBlankShowTable _, isTooOld _, defaultDecision _)
val decision = process(isDecisionMade, claim)(fnList)
claim.update(decision)
}
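// The decision functions in fnList are evaluated in order by `process` (defined near the end of
// this object): the first one that returns a non-empty reason wins and short-circuits the
// remaining checks, with defaultDecision acting as the final fall-through.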
def xml(claim: Claim) = {
decisionElement(claim.questionGroup[AssistedDecisionDetails].getOrElse(new AssistedDecisionDetails))
}
// ============ Decision functions ====================
private def isTooYoung(claim: Claim): AssistedDecisionDetails = {
val submitDate = DateTime.now
val yourDetails = claim.questionGroup[YourDetails].getOrElse(YourDetails())
yourDetails.dateOfBirth match {
case DayMonthYear(None, None, None, None, None) => noDecisionSoFar
case _ => {
val dateOfBirthDate = DateTime.parse(yourDetails.dateOfBirth.`dd-MM-yyyy`, DateTimeFormat.forPattern("dd-MM-yyyy"))
val monthsOld = Months.monthsBetween(dateOfBirthDate.withTimeAtStartOfDay(), submitDate.withTimeAtStartOfDay())
if (monthsOld.getMonths < FIFTEENYEARS9MONTHS)
decisionModel("Customer does not turn 16 in next 3 months. Send Proforma 491 to customer.", "Potential disallowance decision,no table")
else
noDecisionSoFar
}
}
}
private def dateOfClaim(claim: Claim): AssistedDecisionDetails = {
if (isOverThreeMonths(claim)) decisionModel("Claim date over 3 months into future.", "Potential disallowance decision,no table")
else noDecisionSoFar
}
private def caringHours(claim: Claim): AssistedDecisionDetails = {
if (!isOver35Hours(claim)) decisionModel("Not caring 35 hours a week.", "Potential disallowance decision,no table")
else noDecisionSoFar
}
def checkBenefits(benefitsAnswer: String) = {
benefitsAnswer match {
case Benefits.aa | Benefits.pip | Benefits.dla | Benefits.caa => true
case _ => false
}
}
private def isInReceiptOfBenefit(claim: Claim): AssistedDecisionDetails = {
if (claim.questionGroup[Benefits].getOrElse(new Benefits()).benefitsAnswer == Benefits.noneOfTheBenefits) decisionModel("DP on No QB. Check CIS.", "Potential disallowance decision,show table")
else noDecisionSoFar
}
private def isAFIP(claim: Claim): AssistedDecisionDetails = {
if (claim.questionGroup[Benefits].getOrElse(new Benefits()).benefitsAnswer == Benefits.afip) decisionModel("Assign to AFIP officer on CAMLite workflow.", "None,show table")
else noDecisionSoFar
}
private def yesEEAGuardWork(claim: Claim): AssistedDecisionDetails = {
val paymentsFromAbroad = claim.questionGroup[PaymentsFromAbroad].getOrElse(PaymentsFromAbroad())
if (paymentsFromAbroad.guardQuestion.answer == "yes" && paymentsFromAbroad.guardQuestion.field1.get.answer == "yes")
decisionModel("Assign to Exportability in CAMLite workflow.", "None,show table")
else if (paymentsFromAbroad.guardQuestion.answer == "yes" && paymentsFromAbroad.guardQuestion.field2.get.answer == "yes")
decisionModel("Assign to Exportability in CAMLite workflow.", "None,show table")
else noDecisionSoFar
}
private def isInResidency(claim: Claim): AssistedDecisionDetails = {
val residency = claim.questionGroup[NationalityAndResidency].getOrElse(NationalityAndResidency(nationality = "British"))
(residency.alwaysLivedInUK, residency.liveInUKNow, residency.arrivedInUK) match {
case ("no", Some("no"), _) => decisionModel("Assign to Exportability in CAMLite workflow.", "None,show table")
case ("no", _, Some("less")) => decisionModel("Assign to Exportability in CAMLite workflow (Consider refugee status).", "None,show table")
case _ => noDecisionSoFar
}
}
private def isInEducation(claim: Claim): AssistedDecisionDetails = {
if (claim.questionGroup[YourCourseDetails].getOrElse(new YourCourseDetails()).beenInEducationSinceClaimDate == "yes")
decisionModel("Send DS790/790B COMB to customer.", "None,show table")
else noDecisionSoFar
}
// If there is no date of claim then it's a test with missing data, so default to the happy path
private def lessThan65YearsOldAtClaimDate(claim: Claim) = {
val yourDetails = claim.questionGroup[YourDetails].getOrElse(YourDetails())
claim.dateOfClaim match {
case Some(dmy) => yourDetails.dateOfBirth.yearsDiffWith(dmy) < getIntProperty("age.hide.paydetails")
case _ => true
}
}
private def isBlankShowTable(claim: Claim): AssistedDecisionDetails = {
val yourIncomes = claim.questionGroup[YourIncomes].getOrElse(models.domain.YourIncomes())
val nationalityAndResidency = claim.questionGroup[NationalityAndResidency].getOrElse(NationalityAndResidency(nationality = "British"))
if (nationalityAndResidency.nationality == NationalityAndResidency.anothercountry && nationalityAndResidency.actualnationality.getOrElse("").length() > 0) {
Logger.info(s"AssistedDecision anotherNationality means emptyDecision")
blankDecisionShowTable
}
else if (nationalityAndResidency.trip52weeks == "yes") {
Logger.info(s"AssistedDecision trip52weeks means emptyDecision")
blankDecisionShowTable
}
else if (claim.questionGroup[BreaksInCare].getOrElse(BreaksInCare()).hasBreaks) {
Logger.info(s"AssistedDecision breaksInCare means emptyDecision")
blankDecisionShowTable
}
else if (claim.questionGroup[YourCourseDetails].getOrElse(YourCourseDetails()).beenInEducationSinceClaimDate == "yes") {
Logger.info(s"AssistedDecision Education means emptyDecision")
blankDecisionShowTable
}
else if (yourIncomes.beenEmployedSince6MonthsBeforeClaim == "yes") {
Logger.info(s"AssistedDecision Employed means emptyDecision")
blankDecisionShowTable
}
else if (yourIncomes.beenSelfEmployedSince1WeekBeforeClaim == "yes") {
Logger.info(s"AssistedDecision SelfEmployed means emptyDecision")
blankDecisionShowTable
}
else if (
yourIncomes.yourIncome_sickpay == Mappings.someTrue
|| yourIncomes.yourIncome_patmatadoppay == Mappings.someTrue
|| yourIncomes.yourIncome_fostering == Mappings.someTrue
|| yourIncomes.yourIncome_directpay == Mappings.someTrue
|| yourIncomes.yourIncome_rentalincome == Mappings.someTrue
|| yourIncomes.yourIncome_anyother == Mappings.someTrue) {
Logger.info(s"AssistedDecision Income means emptyDecision")
blankDecisionShowTable
}
else if (claim.questionGroup[HowWePayYou].getOrElse(HowWePayYou()).likeToBePaid == "no" && lessThan65YearsOldAtClaimDate(claim)) {
Logger.info(s"AssistedDecision under65 no LikeToBePaid means emptyDecision")
blankDecisionShowTable
}
else if (claim.questionGroup[AdditionalInfo].getOrElse(AdditionalInfo()).anythingElse.answer == "yes") {
Logger.info(s"AssistedDecision AdditionalInfo means emptyDecision")
blankDecisionShowTable
}
else {
noDecisionSoFar
}
}
private def isTooOld(claim: Claim): AssistedDecisionDetails = {
if (lessThan65YearsOldAtClaimDate(claim)) noDecisionSoFar
else decisionModel("Check CIS for benefits. Send Pro517 if relevant.", "Potential underlying entitlement,show table")
}
private def defaultDecision(claim: Claim): AssistedDecisionDetails = {
Logger.info(s"AssistedDecision default happy path means check CIS")
decisionModel("Check CIS for benefits. Send Pro517 if relevant.", "Potential award,show table")
}
private def isOver35Hours(claim: Claim): Boolean = {
val hours = claim.questionGroup[MoreAboutTheCare].getOrElse(MoreAboutTheCare())
hours.spent35HoursCaring.getOrElse("").toLowerCase == "yes"
}
private def isEEA(claim: Claim): Boolean = {
val paymentsFromAbroad = claim.questionGroup[PaymentsFromAbroad].getOrElse(PaymentsFromAbroad())
if (paymentsFromAbroad.guardQuestion.answer == "yes" &&
(paymentsFromAbroad.guardQuestion.field1.get.answer == "yes" ||
paymentsFromAbroad.guardQuestion.field2.get.answer == "yes"))
true
else false
}
private def isOverThreeMonths(claim: Claim): Boolean = {
val claimDateAnswer = claim.questionGroup[ClaimDate].getOrElse(ClaimDate())
val monthsFuture = DateTime.now().plusMonths(3)
val claimDate = new DateTime(claimDateAnswer.dateOfClaim.year.get, claimDateAnswer.dateOfClaim.month.get, claimDateAnswer.dateOfClaim.day.get, 0, 0)
claimDate.isAfter(monthsFuture)
}
private def decisionElement(assistedDecision: AssistedDecisionDetails) = <AssistedDecisions><AssistedDecision><Reason>{assistedDecision.reason}</Reason><RecommendedDecision>{assistedDecision.recommendation}</RecommendedDecision></AssistedDecision></AssistedDecisions>
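// For example, decisionElement(decisionModel("Not caring 35 hours a week.", "Potential disallowance decision,no table"))
// renders as <AssistedDecisions><AssistedDecision><Reason>Not caring 35 hours a week.</Reason><RecommendedDecision>Potential disallowance decision,no table</RecommendedDecision></AssistedDecision></AssistedDecisions>.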
private def decisionModel(reason: String, decision: String): AssistedDecisionDetails = new AssistedDecisionDetails(reason, decision)
private def process(decision: AssistedDecisionDetails => Boolean, claim: Claim)(fns: Array[(Claim) => AssistedDecisionDetails]): AssistedDecisionDetails = {
for (f <- fns) {
val result = f(claim)
if (decision(result)) return result
}
noDecisionSoFar
}
def fromXml(xml: NodeSeq, claim: Claim): Claim = {
val decisions = (xml \\ "AssistedDecisions" \ "AssistedDecision")
val assistedDecisionDetails = AssistedDecisionDetails(reason = (decisions \ "Reason").text, recommendation = (decisions \ "RecommendedDecision").text)
claim.update(assistedDecisionDetails)
}
}
|
Department-for-Work-and-Pensions/ClaimCapture
|
c3/app/xml/claim/AssistedDecision.scala
|
Scala
|
mit
| 10,275 |
package mesosphere.marathon.api.v2.json
import mesosphere.marathon.MarathonSpec
import mesosphere.marathon.state.{ AppDefinition, PathId, UpgradeStrategy, Timestamp }
import mesosphere.marathon.state.PathId._
import org.scalatest.Matchers
import play.api.libs.json._
class V2AppDefinitionFormatsTest
extends MarathonSpec
with V2Formats
with HealthCheckFormats
with Matchers {
import Formats.PathIdFormat
object Fixture {
val a1 = V2AppDefinition(
id = "app1".toPath,
cmd = Some("sleep 10"),
version = Timestamp(1)
)
val j1 = Json.parse("""
{
"id": "app1",
"cmd": "sleep 10",
"version": "1970-01-01T00:00:00.001Z"
}
""")
}
test("ToJson") {
import Fixture._
import AppDefinition._
val r1 = Json.toJson(a1)
// check supplied values
r1 \ "id" should equal (JsString("app1"))
r1 \ "cmd" should equal (JsString("sleep 10"))
r1 \ "version" should equal (JsString("1970-01-01T00:00:00.001Z"))
// check default values
r1 \ "args" should equal (JsNull)
r1 \ "user" should equal (JsNull)
r1 \ "env" should equal (JsObject(DefaultEnv.mapValues(JsString(_)).toSeq))
r1 \ "instances" should equal (JsNumber(DefaultInstances))
r1 \ "cpus" should equal (JsNumber(DefaultCpus))
r1 \ "mem" should equal (JsNumber(DefaultMem))
r1 \ "disk" should equal (JsNumber(DefaultDisk))
r1 \ "executor" should equal (JsString(DefaultExecutor))
r1 \ "constraints" should equal (Json.toJson(DefaultConstraints))
r1 \ "uris" should equal (Json.toJson(DefaultUris))
r1 \ "storeUrls" should equal (Json.toJson(DefaultStoreUrls))
r1 \ "ports" should equal (JsArray(DefaultPorts.map { p => JsNumber(p.toInt) }))
r1 \ "requirePorts" should equal (JsBoolean(DefaultRequirePorts))
r1 \ "backoffSeconds" should equal (JsNumber(DefaultBackoff.toSeconds))
r1 \ "backoffFactor" should equal (JsNumber(DefaultBackoffFactor))
r1 \ "maxLaunchDelaySeconds" should equal (JsNumber(DefaultMaxLaunchDelay.toSeconds))
r1 \ "container" should equal (JsNull)
r1 \ "healthChecks" should equal (Json.toJson(DefaultHealthChecks))
r1 \ "dependencies" should equal (Json.toJson(DefaultDependencies))
r1 \ "upgradeStrategy" should equal (Json.toJson(DefaultUpgradeStrategy))
}
test("FromJson") {
import Fixture._
import AppDefinition._
val r1 = j1.as[V2AppDefinition]
// check supplied values
r1.id should equal (a1.id)
r1.cmd should equal (a1.cmd)
r1.version should equal (Timestamp(1))
// check default values
r1.args should equal (DefaultArgs)
r1.user should equal (DefaultUser)
r1.env should equal (DefaultEnv)
r1.instances should equal (DefaultInstances)
r1.cpus should equal (DefaultCpus)
r1.mem should equal (DefaultMem)
r1.disk should equal (DefaultDisk)
r1.executor should equal (DefaultExecutor)
r1.constraints should equal (DefaultConstraints)
r1.uris should equal (DefaultUris)
r1.storeUrls should equal (DefaultStoreUrls)
r1.ports should equal (DefaultPorts)
r1.requirePorts should equal (DefaultRequirePorts)
r1.backoff should equal (DefaultBackoff)
r1.backoffFactor should equal (DefaultBackoffFactor)
r1.maxLaunchDelay should equal (DefaultMaxLaunchDelay)
r1.container should equal (DefaultContainer)
r1.healthChecks should equal (DefaultHealthChecks)
r1.dependencies should equal (DefaultDependencies)
r1.upgradeStrategy should equal (DefaultUpgradeStrategy)
r1.acceptedResourceRoles should not be ('defined)
}
test("FromJSON should fail for empty id") {
val json = Json.parse(""" { "id": "" }""")
a[JsResultException] shouldBe thrownBy { json.as[V2AppDefinition] }
}
test("FromJSON should fail when using / as an id") {
val json = Json.parse(""" { "id": "/" }""")
a[JsResultException] shouldBe thrownBy { json.as[V2AppDefinition] }
}
test("FromJSON should not fail when 'cpus' is greater than 0") {
val json = Json.parse(""" { "id": "test", "cpus": 0.0001 }""")
noException should be thrownBy {
json.as[V2AppDefinition]
}
}
test("FromJSON should fail when 'cpus' is less than or equal to 0") {
var json1 = Json.parse(""" { "id": "test", "cpus": 0.0 }""")
a[JsResultException] shouldBe thrownBy { json1.as[V2AppDefinition] }
val json2 = Json.parse(""" { "id": "test", "cpus": -1.0 }""")
a[JsResultException] shouldBe thrownBy { json2.as[V2AppDefinition] }
}
test("""ToJSON should correctly handle missing acceptedResourceRoles""") {
val appDefinition = V2AppDefinition(id = PathId("test"), acceptedResourceRoles = None)
val json = Json.toJson(appDefinition)
(json \ "acceptedResourceRoles").asOpt[Set[String]] should be(None)
}
test("""ToJSON should correctly handle acceptedResourceRoles""") {
val appDefinition = V2AppDefinition(id = PathId("test"), acceptedResourceRoles = Some(Set("a")))
val json = Json.toJson(appDefinition)
(json \ "acceptedResourceRoles").asOpt[Set[String]] should be(Some(Set("a")))
}
test("""FromJSON should parse "acceptedResourceRoles": ["production", "*"] """) {
val json = Json.parse(""" { "id": "test", "acceptedResourceRoles": ["production", "*"] }""")
val appDef = json.as[V2AppDefinition]
appDef.acceptedResourceRoles should equal(Some(Set("production", "*")))
}
test("""FromJSON should parse "acceptedResourceRoles": ["*"] """) {
val json = Json.parse(""" { "id": "test", "acceptedResourceRoles": ["*"] }""")
val appDef = json.as[V2AppDefinition]
appDef.acceptedResourceRoles should equal(Some(Set("*")))
}
test("FromJSON should fail when 'acceptedResourceRoles' is defined but empty") {
val json = Json.parse(""" { "id": "test", "acceptedResourceRoles": [] }""")
a[JsResultException] shouldBe thrownBy { json.as[V2AppDefinition] }
}
}
|
sepiroth887/marathon
|
src/test/scala/mesosphere/marathon/api/v2/json/V2AppDefinitionFormatsTest.scala
|
Scala
|
apache-2.0
| 5,919 |
package twos
import java.awt.{ Color, Font }
import scala.swing._
import scala.Vector
import twos.game._
import twos.ai._
object TwosForm extends scala.swing.SimpleSwingApplication {
trait CanvasProperties {
val AIDepth = 8
val AIMaxChecks = 1
val AINeighbourWeight = 5f
val AIClosenessWeight = 3f
val AIFullnessWeight = 3f
val SquareSize = 100
val InternalMargin = 10
val ExternalMargin = 20
val GridSize = 4
val BackgroundColorHex = 0xbbada0
val SquareColorsHex = Vector(
0xccc0b4, 0xeee4da, 0xede0c8, 0xf2b179, 0xec8d54, 0xf67c5f, 0xff6633, 0xf3d86b,
0xf1d04b, 0xe4c02a, 0xe2ba13, 0xecc400, 0x5fda93, 0x4e4439, 0x4e4439, 0x4e4439,
0x4e4439, 0x4e4439, 0x4e4439, 0x4e4439, 0x4e4439, 0x4e4439, 0x4e4439, 0x4e4439)
val TextColor1Hex = 0x776e65
val TextColor2Hex = 0xffffff
val FontSizes = Vector(40, 48, 48, 42, 32, 26, 20, 14, 8, 8, 8, 8, 8, 8, 8)
}
def top: Frame = new MainFrame {
val canv = new TwosCanvas with CanvasProperties
preferredSize = canv.getDisplayDimension
contents = canv
canv.init()
}
}
|
bch29/twos-ai
|
src/main/scala/twos/TwosForm.scala
|
Scala
|
gpl-2.0
| 1,142 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.parquet
import java.math.{BigDecimal => JBigDecimal}
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import java.time.{Duration, LocalDate, LocalDateTime, Period, ZoneId}
import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag
import org.apache.hadoop.fs.Path
import org.apache.parquet.filter2.predicate.{FilterApi, FilterPredicate, Operators}
import org.apache.parquet.filter2.predicate.FilterApi._
import org.apache.parquet.filter2.predicate.Operators.{Column => _, _}
import org.apache.parquet.hadoop.{ParquetFileReader, ParquetInputFormat, ParquetOutputFormat}
import org.apache.parquet.hadoop.util.HadoopInputFile
import org.apache.parquet.schema.MessageType
import org.apache.spark.{SparkConf, SparkException}
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.optimizer.InferFiltersFromConstraints
import org.apache.spark.sql.catalyst.planning.PhysicalOperation
import org.apache.spark.sql.catalyst.util.RebaseDateTime.RebaseSpec
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.parseColumnPath
import org.apache.spark.sql.execution.ExplainMode
import org.apache.spark.sql.execution.datasources.{DataSourceStrategy, HadoopFsRelation, LogicalRelation, PushableColumnAndNestedColumn}
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanRelation
import org.apache.spark.sql.execution.datasources.v2.parquet.ParquetScan
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy.{CORRECTED, LEGACY}
import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType.{INT96, TIMESTAMP_MICROS, TIMESTAMP_MILLIS}
import org.apache.spark.sql.test.SharedSparkSession
import org.apache.spark.sql.types._
import org.apache.spark.tags.ExtendedSQLTest
import org.apache.spark.util.{AccumulatorContext, AccumulatorV2, Utils}
/**
* A test suite that tests Parquet filter2 API based filter pushdown optimization.
*
* NOTE:
*
* 1. `!(a cmp b)` is always transformed to its negated form `a cmp' b` by the
* `BooleanSimplification` optimization rule whenever possible. As a result, predicate `!(a < 1)`
* results in a `GtEq` filter predicate rather than a `Not`.
*
* 2. `Tuple1(Option(x))` is used together with `AnyVal` types like `Int` to ensure the inferred
* data type is nullable.
*
* NOTE:
*
* This suite intentionally enables record-level filtering explicitly. If new test cases
* depend on this configuration, remember to set it explicitly within the test.
*/
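// Illustration of NOTE 1 above (a sketch, using the FilterApi helpers imported in this file):
// a predicate written as !(a < 1) reaches the Parquet layer already simplified, so the pushed
// filter is FilterApi.gtEq(intColumn("a"), 1: Integer) rather than FilterApi.not(FilterApi.lt(...)).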
abstract class ParquetFilterSuite extends QueryTest with ParquetTest with SharedSparkSession {
protected def createParquetFilters(
schema: MessageType,
caseSensitive: Option[Boolean] = None,
datetimeRebaseSpec: RebaseSpec = RebaseSpec(LegacyBehaviorPolicy.CORRECTED)
): ParquetFilters =
new ParquetFilters(schema, conf.parquetFilterPushDownDate, conf.parquetFilterPushDownTimestamp,
conf.parquetFilterPushDownDecimal, conf.parquetFilterPushDownStringStartWith,
conf.parquetFilterPushDownInFilterThreshold,
caseSensitive.getOrElse(conf.caseSensitiveAnalysis),
datetimeRebaseSpec)
override def beforeEach(): Unit = {
super.beforeEach()
// Note that many tests here require record-level filtering to be set to true.
spark.conf.set(SQLConf.PARQUET_RECORD_FILTER_ENABLED.key, "true")
}
override def afterEach(): Unit = {
try {
spark.conf.unset(SQLConf.PARQUET_RECORD_FILTER_ENABLED.key)
} finally {
super.afterEach()
}
}
def checkFilterPredicate(
df: DataFrame,
predicate: Predicate,
filterClass: Class[_ <: FilterPredicate],
checker: (DataFrame, Seq[Row]) => Unit,
expected: Seq[Row]): Unit
private def checkFilterPredicate
(predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: Seq[Row])
(implicit df: DataFrame): Unit = {
checkFilterPredicate(df, predicate, filterClass, checkAnswer(_, _: Seq[Row]), expected)
}
private def checkFilterPredicate[T]
(predicate: Predicate, filterClass: Class[_ <: FilterPredicate], expected: T)
(implicit df: DataFrame): Unit = {
checkFilterPredicate(predicate, filterClass, Seq(Row(expected)))(df)
}
/**
* Takes a sequence of products `data` to generate multi-level nested
* dataframes as new test data. It tests both non-nested and nested dataframes
* which are written and read back with Parquet datasource.
*
* This is different from [[ParquetTest.withParquetDataFrame]] which does not
* test nested cases.
*/
private def withNestedParquetDataFrame[T <: Product: ClassTag: TypeTag](data: Seq[T])
(runTest: (DataFrame, String, Any => Any) => Unit): Unit =
withNestedParquetDataFrame(spark.createDataFrame(data))(runTest)
private def withNestedParquetDataFrame(inputDF: DataFrame)
(runTest: (DataFrame, String, Any => Any) => Unit): Unit = {
withNestedDataFrame(inputDF).foreach { case (newDF, colName, resultFun) =>
withTempPath { file =>
newDF.write.format(dataSourceName).save(file.getCanonicalPath)
readParquetFile(file.getCanonicalPath) { df => runTest(df, colName, resultFun) }
}
}
}
private def testTimestampPushdown(data: Seq[String], java8Api: Boolean): Unit = {
implicit class StringToTs(s: String) {
def ts: Timestamp = Timestamp.valueOf(s)
}
assert(data.size === 4)
val ts1 = data.head
val ts2 = data(1)
val ts3 = data(2)
val ts4 = data(3)
import testImplicits._
val df = data.map(i => Tuple1(Timestamp.valueOf(i))).toDF()
withNestedParquetDataFrame(df) { case (parquetDF, colName, fun) =>
implicit val df: DataFrame = parquetDF
def resultFun(tsStr: String): Any = {
val parsed = if (java8Api) {
LocalDateTime.parse(tsStr.replace(" ", "T"))
.atZone(ZoneId.systemDefault())
.toInstant
} else {
Timestamp.valueOf(tsStr)
}
fun(parsed)
}
val tsAttr = df(colName).expr
assert(df(colName).expr.dataType === TimestampType)
checkFilterPredicate(tsAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(tsAttr.isNotNull, classOf[NotEq[_]],
data.map(i => Row.apply(resultFun(i))))
checkFilterPredicate(tsAttr === ts1.ts, classOf[Eq[_]], resultFun(ts1))
checkFilterPredicate(tsAttr <=> ts1.ts, classOf[Eq[_]], resultFun(ts1))
checkFilterPredicate(tsAttr =!= ts1.ts, classOf[NotEq[_]],
Seq(ts2, ts3, ts4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(tsAttr < ts2.ts, classOf[Lt[_]], resultFun(ts1))
checkFilterPredicate(tsAttr > ts1.ts, classOf[Gt[_]],
Seq(ts2, ts3, ts4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(tsAttr <= ts1.ts, classOf[LtEq[_]], resultFun(ts1))
checkFilterPredicate(tsAttr >= ts4.ts, classOf[GtEq[_]], resultFun(ts4))
checkFilterPredicate(Literal(ts1.ts) === tsAttr, classOf[Eq[_]], resultFun(ts1))
checkFilterPredicate(Literal(ts1.ts) <=> tsAttr, classOf[Eq[_]], resultFun(ts1))
checkFilterPredicate(Literal(ts2.ts) > tsAttr, classOf[Lt[_]], resultFun(ts1))
checkFilterPredicate(Literal(ts3.ts) < tsAttr, classOf[Gt[_]], resultFun(ts4))
checkFilterPredicate(Literal(ts1.ts) >= tsAttr, classOf[LtEq[_]], resultFun(ts1))
checkFilterPredicate(Literal(ts4.ts) <= tsAttr, classOf[GtEq[_]], resultFun(ts4))
checkFilterPredicate(!(tsAttr < ts4.ts), classOf[GtEq[_]], resultFun(ts4))
checkFilterPredicate(tsAttr < ts2.ts || tsAttr > ts3.ts, classOf[Operators.Or],
Seq(Row(resultFun(ts1)), Row(resultFun(ts4))))
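// The In() checks below exercise PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD: with four literal
// values and a threshold of 3 the pushed filter is expected to collapse into a range
// (Operators.And over the bounds), while a threshold of 20 keeps a chain of Or-ed equalities.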
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(tsAttr, Array(ts2.ts, ts3.ts, ts4.ts, "2021-05-01 00:01:02".ts).map(Literal.apply)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(ts2)), Row(resultFun(ts3)), Row(resultFun(ts4))))
}
}
}
}
// This function tests code paths that go exactly through `canDrop` and `inverseCanDrop`.
private def testStringStartsWith(dataFrame: DataFrame, filter: String): Unit = {
withTempPath { dir =>
val path = dir.getCanonicalPath
dataFrame.write.option("parquet.block.size", 512).parquet(path)
Seq(true, false).foreach { pushDown =>
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> pushDown.toString) {
val accu = new NumRowGroupsAcc
sparkContext.register(accu)
val df = spark.read.parquet(path).filter(filter)
df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0)))
if (pushDown) {
assert(accu.value == 0)
} else {
assert(accu.value > 0)
}
AccumulatorContext.remove(accu.id)
}
}
}
}
test("filter pushdown - boolean") {
val data = (true :: false :: Nil).map(b => Tuple1.apply(Option(b)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val booleanAttr = df(colName).expr
assert(df(colName).expr.dataType === BooleanType)
checkFilterPredicate(booleanAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(booleanAttr.isNotNull, classOf[NotEq[_]],
Seq(Row(resultFun(true)), Row(resultFun(false))))
checkFilterPredicate(booleanAttr === true, classOf[Eq[_]], resultFun(true))
checkFilterPredicate(booleanAttr <=> true, classOf[Eq[_]], resultFun(true))
checkFilterPredicate(booleanAttr =!= true, classOf[NotEq[_]], resultFun(false))
}
}
test("filter pushdown - tinyint") {
val data = (1 to 4).map(i => Tuple1(Option(i.toByte)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val tinyIntAttr = df(colName).expr
assert(df(colName).expr.dataType === ByteType)
checkFilterPredicate(tinyIntAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(tinyIntAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(tinyIntAttr === 1.toByte, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(tinyIntAttr <=> 1.toByte, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(tinyIntAttr =!= 1.toByte, classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(tinyIntAttr < 2.toByte, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(tinyIntAttr > 3.toByte, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(tinyIntAttr <= 1.toByte, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(tinyIntAttr >= 4.toByte, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(Literal(1.toByte) === tinyIntAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(1.toByte) <=> tinyIntAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(2.toByte) > tinyIntAttr, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(Literal(3.toByte) < tinyIntAttr, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(Literal(1.toByte) >= tinyIntAttr, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(Literal(4.toByte) <= tinyIntAttr, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(!(tinyIntAttr < 4.toByte), classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(tinyIntAttr < 2.toByte || tinyIntAttr > 3.toByte,
classOf[Operators.Or], Seq(Row(resultFun(1)), Row(resultFun(4))))
}
}
test("filter pushdown - smallint") {
val data = (1 to 4).map(i => Tuple1(Option(i.toShort)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val smallIntAttr = df(colName).expr
assert(df(colName).expr.dataType === ShortType)
checkFilterPredicate(smallIntAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(smallIntAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(smallIntAttr === 1.toShort, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(smallIntAttr <=> 1.toShort, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(smallIntAttr =!= 1.toShort, classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(smallIntAttr < 2.toShort, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(smallIntAttr > 3.toShort, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(smallIntAttr <= 1.toShort, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(smallIntAttr >= 4.toShort, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(Literal(1.toShort) === smallIntAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(1.toShort) <=> smallIntAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(2.toShort) > smallIntAttr, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(Literal(3.toShort) < smallIntAttr, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(Literal(1.toShort) >= smallIntAttr, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(Literal(4.toShort) <= smallIntAttr, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(!(smallIntAttr < 4.toShort), classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(smallIntAttr < 2.toShort || smallIntAttr > 3.toShort,
classOf[Operators.Or], Seq(Row(resultFun(1)), Row(resultFun(4))))
}
}
test("filter pushdown - integer") {
val data = (1 to 4).map(i => Tuple1(Option(i)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val intAttr = df(colName).expr
assert(df(colName).expr.dataType === IntegerType)
checkFilterPredicate(intAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(intAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(intAttr === 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(intAttr <=> 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(intAttr =!= 1, classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(intAttr < 2, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(intAttr > 3, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(intAttr <= 1, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(intAttr >= 4, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(Literal(1) === intAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(1) <=> intAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(2) > intAttr, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(Literal(3) < intAttr, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(Literal(1) >= intAttr, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(Literal(4) <= intAttr, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(!(intAttr < 4), classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(intAttr < 2 || intAttr > 3, classOf[Operators.Or],
Seq(Row(resultFun(1)), Row(resultFun(4))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(intAttr, Array(2, 3, 4, 5, 6, 7).map(Literal.apply)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(2)), Row(resultFun(3)), Row(resultFun(4))))
}
}
}
}
test("filter pushdown - long") {
val data = (1 to 4).map(i => Tuple1(Option(i.toLong)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val longAttr = df(colName).expr
assert(df(colName).expr.dataType === LongType)
checkFilterPredicate(longAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(longAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(longAttr === 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(longAttr <=> 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(longAttr =!= 1, classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(longAttr < 2, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(longAttr > 3, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(longAttr <= 1, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(longAttr >= 4, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(Literal(1) === longAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(1) <=> longAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(2) > longAttr, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(Literal(3) < longAttr, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(Literal(1) >= longAttr, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(Literal(4) <= longAttr, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(!(longAttr < 4), classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(longAttr < 2 || longAttr > 3, classOf[Operators.Or],
Seq(Row(resultFun(1)), Row(resultFun(4))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(longAttr, Array(2L, 3L, 4L, 5L, 6L, 7L).map(Literal.apply)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(2L)), Row(resultFun(3L)), Row(resultFun(4L))))
}
}
}
}
test("filter pushdown - float") {
val data = (1 to 4).map(i => Tuple1(Option(i.toFloat)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val floatAttr = df(colName).expr
assert(df(colName).expr.dataType === FloatType)
checkFilterPredicate(floatAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(floatAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(floatAttr === 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(floatAttr <=> 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(floatAttr =!= 1, classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(floatAttr < 2, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(floatAttr > 3, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(floatAttr <= 1, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(floatAttr >= 4, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(Literal(1) === floatAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(1) <=> floatAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(2) > floatAttr, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(Literal(3) < floatAttr, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(Literal(1) >= floatAttr, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(Literal(4) <= floatAttr, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(!(floatAttr < 4), classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(floatAttr < 2 || floatAttr > 3, classOf[Operators.Or],
Seq(Row(resultFun(1)), Row(resultFun(4))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(floatAttr, Array(2F, 3F, 4F, 5F, 6F, 7F).map(Literal.apply)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(2F)), Row(resultFun(3F)), Row(resultFun(4F))))
}
}
}
}
test("filter pushdown - double") {
val data = (1 to 4).map(i => Tuple1(Option(i.toDouble)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val doubleAttr = df(colName).expr
assert(df(colName).expr.dataType === DoubleType)
checkFilterPredicate(doubleAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(doubleAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(doubleAttr === 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(doubleAttr <=> 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(doubleAttr =!= 1, classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(doubleAttr < 2, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(doubleAttr > 3, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(doubleAttr <= 1, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(doubleAttr >= 4, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(Literal(1) === doubleAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(1) <=> doubleAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(2) > doubleAttr, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(Literal(3) < doubleAttr, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(Literal(1) >= doubleAttr, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(Literal(4) <= doubleAttr, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(!(doubleAttr < 4), classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(doubleAttr < 2 || doubleAttr > 3, classOf[Operators.Or],
Seq(Row(resultFun(1)), Row(resultFun(4))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(doubleAttr, Array(2.0D, 3.0D, 4.0D, 5.0D, 6.0D, 7.0D).map(Literal.apply)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(2D)), Row(resultFun(3D)), Row(resultFun(4D))))
}
}
}
}
test("filter pushdown - string") {
val data = (1 to 4).map(i => Tuple1(Option(i.toString)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val stringAttr = df(colName).expr
assert(df(colName).expr.dataType === StringType)
checkFilterPredicate(stringAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(stringAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i.toString))))
checkFilterPredicate(stringAttr === "1", classOf[Eq[_]], resultFun("1"))
checkFilterPredicate(stringAttr <=> "1", classOf[Eq[_]], resultFun("1"))
checkFilterPredicate(stringAttr =!= "1", classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i.toString))))
checkFilterPredicate(stringAttr < "2", classOf[Lt[_]], resultFun("1"))
checkFilterPredicate(stringAttr > "3", classOf[Gt[_]], resultFun("4"))
checkFilterPredicate(stringAttr <= "1", classOf[LtEq[_]], resultFun("1"))
checkFilterPredicate(stringAttr >= "4", classOf[GtEq[_]], resultFun("4"))
checkFilterPredicate(Literal("1") === stringAttr, classOf[Eq[_]], resultFun("1"))
checkFilterPredicate(Literal("1") <=> stringAttr, classOf[Eq[_]], resultFun("1"))
checkFilterPredicate(Literal("2") > stringAttr, classOf[Lt[_]], resultFun("1"))
checkFilterPredicate(Literal("3") < stringAttr, classOf[Gt[_]], resultFun("4"))
checkFilterPredicate(Literal("1") >= stringAttr, classOf[LtEq[_]], resultFun("1"))
checkFilterPredicate(Literal("4") <= stringAttr, classOf[GtEq[_]], resultFun("4"))
checkFilterPredicate(!(stringAttr < "4"), classOf[GtEq[_]], resultFun("4"))
checkFilterPredicate(stringAttr < "2" || stringAttr > "3", classOf[Operators.Or],
Seq(Row(resultFun("1")), Row(resultFun("4"))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(stringAttr, Array("2", "3", "4", "5", "6", "7").map(Literal.apply)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun("2")), Row(resultFun("3")), Row(resultFun("4"))))
}
}
}
}
test("filter pushdown - binary") {
implicit class IntToBinary(int: Int) {
def b: Array[Byte] = int.toString.getBytes(StandardCharsets.UTF_8)
}
val data = (1 to 4).map(i => Tuple1(Option(i.b)))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val binaryAttr: Expression = df(colName).expr
assert(df(colName).expr.dataType === BinaryType)
checkFilterPredicate(binaryAttr === 1.b, classOf[Eq[_]], resultFun(1.b))
checkFilterPredicate(binaryAttr <=> 1.b, classOf[Eq[_]], resultFun(1.b))
checkFilterPredicate(binaryAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(binaryAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i.b))))
checkFilterPredicate(binaryAttr =!= 1.b, classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i.b))))
checkFilterPredicate(binaryAttr < 2.b, classOf[Lt[_]], resultFun(1.b))
checkFilterPredicate(binaryAttr > 3.b, classOf[Gt[_]], resultFun(4.b))
checkFilterPredicate(binaryAttr <= 1.b, classOf[LtEq[_]], resultFun(1.b))
checkFilterPredicate(binaryAttr >= 4.b, classOf[GtEq[_]], resultFun(4.b))
checkFilterPredicate(Literal(1.b) === binaryAttr, classOf[Eq[_]], resultFun(1.b))
checkFilterPredicate(Literal(1.b) <=> binaryAttr, classOf[Eq[_]], resultFun(1.b))
checkFilterPredicate(Literal(2.b) > binaryAttr, classOf[Lt[_]], resultFun(1.b))
checkFilterPredicate(Literal(3.b) < binaryAttr, classOf[Gt[_]], resultFun(4.b))
checkFilterPredicate(Literal(1.b) >= binaryAttr, classOf[LtEq[_]], resultFun(1.b))
checkFilterPredicate(Literal(4.b) <= binaryAttr, classOf[GtEq[_]], resultFun(4.b))
checkFilterPredicate(!(binaryAttr < 4.b), classOf[GtEq[_]], resultFun(4.b))
checkFilterPredicate(binaryAttr < 2.b || binaryAttr > 3.b, classOf[Operators.Or],
Seq(Row(resultFun(1.b)), Row(resultFun(4.b))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(binaryAttr, Array(2.b, 3.b, 4.b, 5.b, 6.b, 7.b).map(Literal.apply)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(2.b)), Row(resultFun(3.b)), Row(resultFun(4.b))))
}
}
}
}
test("filter pushdown - date") {
implicit class StringToDate(s: String) {
def date: Date = Date.valueOf(s)
}
val data = Seq("1000-01-01", "2018-03-19", "2018-03-20", "2018-03-21")
import testImplicits._
Seq(false, true).foreach { java8Api =>
Seq(CORRECTED, LEGACY).foreach { rebaseMode =>
withSQLConf(
SQLConf.DATETIME_JAVA8API_ENABLED.key -> java8Api.toString,
SQLConf.PARQUET_REBASE_MODE_IN_WRITE.key -> rebaseMode.toString) {
val dates = data.map(i => Tuple1(Date.valueOf(i))).toDF()
withNestedParquetDataFrame(dates) { case (inputDF, colName, fun) =>
implicit val df: DataFrame = inputDF
def resultFun(dateStr: String): Any = {
val parsed = if (java8Api) LocalDate.parse(dateStr) else Date.valueOf(dateStr)
fun(parsed)
}
val dateAttr: Expression = df(colName).expr
assert(df(colName).expr.dataType === DateType)
checkFilterPredicate(dateAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(dateAttr.isNotNull, classOf[NotEq[_]],
data.map(i => Row.apply(resultFun(i))))
checkFilterPredicate(dateAttr === "1000-01-01".date, classOf[Eq[_]],
resultFun("1000-01-01"))
checkFilterPredicate(dateAttr <=> "1000-01-01".date, classOf[Eq[_]],
resultFun("1000-01-01"))
checkFilterPredicate(dateAttr =!= "1000-01-01".date, classOf[NotEq[_]],
Seq("2018-03-19", "2018-03-20", "2018-03-21").map(i => Row.apply(resultFun(i))))
checkFilterPredicate(dateAttr < "2018-03-19".date, classOf[Lt[_]],
resultFun("1000-01-01"))
checkFilterPredicate(dateAttr > "2018-03-20".date, classOf[Gt[_]],
resultFun("2018-03-21"))
checkFilterPredicate(dateAttr <= "1000-01-01".date, classOf[LtEq[_]],
resultFun("1000-01-01"))
checkFilterPredicate(dateAttr >= "2018-03-21".date, classOf[GtEq[_]],
resultFun("2018-03-21"))
checkFilterPredicate(Literal("1000-01-01".date) === dateAttr, classOf[Eq[_]],
resultFun("1000-01-01"))
checkFilterPredicate(Literal("1000-01-01".date) <=> dateAttr, classOf[Eq[_]],
resultFun("1000-01-01"))
checkFilterPredicate(Literal("2018-03-19".date) > dateAttr, classOf[Lt[_]],
resultFun("1000-01-01"))
checkFilterPredicate(Literal("2018-03-20".date) < dateAttr, classOf[Gt[_]],
resultFun("2018-03-21"))
checkFilterPredicate(Literal("1000-01-01".date) >= dateAttr, classOf[LtEq[_]],
resultFun("1000-01-01"))
checkFilterPredicate(Literal("2018-03-21".date) <= dateAttr, classOf[GtEq[_]],
resultFun("2018-03-21"))
checkFilterPredicate(!(dateAttr < "2018-03-21".date), classOf[GtEq[_]],
resultFun("2018-03-21"))
checkFilterPredicate(
dateAttr < "2018-03-19".date || dateAttr > "2018-03-20".date,
classOf[Operators.Or],
Seq(Row(resultFun("1000-01-01")), Row(resultFun("2018-03-21"))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(dateAttr, Array("2018-03-19".date, "2018-03-20".date, "2018-03-21".date,
"2018-03-22".date).map(Literal.apply)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun("2018-03-19")), Row(resultFun("2018-03-20")),
Row(resultFun("2018-03-21"))))
}
}
}
}
}
}
}
test("filter pushdown - timestamp") {
Seq(true, false).foreach { java8Api =>
Seq(CORRECTED, LEGACY).foreach { rebaseMode =>
val millisData = Seq(
"1000-06-14 08:28:53.123",
"1582-06-15 08:28:53.001",
"1900-06-16 08:28:53.0",
"2018-06-17 08:28:53.999")
withSQLConf(
SQLConf.DATETIME_JAVA8API_ENABLED.key -> java8Api.toString,
SQLConf.PARQUET_REBASE_MODE_IN_WRITE.key -> rebaseMode.toString,
SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> TIMESTAMP_MILLIS.toString) {
testTimestampPushdown(millisData, java8Api)
}
val microsData = Seq(
"1000-06-14 08:28:53.123456",
"1582-06-15 08:28:53.123456",
"1900-06-16 08:28:53.123456",
"2018-06-17 08:28:53.123456")
withSQLConf(
SQLConf.DATETIME_JAVA8API_ENABLED.key -> java8Api.toString,
SQLConf.PARQUET_REBASE_MODE_IN_WRITE.key -> rebaseMode.toString,
SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> TIMESTAMP_MICROS.toString) {
testTimestampPushdown(microsData, java8Api)
}
// INT96 doesn't support pushdown
withSQLConf(
SQLConf.DATETIME_JAVA8API_ENABLED.key -> java8Api.toString,
SQLConf.PARQUET_INT96_REBASE_MODE_IN_WRITE.key -> rebaseMode.toString,
SQLConf.PARQUET_OUTPUT_TIMESTAMP_TYPE.key -> INT96.toString) {
import testImplicits._
withTempPath { file =>
millisData.map(i => Tuple1(Timestamp.valueOf(i))).toDF
.write.format(dataSourceName).save(file.getCanonicalPath)
readParquetFile(file.getCanonicalPath) { df =>
val schema = new SparkToParquetSchemaConverter(conf).convert(df.schema)
assertResult(None) {
createParquetFilters(schema).createFilter(sources.IsNull("_1"))
}
}
}
}
}
}
}
test("filter pushdown - decimal") {
Seq(
(false, Decimal.MAX_INT_DIGITS), // int32Writer
(false, Decimal.MAX_LONG_DIGITS), // int64Writer
(true, Decimal.MAX_LONG_DIGITS), // binaryWriterUsingUnscaledLong
(false, DecimalType.MAX_PRECISION) // binaryWriterUsingUnscaledBytes
).foreach { case (legacyFormat, precision) =>
withSQLConf(SQLConf.PARQUET_WRITE_LEGACY_FORMAT.key -> legacyFormat.toString) {
val rdd =
spark.sparkContext.parallelize((1 to 4).map(i => Row(new java.math.BigDecimal(i))))
val dataFrame = spark.createDataFrame(rdd, StructType.fromDDL(s"a decimal($precision, 2)"))
withNestedParquetDataFrame(dataFrame) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val decimalAttr: Expression = df(colName).expr
assert(df(colName).expr.dataType === DecimalType(precision, 2))
checkFilterPredicate(decimalAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(decimalAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(decimalAttr === 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(decimalAttr <=> 1, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(decimalAttr =!= 1, classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(i))))
checkFilterPredicate(decimalAttr < 2, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(decimalAttr > 3, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(decimalAttr <= 1, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(decimalAttr >= 4, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(Literal(1) === decimalAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(1) <=> decimalAttr, classOf[Eq[_]], resultFun(1))
checkFilterPredicate(Literal(2) > decimalAttr, classOf[Lt[_]], resultFun(1))
checkFilterPredicate(Literal(3) < decimalAttr, classOf[Gt[_]], resultFun(4))
checkFilterPredicate(Literal(1) >= decimalAttr, classOf[LtEq[_]], resultFun(1))
checkFilterPredicate(Literal(4) <= decimalAttr, classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(!(decimalAttr < 4), classOf[GtEq[_]], resultFun(4))
checkFilterPredicate(decimalAttr < 2 || decimalAttr > 3, classOf[Operators.Or],
Seq(Row(resultFun(1)), Row(resultFun(4))))
Array(1, 2, 3, 4).map(JBigDecimal.valueOf(_).setScale(2))
.map(Literal.create(_, DecimalType(precision, 2)))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(decimalAttr, Array(2, 3, 4, 5).map(Literal.apply)
.map(_.cast(DecimalType(precision, 2)))),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(2)), Row(resultFun(3)), Row(resultFun(4))))
}
}
}
}
}
}
test("Ensure that filter value matched the parquet file schema") {
val scale = 2
val schema = StructType(Seq(
StructField("cint", IntegerType),
StructField("cdecimal1", DecimalType(Decimal.MAX_INT_DIGITS, scale)),
StructField("cdecimal2", DecimalType(Decimal.MAX_LONG_DIGITS, scale)),
StructField("cdecimal3", DecimalType(DecimalType.MAX_PRECISION, scale))
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val decimal = new JBigDecimal(10).setScale(scale)
val decimal1 = new JBigDecimal(10).setScale(scale + 1)
assert(decimal.scale() === scale)
assert(decimal1.scale() === scale + 1)
val parquetFilters = createParquetFilters(parquetSchema)
assertResult(Some(lt(intColumn("cdecimal1"), 1000: Integer))) {
parquetFilters.createFilter(sources.LessThan("cdecimal1", decimal))
}
assertResult(None) {
parquetFilters.createFilter(sources.LessThan("cdecimal1", decimal1))
}
assertResult(Some(lt(longColumn("cdecimal2"), 1000L: java.lang.Long))) {
parquetFilters.createFilter(sources.LessThan("cdecimal2", decimal))
}
assertResult(None) {
parquetFilters.createFilter(sources.LessThan("cdecimal2", decimal1))
}
assert(parquetFilters.createFilter(sources.LessThan("cdecimal3", decimal)).isDefined)
assertResult(None) {
parquetFilters.createFilter(sources.LessThan("cdecimal3", decimal1))
}
}
test("SPARK-6554: don't push down predicates which reference partition columns") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/part=1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path)
// If the "part = 1" filter gets pushed down, this query will throw an exception since
// "part" is not a valid column in the actual Parquet file
checkAnswer(
spark.read.parquet(dir.getCanonicalPath).filter("part = 1"),
(1 to 3).map(i => Row(i, i.toString, 1)))
}
}
}
test("SPARK-10829: Filter combine partition key and attribute doesn't work in DataSource scan") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/part=1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path)
// If the "part = 1" filter gets pushed down, this query will throw an exception since
// "part" is not a valid column in the actual Parquet file
checkAnswer(
spark.read.parquet(dir.getCanonicalPath).filter("a > 0 and (part = 0 or a > 1)"),
(2 to 3).map(i => Row(i, i.toString, 1)))
}
}
}
test("SPARK-12231: test the filter and empty project in partitioned DataSource scan") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}"
(1 to 3).map(i => (i, i + 1, i + 2, i + 3)).toDF("a", "b", "c", "d").
write.partitionBy("a").parquet(path)
// The filter "a > 1 or b < 2" will not get pushed down, and the projection is empty,
// this query will throw an exception since the project from combinedFilter expect
// two projection while the
val df1 = spark.read.parquet(dir.getCanonicalPath)
assert(df1.filter("a > 1 or b < 2").count() == 2)
}
}
}
test("SPARK-12231: test the new projection in partitioned DataSource scan") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}"
(1 to 3).map(i => (i, i + 1, i + 2, i + 3)).toDF("a", "b", "c", "d").
write.partitionBy("a").parquet(path)
// Test the case where a new projection is generated,
// i.e. when projects != partitionAndNormalColumnProjs.
val df1 = spark.read.parquet(dir.getCanonicalPath)
checkAnswer(
df1.filter("a > 1 or b > 2").orderBy("a").selectExpr("a", "b", "c", "d"),
(2 to 3).map(i => Row(i, i + 1, i + 2, i + 3)))
}
}
}
test("Filter applied on merged Parquet schema with new column should work") {
import testImplicits._
withAllParquetReaders {
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_SCHEMA_MERGING_ENABLED.key -> "true") {
withTempPath { dir =>
val path1 = s"${dir.getCanonicalPath}/table1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path1)
val path2 = s"${dir.getCanonicalPath}/table2"
(1 to 3).map(i => (i, i.toString)).toDF("c", "b").write.parquet(path2)
// No matter "c = 1" gets pushed down or not, this query should work without exception.
val df = spark.read.parquet(path1, path2).filter("c = 1").selectExpr("c", "b", "a")
checkAnswer(
df,
Row(1, "1", null))
val path3 = s"${dir.getCanonicalPath}/table3"
val dfStruct = sparkContext.parallelize(Seq((1, 1))).toDF("a", "b")
dfStruct.select(struct("a").as("s")).write.parquet(path3)
val path4 = s"${dir.getCanonicalPath}/table4"
val dfStruct2 = sparkContext.parallelize(Seq((1, 1))).toDF("c", "b")
dfStruct2.select(struct("c").as("s")).write.parquet(path4)
// No matter "s.c = 1" gets pushed down or not, this query should work without exception.
val dfStruct3 = spark.read.parquet(path3, path4).filter("s.c = 1")
.selectExpr("s")
checkAnswer(dfStruct3, Row(Row(null, 1)))
}
}
}
}
// The unsafe row RecordReader does not support row-by-row filtering, so run this test with it disabled.
test("SPARK-11661 Still pushdown filters returned by unhandledFilters") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/part=1"
(1 to 3).map(i => (i, i.toString)).toDF("a", "b").write.parquet(path)
val df = spark.read.parquet(path).filter("a = 2")
// The result should be a single row.
// When a filter is pushed to Parquet, Parquet can apply it to every row.
// So we can check the number of rows returned from Parquet
// to make sure our filter pushdown works.
assert(stripSparkFilter(df).count == 1)
}
}
}
}
test("SPARK-12218: 'Not' is included in Parquet filter pushdown") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/table1"
(1 to 5).map(i => (i, (i % 2).toString)).toDF("a", "b").write.parquet(path)
checkAnswer(
spark.read.parquet(path).where("not (a = 2) or not(b in ('1'))"),
(1 to 5).map(i => Row(i, (i % 2).toString)))
checkAnswer(
spark.read.parquet(path).where("not (a = 2 and b in ('1'))"),
(1 to 5).map(i => Row(i, (i % 2).toString)))
}
}
}
test("SPARK-12218 and SPARK-25559 Converting conjunctions into Parquet filter predicates") {
val schema = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = true),
StructField("c", DoubleType, nullable = true)
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val parquetFilters = createParquetFilters(parquetSchema)
assertResult(Some(and(
lt(intColumn("a"), 10: Integer),
gt(doubleColumn("c"), 1.5: java.lang.Double)))
) {
parquetFilters.createFilter(
sources.And(
sources.LessThan("a", 10),
sources.GreaterThan("c", 1.5D)))
}
// Testing when `canRemoveOneSideInAnd == true`
// case sources.And(lhs, rhs) =>
// ...
// case (Some(lhsFilter), None) if canRemoveOneSideInAnd => Some(lhsFilter)
assertResult(Some(lt(intColumn("a"), 10: Integer))) {
parquetFilters.createFilter(
sources.And(
sources.LessThan("a", 10),
sources.StringContains("b", "prefix")))
}
// Testing when `canRemoveOneSideInAnd == true`
// case sources.And(lhs, rhs) =>
// ...
// case (None, Some(rhsFilter)) if canRemoveOneSideInAnd => Some(rhsFilter)
assertResult(Some(lt(intColumn("a"), 10: Integer))) {
parquetFilters.createFilter(
sources.And(
sources.StringContains("b", "prefix"),
sources.LessThan("a", 10)))
}
// Testing complex And conditions
assertResult(Some(
FilterApi.and(lt(intColumn("a"), 10: Integer), gt(intColumn("a"), 5: Integer)))) {
parquetFilters.createFilter(
sources.And(
sources.And(
sources.LessThan("a", 10),
sources.StringContains("b", "prefix")
),
sources.GreaterThan("a", 5)))
}
// Testing complex And conditions
assertResult(Some(
FilterApi.and(gt(intColumn("a"), 5: Integer), lt(intColumn("a"), 10: Integer)))) {
parquetFilters.createFilter(
sources.And(
sources.GreaterThan("a", 5),
sources.And(
sources.StringContains("b", "prefix"),
sources.LessThan("a", 10)
)))
}
// Testing
// case sources.Not(pred) =>
// createFilterHelper(nameToParquetField, pred, canRemoveOneSideInAnd = false)
// .map(FilterApi.not)
//
// and
//
// Testing when `canRemoveOneSideInAnd == false`
// case sources.And(lhs, rhs) =>
// ...
// case (Some(lhsFilter), None) if canRemoveOneSideInAnd => Some(lhsFilter)
assertResult(None) {
parquetFilters.createFilter(
sources.Not(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix"))))
}
// Testing
// case sources.Not(pred) =>
// createFilterHelper(nameToParquetField, pred, canRemoveOneSideInAnd = false)
// .map(FilterApi.not)
//
// and
//
// Testing when `canRemoveOneSideInAnd == false`
// case sources.And(lhs, rhs) =>
// ...
// case (None, Some(rhsFilter)) if canRemoveOneSideInAnd => Some(rhsFilter)
assertResult(None) {
parquetFilters.createFilter(
sources.Not(
sources.And(
sources.StringContains("b", "prefix"),
sources.GreaterThan("a", 1))))
}
// Testing
// case sources.Not(pred) =>
// createFilterHelper(nameToParquetField, pred, canRemoveOneSideInAnd = false)
// .map(FilterApi.not)
//
// and
//
// Testing passing `canRemoveOneSideInAnd = false` into
// case sources.And(lhs, rhs) =>
// val lhsFilterOption = createFilterHelper(nameToParquetField, lhs, canRemoveOneSideInAnd)
assertResult(None) {
parquetFilters.createFilter(
sources.Not(
sources.And(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2))))
}
// Testing
// case sources.Not(pred) =>
// createFilterHelper(nameToParquetField, pred, canRemoveOneSideInAnd = false)
// .map(FilterApi.not)
//
// and
//
// Testing passing `canRemoveOneSideInAnd = false` into
// case sources.And(lhs, rhs) =>
// val rhsFilterOption = createFilterHelper(nameToParquetField, rhs, canRemoveOneSideInAnd)
assertResult(None) {
parquetFilters.createFilter(
sources.Not(
sources.And(
sources.GreaterThan("a", 2),
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")))))
}
}
test("SPARK-27699 Converting disjunctions into Parquet filter predicates") {
val schema = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = true),
StructField("c", DoubleType, nullable = true)
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val parquetFilters = createParquetFilters(parquetSchema)
// Testing
// case sources.Or(lhs, rhs) =>
// ...
// lhsFilter <- createFilterHelper(nameToParquetField, lhs, canRemoveOneSideInAnd = true)
assertResult(Some(
FilterApi.or(gt(intColumn("a"), 1: Integer), gt(intColumn("a"), 2: Integer)))) {
parquetFilters.createFilter(
sources.Or(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2)))
}
// Testing
// case sources.Or(lhs, rhs) =>
// ...
// rhsFilter <- createFilterHelper(nameToParquetField, rhs, canRemoveOneSideInAnd = true)
assertResult(Some(
FilterApi.or(gt(intColumn("a"), 2: Integer), gt(intColumn("a"), 1: Integer)))) {
parquetFilters.createFilter(
sources.Or(
sources.GreaterThan("a", 2),
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix"))))
}
// Testing
// case sources.Or(lhs, rhs) =>
// ...
// lhsFilter <- createFilterHelper(nameToParquetField, lhs, canRemoveOneSideInAnd = true)
// rhsFilter <- createFilterHelper(nameToParquetField, rhs, canRemoveOneSideInAnd = true)
assertResult(Some(
FilterApi.or(gt(intColumn("a"), 1: Integer), lt(intColumn("a"), 0: Integer)))) {
parquetFilters.createFilter(
sources.Or(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.And(
sources.LessThan("a", 0),
sources.StringContains("b", "foobar"))))
}
}
test("SPARK-27698 Convertible Parquet filter predicates") {
val schema = StructType(Seq(
StructField("a", IntegerType, nullable = false),
StructField("b", StringType, nullable = true),
StructField("c", DoubleType, nullable = true)
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val parquetFilters = createParquetFilters(parquetSchema)
assertResult(Seq(sources.And(sources.LessThan("a", 10), sources.GreaterThan("c", 1.5D)))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.LessThan("a", 10),
sources.GreaterThan("c", 1.5D))))
}
assertResult(Seq(sources.LessThan("a", 10))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.LessThan("a", 10),
sources.StringContains("b", "prefix"))))
}
assertResult(Seq(sources.LessThan("a", 10))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.StringContains("b", "prefix"),
sources.LessThan("a", 10))))
}
// Testing complex And conditions
assertResult(Seq(sources.And(sources.LessThan("a", 10), sources.GreaterThan("a", 5)))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.And(
sources.LessThan("a", 10),
sources.StringContains("b", "prefix")
),
sources.GreaterThan("a", 5))))
}
// Testing complex And conditions
assertResult(Seq(sources.And(sources.GreaterThan("a", 5), sources.LessThan("a", 10)))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.GreaterThan("a", 5),
sources.And(
sources.StringContains("b", "prefix"),
sources.LessThan("a", 10)
))))
}
// Testing complex Or conditions
assertResult(Seq(sources.Or(sources.GreaterThan("a", 1), sources.GreaterThan("a", 2)))) {
parquetFilters.convertibleFilters(
Seq(sources.Or(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2))))
}
// Testing complex And/Or conditions; the And condition under the Or condition can't be pushed down.
assertResult(Seq(sources.And(sources.LessThan("a", 10),
sources.Or(sources.GreaterThan("a", 1), sources.GreaterThan("a", 2))))) {
parquetFilters.convertibleFilters(
Seq(sources.And(
sources.LessThan("a", 10),
sources.Or(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2)))))
}
assertResult(Seq(sources.Or(sources.GreaterThan("a", 2), sources.GreaterThan("c", 1.1)))) {
parquetFilters.convertibleFilters(
Seq(sources.Or(
sources.GreaterThan("a", 2),
sources.And(
sources.GreaterThan("c", 1.1),
sources.StringContains("b", "prefix")))))
}
// Testing complex Not conditions.
assertResult(Seq.empty) {
parquetFilters.convertibleFilters(
Seq(sources.Not(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")))))
}
assertResult(Seq.empty) {
parquetFilters.convertibleFilters(
Seq(sources.Not(
sources.And(
sources.StringContains("b", "prefix"),
sources.GreaterThan("a", 1)))))
}
assertResult(Seq.empty) {
parquetFilters.convertibleFilters(
Seq(sources.Not(
sources.And(
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix")),
sources.GreaterThan("a", 2)))))
}
assertResult(Seq.empty) {
parquetFilters.convertibleFilters(
Seq(sources.Not(
sources.And(
sources.GreaterThan("a", 2),
sources.And(
sources.GreaterThan("a", 1),
sources.StringContains("b", "prefix"))))))
}
}
test("SPARK-16371 Do not push down filters when inner name and outer name are the same") {
withParquetDataFrame((1 to 4).map(i => Tuple1(Tuple1(i)))) { implicit df =>
// Here the schema becomes as below:
//
// root
// |-- _1: struct (nullable = true)
// | |-- _1: integer (nullable = true)
//
// The inner column name `_1` and the outer column name `_1` are the same.
// Obviously this should not push down filters because the outer column is a struct.
assert(df.filter("_1 IS NOT NULL").count() === 4)
}
}
test("Filters should be pushed down for vectorized Parquet reader at row group level") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "true",
SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "false") {
withTempPath { dir =>
val path = s"${dir.getCanonicalPath}/table"
(1 to 1024).map(i => (101, i)).toDF("a", "b").write.parquet(path)
Seq(true, false).foreach { enablePushDown =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> enablePushDown.toString) {
val accu = new NumRowGroupsAcc
sparkContext.register(accu)
val df = spark.read.parquet(path).filter("a < 100")
df.foreachPartition((it: Iterator[Row]) => it.foreach(v => accu.add(0)))
if (enablePushDown) {
assert(accu.value == 0)
} else {
assert(accu.value > 0)
}
AccumulatorContext.remove(accu.id)
}
}
}
}
}
test("SPARK-17213: Broken Parquet filter push-down for string columns") {
withAllParquetReaders {
withTempPath { dir =>
import testImplicits._
val path = dir.getCanonicalPath
// scalastyle:off nonascii
Seq("a", "é").toDF("name").write.parquet(path)
// scalastyle:on nonascii
assert(spark.read.parquet(path).where("name > 'a'").count() == 1)
assert(spark.read.parquet(path).where("name >= 'a'").count() == 2)
// scalastyle:off nonascii
assert(spark.read.parquet(path).where("name < 'é'").count() == 1)
assert(spark.read.parquet(path).where("name <= 'é'").count() == 2)
// scalastyle:on nonascii
}
}
}
test("SPARK-31026: Parquet predicate pushdown for fields having dots in the names") {
import testImplicits._
withAllParquetReaders {
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> true.toString,
SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
withTempPath { path =>
Seq(Some(1), None).toDF("col.dots").write.parquet(path.getAbsolutePath)
val readBack = spark.read.parquet(path.getAbsolutePath).where("`col.dots` IS NOT NULL")
assert(readBack.count() == 1)
}
}
withSQLConf(
// Makes sure disabling 'spark.sql.parquet.recordFilter' still enables
// row group level filtering.
SQLConf.PARQUET_RECORD_FILTER_ENABLED.key -> "false",
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
withTempPath { path =>
val data = (1 to 1024)
data.toDF("col.dots").coalesce(1)
.write.option("parquet.block.size", 512)
.parquet(path.getAbsolutePath)
val df = spark.read.parquet(path.getAbsolutePath).filter("`col.dots` == 500")
// Here, we strip the Spark side filter and check the actual results from Parquet.
val actual = stripSparkFilter(df).collect().length
// Since those are filtered at row group level, the result count should be less
// than the total length but should not be a single record.
// Note that, if record level filtering is enabled, it should be a single record.
// If no filter is pushed down to Parquet, it should be the total length of data.
assert(actual > 1 && actual < data.length)
}
}
}
}
test("Filters should be pushed down for Parquet readers at row group level") {
import testImplicits._
withSQLConf(
// Makes sure disabling 'spark.sql.parquet.recordFilter' still enables
// row group level filtering.
SQLConf.PARQUET_RECORD_FILTER_ENABLED.key -> "false",
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") {
withTempPath { path =>
val data = (1 to 1024)
data.toDF("a").coalesce(1)
.write.option("parquet.block.size", 512)
.parquet(path.getAbsolutePath)
val df = spark.read.parquet(path.getAbsolutePath).filter("a == 500")
// Here, we strip the Spark side filter and check the actual results from Parquet.
val actual = stripSparkFilter(df).collect().length
// Since those are filtered at row group level, the result count should be less
// than the total length but should not be a single record.
// Note that, if record level filtering is enabled, it should be a single record.
// If no filter is pushed down to Parquet, it should be the total length of data.
assert(actual > 1 && actual < data.length)
}
}
}
test("SPARK-23852: Broken Parquet push-down for partially-written stats") {
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true") {
// parquet-1217.parquet contains a single column with values -1, 0, 1, 2 and null.
// The row-group statistics include null counts, but not min and max values, which
// triggers PARQUET-1217.
val df = readResourceParquetFile("test-data/parquet-1217.parquet")
// Will return 0 rows if PARQUET-1217 is not fixed.
assert(df.where("col > 0").count() === 2)
}
}
test("filter pushdown - StringStartsWith") {
withParquetDataFrame((1 to 4).map(i => Tuple1(i + "str" + i))) { implicit df =>
checkFilterPredicate(
'_1.startsWith("").asInstanceOf[Predicate],
classOf[UserDefinedByInstance[_, _]],
Seq("1str1", "2str2", "3str3", "4str4").map(Row(_)))
Seq("2", "2s", "2st", "2str", "2str2").foreach { prefix =>
checkFilterPredicate(
'_1.startsWith(prefix).asInstanceOf[Predicate],
classOf[UserDefinedByInstance[_, _]],
"2str2")
}
Seq("2S", "null", "2str22").foreach { prefix =>
checkFilterPredicate(
'_1.startsWith(prefix).asInstanceOf[Predicate],
classOf[UserDefinedByInstance[_, _]],
Seq.empty[Row])
}
checkFilterPredicate(
!'_1.startsWith("").asInstanceOf[Predicate],
classOf[Operators.Not],
Seq().map(Row(_)))
Seq("2", "2s", "2st", "2str", "2str2").foreach { prefix =>
checkFilterPredicate(
!'_1.startsWith(prefix).asInstanceOf[Predicate],
classOf[Operators.Not],
Seq("1str1", "3str3", "4str4").map(Row(_)))
}
Seq("2S", "null", "2str22").foreach { prefix =>
checkFilterPredicate(
!'_1.startsWith(prefix).asInstanceOf[Predicate],
classOf[Operators.Not],
Seq("1str1", "2str2", "3str3", "4str4").map(Row(_)))
}
val schema = new SparkToParquetSchemaConverter(conf).convert(df.schema)
assertResult(None) {
createParquetFilters(schema).createFilter(sources.StringStartsWith("_1", null))
}
}
// SPARK-28371: make sure filter is null-safe.
withParquetDataFrame(Seq(Tuple1[String](null))) { implicit df =>
checkFilterPredicate(
'_1.startsWith("blah").asInstanceOf[Predicate],
classOf[UserDefinedByInstance[_, _]],
Seq.empty[Row])
}
import testImplicits._
// Test canDrop() has taken effect
testStringStartsWith(spark.range(1024).map(_.toString).toDF(), "value like 'a%'")
// Test inverseCanDrop() has taken effect
testStringStartsWith(spark.range(1024).map(c => "100").toDF(), "value not like '10%'")
}
test("SPARK-17091: Convert IN predicate to Parquet filter push-down") {
val schema = StructType(Seq(
StructField("a", IntegerType, nullable = false)
))
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val parquetFilters = createParquetFilters(parquetSchema)
assertResult(Some(FilterApi.eq(intColumn("a"), null: Integer))) {
parquetFilters.createFilter(sources.In("a", Array(null)))
}
assertResult(Some(FilterApi.eq(intColumn("a"), 10: Integer))) {
parquetFilters.createFilter(sources.In("a", Array(10)))
}
// Remove duplicates
assertResult(Some(FilterApi.eq(intColumn("a"), 10: Integer))) {
parquetFilters.createFilter(sources.In("a", Array(10, 10)))
}
assertResult(Some(or(or(
FilterApi.eq(intColumn("a"), 10: Integer),
FilterApi.eq(intColumn("a"), 20: Integer)),
FilterApi.eq(intColumn("a"), 30: Integer)))
) {
parquetFilters.createFilter(sources.In("a", Array(10, 20, 30)))
}
Seq(0, 10).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> threshold.toString) {
assert(createParquetFilters(parquetSchema)
.createFilter(sources.In("a", Array(10, 20, 30))).nonEmpty === threshold > 0)
}
}
import testImplicits._
withTempPath { path =>
val data = 0 to 1024
data.toDF("a").selectExpr("if (a = 1024, null, a) AS a") // convert 1024 to null
.coalesce(1).write.option("parquet.block.size", 512)
.parquet(path.getAbsolutePath)
val df = spark.read.parquet(path.getAbsolutePath)
Seq(true, false).foreach { pushEnabled =>
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> pushEnabled.toString) {
Seq(1, 5, 10, 11, 100).foreach { count =>
val filter = s"a in(${Range(0, count).mkString(",")})"
assert(df.where(filter).count() === count)
val actual = stripSparkFilter(df.where(filter)).collect().length
if (pushEnabled) {
// Since SPARK-32792, the In predicate is pushed down even if its number of values exceeds the threshold.
assert(actual > 1 && actual < data.length)
} else {
assert(actual === data.length)
}
}
assert(df.where("a in(null)").count() === 0)
assert(df.where("a = null").count() === 0)
assert(df.where("a is null").count() === 1)
}
}
}
}
test("SPARK-32792: Pushdown IN predicate to min-max filter") {
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> "2") {
val parquetFilters = createParquetFilters(
new SparkToParquetSchemaConverter(conf).convert(StructType.fromDDL("a int")))
assertResult(Some(and(
FilterApi.gtEq(intColumn("a"), 1: Integer),
FilterApi.ltEq(intColumn("a"), 20: Integer)))
) {
parquetFilters.createFilter(sources.In("a", (1 to 20).toArray))
}
assertResult(Some(and(
FilterApi.gtEq(intColumn("a"), -200: Integer),
FilterApi.ltEq(intColumn("a"), 40: Integer)))
) {
parquetFilters.createFilter(sources.In("A", Array(-100, 10, -200, 40)))
}
assertResult(Some(or(
FilterApi.eq(intColumn("a"), null: Integer),
and(
FilterApi.gtEq(intColumn("a"), 2: Integer),
FilterApi.ltEq(intColumn("a"), 7: Integer))))
) {
parquetFilters.createFilter(sources.In("a", Array(2, 3, 7, null, 6)))
}
assertResult(
Some(FilterApi.not(or(
FilterApi.eq(intColumn("a"), 2: Integer),
FilterApi.eq(intColumn("a"), 3: Integer))))
) {
parquetFilters.createFilter(sources.Not(sources.In("a", Array(2, 3))))
}
assertResult(
None
) {
parquetFilters.createFilter(sources.Not(sources.In("a", Array(2, 3, 7))))
}
}
}
test("SPARK-25207: Case-insensitive field resolution for pushdown when reading parquet") {
def testCaseInsensitiveResolution(
schema: StructType,
expected: FilterPredicate,
filter: sources.Filter): Unit = {
val parquetSchema = new SparkToParquetSchemaConverter(conf).convert(schema)
val caseSensitiveParquetFilters =
createParquetFilters(parquetSchema, caseSensitive = Some(true))
val caseInsensitiveParquetFilters =
createParquetFilters(parquetSchema, caseSensitive = Some(false))
assertResult(Some(expected)) {
caseInsensitiveParquetFilters.createFilter(filter)
}
assertResult(None) {
caseSensitiveParquetFilters.createFilter(filter)
}
}
val schema = StructType(Seq(StructField("cint", IntegerType)))
testCaseInsensitiveResolution(
schema, FilterApi.eq(intColumn("cint"), null.asInstanceOf[Integer]), sources.IsNull("CINT"))
testCaseInsensitiveResolution(
schema,
FilterApi.notEq(intColumn("cint"), null.asInstanceOf[Integer]),
sources.IsNotNull("CINT"))
testCaseInsensitiveResolution(
schema, FilterApi.eq(intColumn("cint"), 1000: Integer), sources.EqualTo("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.notEq(intColumn("cint"), 1000: Integer),
sources.Not(sources.EqualTo("CINT", 1000)))
testCaseInsensitiveResolution(
schema, FilterApi.eq(intColumn("cint"), 1000: Integer), sources.EqualNullSafe("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.notEq(intColumn("cint"), 1000: Integer),
sources.Not(sources.EqualNullSafe("CINT", 1000)))
testCaseInsensitiveResolution(
schema,
FilterApi.lt(intColumn("cint"), 1000: Integer), sources.LessThan("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.ltEq(intColumn("cint"), 1000: Integer),
sources.LessThanOrEqual("CINT", 1000))
testCaseInsensitiveResolution(
schema, FilterApi.gt(intColumn("cint"), 1000: Integer), sources.GreaterThan("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.gtEq(intColumn("cint"), 1000: Integer),
sources.GreaterThanOrEqual("CINT", 1000))
testCaseInsensitiveResolution(
schema,
FilterApi.or(
FilterApi.eq(intColumn("cint"), 10: Integer),
FilterApi.eq(intColumn("cint"), 20: Integer)),
sources.In("CINT", Array(10, 20)))
val dupFieldSchema = StructType(
Seq(StructField("cint", IntegerType), StructField("cINT", IntegerType)))
val dupParquetSchema = new SparkToParquetSchemaConverter(conf).convert(dupFieldSchema)
val dupCaseInsensitiveParquetFilters =
createParquetFilters(dupParquetSchema, caseSensitive = Some(false))
assertResult(None) {
dupCaseInsensitiveParquetFilters.createFilter(sources.EqualTo("CINT", 1000))
}
}
test("SPARK-25207: exception when duplicate fields in case-insensitive mode") {
withTempPath { dir =>
val count = 10
val tableName = "spark_25207"
val tableDir = dir.getAbsoluteFile + "/table"
withTable(tableName) {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
spark.range(count).selectExpr("id as A", "id as B", "id as b")
.write.mode("overwrite").parquet(tableDir)
}
sql(
s"""
|CREATE TABLE $tableName (A LONG, B LONG) USING PARQUET LOCATION '$tableDir'
""".stripMargin)
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
val e = intercept[SparkException] {
sql(s"select a from $tableName where b > 0").collect()
}
assert(e.getCause.isInstanceOf[RuntimeException] && e.getCause.getMessage.contains(
"""Found duplicate field(s) "B": [B, b] in case-insensitive mode"""))
}
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
checkAnswer(sql(s"select A from $tableName where B > 0"), (1 until count).map(Row(_)))
}
}
}
}
test("SPARK-30826: case insensitivity of StringStartsWith attribute") {
import testImplicits._
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
withTable("t1") {
withTempPath { dir =>
val path = dir.toURI.toString
Seq("42").toDF("COL").write.parquet(path)
spark.sql(
s"""
|CREATE TABLE t1 (col STRING)
|USING parquet
|OPTIONS (path '$path')
""".stripMargin)
checkAnswer(
spark.sql("SELECT * FROM t1 WHERE col LIKE '4%'"),
Row("42"))
}
}
}
}
test("Support Parquet column index") {
// block 1:
// null count min max
// page-0 0 0 99
// page-1 0 100 199
// page-2 0 200 299
// page-3 0 300 399
// page-4 0 400 449
//
// block 2:
// null count min max
// page-0 0 450 549
// page-1 0 550 649
// page-2 0 650 749
// page-3 0 750 849
// page-4 0 850 899
withTempPath { path =>
spark.range(900)
.repartition(1)
.write
.option(ParquetOutputFormat.PAGE_SIZE, "500")
.option(ParquetOutputFormat.BLOCK_SIZE, "2000")
.parquet(path.getCanonicalPath)
val parquetFile = path.listFiles().filter(_.getName.startsWith("part")).last
val in = HadoopInputFile.fromPath(
new Path(parquetFile.getCanonicalPath),
spark.sessionState.newHadoopConf())
Utils.tryWithResource(ParquetFileReader.open(in)) { reader =>
val blocks = reader.getFooter.getBlocks
assert(blocks.size() > 1)
val columns = blocks.get(0).getColumns
assert(columns.size() === 1)
val columnIndex = reader.readColumnIndex(columns.get(0))
assert(columnIndex.getMinValues.size() > 1)
val rowGroupCnt = blocks.get(0).getRowCount
// Number of rows per page = second page min value - first page min value
val pageCnt = columnIndex.getMinValues.get(1).asLongBuffer().get() -
columnIndex.getMinValues.get(0).asLongBuffer().get()
assert(pageCnt < rowGroupCnt)
Seq(true, false).foreach { columnIndex =>
withSQLConf(ParquetInputFormat.COLUMN_INDEX_FILTERING_ENABLED -> s"$columnIndex") {
val df = spark.read.parquet(parquetFile.getCanonicalPath).where("id = 1")
df.collect()
val plan = df.queryExecution.executedPlan
val metrics = plan.collectLeaves().head.metrics
val numOutputRows = metrics("numOutputRows").value
if (columnIndex) {
assert(numOutputRows === pageCnt)
} else {
assert(numOutputRows === rowGroupCnt)
}
}
}
}
}
}
test("SPARK-34562: Bloom filter push down") {
withTempPath { dir =>
val path = dir.getCanonicalPath
spark.range(100).selectExpr("id * 2 AS id")
.write
.option(ParquetOutputFormat.BLOOM_FILTER_ENABLED + "#id", true)
// Disable dictionary encoding because the number of distinct values is less than 40000.
.option(ParquetOutputFormat.ENABLE_DICTIONARY, false)
.parquet(path)
Seq(true, false).foreach { bloomFilterEnabled =>
withSQLConf(ParquetInputFormat.BLOOM_FILTERING_ENABLED -> bloomFilterEnabled.toString) {
val accu = new NumRowGroupsAcc
sparkContext.register(accu)
val df = spark.read.parquet(path).filter("id = 19")
df.foreachPartition((it: Iterator[Row]) => it.foreach(_ => accu.add(0)))
if (bloomFilterEnabled) {
assert(accu.value === 0)
} else {
assert(accu.value > 0)
}
AccumulatorContext.remove(accu.id)
}
}
}
}
test("SPARK-36866: filter pushdown - year-month interval") {
def months(m: Int): Period = Period.ofMonths(m)
def monthsLit(m: Int): Literal = Literal(months(m))
val data = (1 to 4).map(i => Tuple1(Option(months(i))))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val iAttr = df(colName).expr
assert(df(colName).expr.dataType === YearMonthIntervalType())
checkFilterPredicate(iAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(iAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(months(i)))))
checkFilterPredicate(iAttr === monthsLit(1), classOf[Eq[_]], resultFun(months(1)))
checkFilterPredicate(iAttr <=> monthsLit(1), classOf[Eq[_]], resultFun(months(1)))
checkFilterPredicate(iAttr =!= monthsLit(1), classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(months(i)))))
checkFilterPredicate(iAttr < monthsLit(2), classOf[Lt[_]], resultFun(months(1)))
checkFilterPredicate(iAttr > monthsLit(3), classOf[Gt[_]], resultFun(months(4)))
checkFilterPredicate(iAttr <= monthsLit(1), classOf[LtEq[_]], resultFun(months(1)))
checkFilterPredicate(iAttr >= monthsLit(4), classOf[GtEq[_]], resultFun(months(4)))
checkFilterPredicate(monthsLit(1) === iAttr, classOf[Eq[_]], resultFun(months(1)))
checkFilterPredicate(monthsLit(1) <=> iAttr, classOf[Eq[_]], resultFun(months(1)))
checkFilterPredicate(monthsLit(2) > iAttr, classOf[Lt[_]], resultFun(months(1)))
checkFilterPredicate(monthsLit(3) < iAttr, classOf[Gt[_]], resultFun(months(4)))
checkFilterPredicate(monthsLit(1) >= iAttr, classOf[LtEq[_]], resultFun(months(1)))
checkFilterPredicate(monthsLit(4) <= iAttr, classOf[GtEq[_]], resultFun(months(4)))
checkFilterPredicate(!(iAttr < monthsLit(4)), classOf[GtEq[_]], resultFun(months(4)))
checkFilterPredicate(iAttr < monthsLit(2) || iAttr > monthsLit(3), classOf[Operators.Or],
Seq(Row(resultFun(months(1))), Row(resultFun(months(4)))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(iAttr, Array(2, 3, 4, 5, 6, 7).map(monthsLit)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(months(2))), Row(resultFun(months(3))), Row(resultFun(months(4)))))
}
}
}
}
test("SPARK-36866: filter pushdown - day-time interval") {
def secs(m: Int): Duration = Duration.ofSeconds(m)
def secsLit(m: Int): Literal = Literal(secs(m))
val data = (1 to 4).map(i => Tuple1(Option(secs(i))))
withNestedParquetDataFrame(data) { case (inputDF, colName, resultFun) =>
implicit val df: DataFrame = inputDF
val iAttr = df(colName).expr
assert(df(colName).expr.dataType === DayTimeIntervalType())
checkFilterPredicate(iAttr.isNull, classOf[Eq[_]], Seq.empty[Row])
checkFilterPredicate(iAttr.isNotNull, classOf[NotEq[_]],
(1 to 4).map(i => Row.apply(resultFun(secs(i)))))
checkFilterPredicate(iAttr === secsLit(1), classOf[Eq[_]], resultFun(secs(1)))
checkFilterPredicate(iAttr <=> secsLit(1), classOf[Eq[_]], resultFun(secs(1)))
checkFilterPredicate(iAttr =!= secsLit(1), classOf[NotEq[_]],
(2 to 4).map(i => Row.apply(resultFun(secs(i)))))
checkFilterPredicate(iAttr < secsLit(2), classOf[Lt[_]], resultFun(secs(1)))
checkFilterPredicate(iAttr > secsLit(3), classOf[Gt[_]], resultFun(secs(4)))
checkFilterPredicate(iAttr <= secsLit(1), classOf[LtEq[_]], resultFun(secs(1)))
checkFilterPredicate(iAttr >= secsLit(4), classOf[GtEq[_]], resultFun(secs(4)))
checkFilterPredicate(secsLit(1) === iAttr, classOf[Eq[_]], resultFun(secs(1)))
checkFilterPredicate(secsLit(1) <=> iAttr, classOf[Eq[_]], resultFun(secs(1)))
checkFilterPredicate(secsLit(2) > iAttr, classOf[Lt[_]], resultFun(secs(1)))
checkFilterPredicate(secsLit(3) < iAttr, classOf[Gt[_]], resultFun(secs(4)))
checkFilterPredicate(secsLit(1) >= iAttr, classOf[LtEq[_]], resultFun(secs(1)))
checkFilterPredicate(secsLit(4) <= iAttr, classOf[GtEq[_]], resultFun(secs(4)))
checkFilterPredicate(!(iAttr < secsLit(4)), classOf[GtEq[_]], resultFun(secs(4)))
checkFilterPredicate(iAttr < secsLit(2) || iAttr > secsLit(3), classOf[Operators.Or],
Seq(Row(resultFun(secs(1))), Row(resultFun(secs(4)))))
Seq(3, 20).foreach { threshold =>
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_INFILTERTHRESHOLD.key -> s"$threshold") {
checkFilterPredicate(
In(iAttr, Array(2, 3, 4, 5, 6, 7).map(secsLit)),
if (threshold == 3) classOf[Operators.And] else classOf[Operators.Or],
Seq(Row(resultFun(secs(2))), Row(resultFun(secs(3))), Row(resultFun(secs(4)))))
}
}
}
}
}
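// Runs the shared filter pushdown tests above through the Parquet file source V1 (DataSource V1) read path.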
@ExtendedSQLTest
class ParquetV1FilterSuite extends ParquetFilterSuite {
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_LIST, "parquet")
override def checkFilterPredicate(
df: DataFrame,
predicate: Predicate,
filterClass: Class[_ <: FilterPredicate],
checker: (DataFrame, Seq[Row]) => Unit,
expected: Seq[Row]): Unit = {
val output = predicate.collect { case a: Attribute => a }.distinct
Seq(("parquet", true), ("", false)).foreach { case (pushdownDsList, nestedPredicatePushdown) =>
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_DATE_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_TIMESTAMP_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_DECIMAL_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> "true",
// Disable adding filters from constraints because it adds, for instance,
// is-not-null to pushed filters, which makes it hard to test if the pushed
// filter is expected or not (this had to be fixed with SPARK-13495).
SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> InferFiltersFromConstraints.ruleName,
SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false",
SQLConf.NESTED_PREDICATE_PUSHDOWN_FILE_SOURCE_LIST.key -> pushdownDsList) {
val query = df
.select(output.map(e => Column(e)): _*)
.where(Column(predicate))
val nestedOrAttributes = predicate.collectFirst {
case g: GetStructField => g
case a: Attribute => a
}
assert(nestedOrAttributes.isDefined, "No GetStructField nor Attribute is detected.")
val parsed = parseColumnPath(
PushableColumnAndNestedColumn.unapply(nestedOrAttributes.get).get)
val containsNestedColumnOrDot = parsed.length > 1 || parsed(0).contains(".")
var maybeRelation: Option[HadoopFsRelation] = None
val maybeAnalyzedPredicate = query.queryExecution.optimizedPlan.collect {
case PhysicalOperation(_, filters,
LogicalRelation(relation: HadoopFsRelation, _, _, _)) =>
maybeRelation = Some(relation)
filters
}.flatten.reduceLeftOption(_ && _)
assert(maybeAnalyzedPredicate.isDefined, "No filter is analyzed from the given query")
val (_, selectedFilters, _) =
DataSourceStrategy.selectFilters(maybeRelation.get, maybeAnalyzedPredicate.toSeq)
// If predicates contains nested column or dot, we push down the predicates only if
// "parquet" is in `NESTED_PREDICATE_PUSHDOWN_V1_SOURCE_LIST`.
if (nestedPredicatePushdown || !containsNestedColumnOrDot) {
assert(selectedFilters.nonEmpty, "No filter is pushed down")
val schema = new SparkToParquetSchemaConverter(conf).convert(df.schema)
val parquetFilters = createParquetFilters(schema)
// In this test suite, all the simple predicates are convertible here.
assert(parquetFilters.convertibleFilters(selectedFilters) === selectedFilters)
val pushedParquetFilters = selectedFilters.map { pred =>
val maybeFilter = parquetFilters.createFilter(pred)
assert(maybeFilter.isDefined, s"Couldn't generate filter predicate for $pred")
maybeFilter.get
}
// Doesn't bother checking type parameters here (e.g. `Eq[Integer]`)
assert(pushedParquetFilters.exists(_.getClass === filterClass),
s"${pushedParquetFilters.map(_.getClass).toList} did not contain ${filterClass}.")
checker(stripSparkFilter(query), expected)
} else {
assert(selectedFilters.isEmpty, "There should be no filter pushed down")
}
}
}
}
}
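// Runs the shared filter pushdown tests above through the Parquet file source V2 (DataSource V2) read path.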
@ExtendedSQLTest
class ParquetV2FilterSuite extends ParquetFilterSuite {
// TODO: enable Parquet V2 write path after file source V2 writers are workable.
override protected def sparkConf: SparkConf =
super
.sparkConf
.set(SQLConf.USE_V1_SOURCE_LIST, "")
override def checkFilterPredicate(
df: DataFrame,
predicate: Predicate,
filterClass: Class[_ <: FilterPredicate],
checker: (DataFrame, Seq[Row]) => Unit,
expected: Seq[Row]): Unit = {
val output = predicate.collect { case a: Attribute => a }.distinct
withSQLConf(
SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_DATE_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_TIMESTAMP_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_DECIMAL_ENABLED.key -> "true",
SQLConf.PARQUET_FILTER_PUSHDOWN_STRING_STARTSWITH_ENABLED.key -> "true",
// Disable adding filters from constraints because it adds, for instance,
// is-not-null to pushed filters, which makes it hard to test if the pushed
// filter is expected or not (this had to be fixed with SPARK-13495).
SQLConf.OPTIMIZER_EXCLUDED_RULES.key -> InferFiltersFromConstraints.ruleName,
SQLConf.PARQUET_VECTORIZED_READER_ENABLED.key -> "false") {
val query = df
.select(output.map(e => Column(e)): _*)
.where(Column(predicate))
query.queryExecution.optimizedPlan.collectFirst {
case PhysicalOperation(_, filters,
DataSourceV2ScanRelation(_, scan: ParquetScan, _)) =>
assert(filters.nonEmpty, "No filter is analyzed from the given query")
val sourceFilters = filters.flatMap(DataSourceStrategy.translateFilter(_, true)).toArray
val pushedFilters = scan.pushedFilters
assert(pushedFilters.nonEmpty, "No filter is pushed down")
val schema = new SparkToParquetSchemaConverter(conf).convert(df.schema)
val parquetFilters = createParquetFilters(schema)
// In this test suite, all the simple predicates are convertible here.
assert(parquetFilters.convertibleFilters(sourceFilters) === pushedFilters)
val pushedParquetFilters = pushedFilters.map { pred =>
val maybeFilter = parquetFilters.createFilter(pred)
assert(maybeFilter.isDefined, s"Couldn't generate filter predicate for $pred")
maybeFilter.get
}
// Doesn't bother checking type parameters here (e.g. `Eq[Integer]`)
assert(pushedParquetFilters.exists(_.getClass === filterClass),
s"${pushedParquetFilters.map(_.getClass).toList} did not contain ${filterClass}.")
checker(stripSparkFilter(query), expected)
case _ =>
throw new AnalysisException("Can not match ParquetTable in the query.")
}
}
}
test("SPARK-36889: Respect disabling of filters pushdown for DSv2 by explain") {
import testImplicits._
withSQLConf(SQLConf.PARQUET_FILTER_PUSHDOWN_ENABLED.key -> "false") {
withTempPath { path =>
Seq(1, 2).toDF("c0").write.parquet(path.getAbsolutePath)
val readback = spark.read.parquet(path.getAbsolutePath).where("c0 == 1")
val explain = readback.queryExecution.explainString(ExplainMode.fromString("extended"))
assert(explain.contains("PushedFilters: []"))
}
}
}
}
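// Integer-summing accumulator used by the row-group tests above to observe how many Parquet
// row groups are actually read; a value of 0 means every row group was skipped by the
// pushed-down filter.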
class NumRowGroupsAcc extends AccumulatorV2[Integer, Integer] {
private var _sum = 0
override def isZero: Boolean = _sum == 0
override def copy(): AccumulatorV2[Integer, Integer] = {
val acc = new NumRowGroupsAcc()
acc._sum = _sum
acc
}
override def reset(): Unit = _sum = 0
override def add(v: Integer): Unit = _sum += v
override def merge(other: AccumulatorV2[Integer, Integer]): Unit = other match {
case a: NumRowGroupsAcc => _sum += a._sum
case _ => throw new UnsupportedOperationException(
s"Cannot merge ${this.getClass.getName} with ${other.getClass.getName}")
}
override def value: Integer = _sum
}
|
shaneknapp/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetFilterSuite.scala
|
Scala
|
apache-2.0
| 89,198 |
package org.viz.lightning
import org.scalatest.{BeforeAndAfterAll, FunSuite}
import org.viz.lightning.types.Make
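// Smoke test that creates a session against the public lightning-viz server and renders a 3D scatter plot.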
class LightningThreeSuite extends FunSuite with BeforeAndAfterAll {
var lgn: Lightning = _
override def beforeAll() {
lgn = Lightning("http://public.lightning-viz.org")
lgn.createSession("test-three")
}
test("scatter3") {
lgn.scatter3(x = Make.values(n = 20),
y = Make.values(n = 20),
z = Make.values(n = 20),
label = Make.labels(n = 20),
size = Make.sizes(n = 20))
}
}
|
lightning-viz/lightning-scala
|
src/test/scala/org/viz/lightning/LightningThreeSuite.scala
|
Scala
|
mit
| 579 |
/**
* Copyright 2016 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.identity.components.cli
import java.io.File
import java.io.PrintStream
import java.io.InputStream
import java.net.URI
import javax.xml.transform.Source
import javax.xml.transform.stream.StreamSource
import org.clapper.argot.ArgotConverters._
import org.clapper.argot.{ArgotParser, ArgotUsageException}
import com.martiansoftware.nailgun.NGContext
import net.sf.saxon.s9api.Serializer
import net.sf.saxon.s9api.Destination
import com.rackspace.com.papi.components.checker.util.URLResolver
import com.rackspace.identity.components.AttributeMapper
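/**
 * Command-line (and Nailgun) entry point that converts an XML attribute mapping policy
 * into its JSON representation via AttributeMapper.policy2JSON.
 */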
object AttribMap2JSON {
val title = getClass.getPackage.getImplementationTitle
val version = getClass.getPackage.getImplementationVersion
def parseArgs(args: Array[String], // scalastyle:ignore
base: String, // This method is longer than 50 lines due to locally defined methods.
in: InputStream,
out: PrintStream,
err: PrintStream): Option[(Source, Destination, Boolean, String)] = {
val parser = new ArgotParser("attribmap2json", preUsage=Some(s"$title v$version"))
val policy = parser.parameter[String]("policy",
"Attribute mapping policy (in XML format)",
false)
val output = parser.parameter[String]("output",
"Output file. If not specified, stdout will be used.",
true)
val dontValidate = parser.flag[Boolean](List("D", "dont-validate"),
"Disable Validation (Validation will be enabled by default)")
val xsdEngine = parser.option[String](List("x", "xsd-engine"), "xsd-engine",
"XSD Engine to use. Valid values are auto, saxon, xerces (default is auto)")
val help = parser.flag[Boolean] (List("h", "help"),
"Display usage.")
val printVersion = parser.flag[Boolean] (List("version"),
"Display version.")
def policySource : Source = new StreamSource(URLResolver.toAbsoluteSystemId(policy.value.get, base))
def destination : Destination = {
if (output.value.isEmpty) {
AttributeMapper.processor.newSerializer(out)
} else {
AttributeMapper.processor.newSerializer(new File(new URI(URLResolver.toAbsoluteSystemId(output.value.get, base))))
}
}
try {
parser.parse(args)
if (help.value.getOrElse(false)) {
parser.usage() // throws ArgotUsageException
}
if (printVersion.value.getOrElse(false)) {
err.println(s"$title v$version") // scalastyle:ignore
None
} else {
Some((policySource, destination, !dontValidate.value.getOrElse(false),
xsdEngine.value.getOrElse("auto")))
}
} catch {
case e: ArgotUsageException => err.println(e.message) // scalastyle:ignore
None
case iae : IllegalArgumentException => err.println(iae.getMessage) // scalastyle:ignore
None
}
}
private def getBaseFromWorkingDir (workingDir : String) : String = {
(new File(workingDir)).toURI().toString
}
//
// Local run...
//
def main(args : Array[String]): Unit = {
parseArgs (args, getBaseFromWorkingDir(System.getProperty("user.dir")),
System.in, System.out, System.err) match {
case Some((policy : Source, dest : Destination, validate : Boolean, xsdEngine : String)) =>
AttributeMapper.policy2JSON (policy, dest, validate, xsdEngine)
case None => /* Bad args, Ignore */
}
}
//
// Nailgun run...
//
def nailMain(context : NGContext): Unit = {
parseArgs (context.getArgs, getBaseFromWorkingDir(context.getWorkingDirectory),
context.in, context.out, context.err) match {
case Some((policy : Source, dest : Destination, validate : Boolean, xsdEngine : String)) =>
AttributeMapper.policy2JSON (policy, dest, validate, xsdEngine)
case None => /* Bad args, Ignore */
}
}
}
|
RackerWilliams/attributeMapping
|
cli/attribmap2json/src/main/scala/com/rackspace/identity/components/cli/AttribMap2JSON.scala
|
Scala
|
apache-2.0
| 4,856 |
/*
* Copyright 2013 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s
package parser
import cats.data.NonEmptyList
import com.comcast.ip4s._
import org.http4s.EntityTag.Strong
import org.http4s.EntityTag.Weak
import org.http4s.headers._
import org.http4s.syntax.header._
import org.typelevel.ci._
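// Round-trip parsing tests for the modelled headers in org.http4s.headers.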
class SimpleHeadersSpec extends Http4sSuite {
test("parse Accept-Patch") {
def parse(value: String) = `Accept-Patch`.parse(value)
val header =
`Accept-Patch`(
NonEmptyList.of(new MediaType("text", "example", extensions = Map("charset" -> "utf-8")))
)
assertEquals(parse(header.value), Right(header))
val multipleMediaTypes =
`Accept-Patch`(
NonEmptyList
.of(new MediaType("application", "example"), new MediaType("text", "example"))
)
assertEquals(parse(multipleMediaTypes.value), Right(multipleMediaTypes))
assert(parse("foo; bar").isLeft)
}
test("parse Access-Control-Allow-Headers") {
val header = `Access-Control-Allow-Headers`(
ci"Accept",
ci"Expires",
ci"X-Custom-Header",
ci"*",
)
assertEquals(`Access-Control-Allow-Headers`.parse(header.toRaw1.value), Right(header))
val invalidHeaderValue = "(non-token-name), non[&token]name"
assert(`Access-Control-Allow-Headers`.parse(invalidHeaderValue).isLeft)
assertEquals(
`Access-Control-Allow-Headers`.parse(""),
Right(`Access-Control-Allow-Headers`.empty),
)
}
test("parse Access-Control-Expose-Headers") {
val header = `Access-Control-Expose-Headers`(
ci"Content-Length",
ci"Authorization",
ci"X-Custom-Header",
ci"*",
)
assertEquals(`Access-Control-Expose-Headers`.parse(header.toRaw1.value), Right(header))
val invalidHeaderValue = "(non-token-name), non[&token]name"
assert(`Access-Control-Expose-Headers`.parse(invalidHeaderValue).isLeft)
assertEquals(
`Access-Control-Expose-Headers`.parse(""),
Right(`Access-Control-Expose-Headers`.empty),
)
}
test("parse Connection") {
val header = Connection(ci"closed")
assertEquals(Connection.parse(header.toRaw1.value), Right(header))
}
test("Parse Content-Length") {
assertEquals(`Content-Length`.parse("4"), Right(`Content-Length`.unsafeFromLong(4)))
assert(`Content-Length`.parse("foo").isLeft)
}
test("SimpleHeaders should parse Content-Encoding") {
val header = `Content-Encoding`(ContentCoding.`pack200-gzip`)
assertEquals(`Content-Encoding`.parse(header.value), Right(header))
}
test("SimpleHeaders should parse Content-Disposition") {
val header = `Content-Disposition`("foo", Map(ci"one" -> "two", ci"three" -> "four"))
val parse = `Content-Disposition`.parse(_)
assertEquals(parse(header.value), Right(header))
assert(parse("foo; bar").isLeft)
}
test("SimpleHeaders should parse Date") { // mills are lost, get rid of them
val header = Date(HttpDate.Epoch)
val stringRepr = "Thu, 01 Jan 1970 00:00:00 GMT"
assertEquals(Date(HttpDate.Epoch).value, stringRepr)
assertEquals(Date.parse(stringRepr), Right(header))
assert(Date.parse("foo").isLeft)
}
test("SimpleHeaders should parse Host") {
val header1 = headers.Host("foo", Some(5))
assertEquals(headers.Host.parse("foo:5"), Right(header1))
val header2 = headers.Host("foo", None)
assertEquals(headers.Host.parse("foo"), Right(header2))
assert(headers.Host.parse("foo:bar").isLeft)
}
test("parse Access-Control-Allow-Credentials") {
assert(`Access-Control-Allow-Credentials`.parse("false").isLeft)
// it is case sensitive
assert(`Access-Control-Allow-Credentials`.parse("True").isLeft)
}
test("SimpleHeaders should parse Last-Modified") {
val header = `Last-Modified`(HttpDate.Epoch)
val stringRepr = "Thu, 01 Jan 1970 00:00:00 GMT"
assertEquals(header.value, stringRepr)
assertEquals(Header[`Last-Modified`].parse(stringRepr), Right(header))
assert(Header[`Last-Modified`].parse("foo").isLeft)
}
test("SimpleHeaders should parse ETag") {
assertEquals(ETag.EntityTag("hash", Weak).toString(), "W/\\"hash\\"")
assertEquals(ETag.EntityTag("hash", Strong).toString(), "\\"hash\\"")
val headers = Seq("\\"hash\\"", "W/\\"hash\\"")
headers.foreach { header =>
assertEquals(ETag.parse(header).map(_.value), Right(header))
}
}
test("SimpleHeaders should parse If-None-Match") {
val headers = Seq(
`If-None-Match`(EntityTag("hash")),
`If-None-Match`(EntityTag("123-999")),
`If-None-Match`(EntityTag("123-999"), EntityTag("hash")),
`If-None-Match`(EntityTag("123-999", Weak), EntityTag("hash")),
`If-None-Match`.`*`,
)
headers.foreach { header =>
assertEquals(`If-None-Match`.parse(header.value), Right(header))
}
}
test("parse Max-Forwards") {
val headers = Seq(
`Max-Forwards`.unsafeFromLong(0),
`Max-Forwards`.unsafeFromLong(100),
)
headers.foreach { header =>
assertEquals(Header[`Max-Forwards`].parse(header.value), Right(header))
}
}
test("SimpleHeaders should parse Transfer-Encoding") {
val header = `Transfer-Encoding`(TransferCoding.chunked)
assertEquals(`Transfer-Encoding`.parse(header.value), Right(header))
val header2 = `Transfer-Encoding`(TransferCoding.compress)
assertEquals(`Transfer-Encoding`.parse(header2.value), Right(header2))
}
test("SimpleHeaders should parse User-Agent") {
val header = `User-Agent`(ProductId("foo", Some("bar")), List(ProductId("foo")))
assertEquals(header.value, "foo/bar foo")
assertEquals(`User-Agent`.parse(header.value), Right(header))
val header2 =
`User-Agent`(ProductId("foo"), List(ProductId("bar", Some("biz")), ProductComment("blah")))
assertEquals(header2.value, "foo bar/biz (blah)")
assertEquals(`User-Agent`.parse(header2.value), Right(header2))
val headerstr = "Mozilla/5.0 (Android; Mobile; rv:30.0) Gecko/30.0 Firefox/30.0"
val headerraw = Header.Raw(`User-Agent`.name, headerstr)
val parsed = `User-Agent`.parse(headerraw.value)
assertEquals(
parsed,
Right(
`User-Agent`(
ProductId("Mozilla", Some("5.0")),
List(
ProductComment("Android; Mobile; rv:30.0"),
ProductId("Gecko", Some("30.0")),
ProductId("Firefox", Some("30.0")),
),
)
),
)
assertEquals(parsed.map(_.value), Right(headerstr))
}
test("parse Server") {
val header = Server(ProductId("foo", Some("bar")), List(ProductComment("foo")))
assertEquals(header.value, "foo/bar (foo)")
assertEquals(Server.parse(header.toRaw1.value), Right(header))
val header2 =
Server(ProductId("foo"), List(ProductId("bar", Some("biz")), ProductComment("blah")))
assertEquals(header2.value, "foo bar/biz (blah)")
assertEquals(Server.parse(header2.toRaw1.value), Right(header2))
val headerstr = "nginx/1.14.0 (Ubuntu)"
assertEquals(
Server.parse(headerstr),
Right(
Server(
ProductId("nginx", Some("1.14.0")),
List(
ProductComment("Ubuntu")
),
)
),
)
val headerstr2 = "CERN/3.0 libwww/2.17"
assertEquals(
Server.parse(headerstr2),
Right(
Server(
ProductId("CERN", Some("3.0")),
List(
ProductId("libwww", Some("2.17"))
),
)
),
)
}
test("SimpleHeaders should parse X-Forwarded-For") {
// ipv4
val header2 = `X-Forwarded-For`(NonEmptyList.of(Some(ipv4"127.0.0.1")))
assertEquals(`X-Forwarded-For`.parse(header2.toRaw1.value), Right(header2))
// ipv6
val header3 = `X-Forwarded-For`(
NonEmptyList.of(Some(ipv6"::1"), Some(ipv6"2001:0db8:85a3:0000:0000:8a2e:0370:7334"))
)
assertEquals(`X-Forwarded-For`.parse(header3.toRaw1.value), Right(header3))
// "unknown"
val header4 = `X-Forwarded-For`(NonEmptyList.of(None))
assertEquals(`X-Forwarded-For`.parse(header4.toRaw1.value), Right(header4))
val bad = "foo"
assert(`X-Forwarded-For`.parse(bad).isLeft)
val bad2 = "256.56.56.56"
assert(`X-Forwarded-For`.parse(bad2).isLeft)
}
}
|
http4s/http4s
|
tests/shared/src/test/scala/org/http4s/parser/SimpleHeadersSpec.scala
|
Scala
|
apache-2.0
| 8,789 |
package com.twitter.zk
import scala.collection.{Seq, Set}
import scala.jdk.CollectionConverters._
import org.apache.zookeeper.common.PathUtils
import org.apache.zookeeper.data.{ACL, Stat}
import org.apache.zookeeper.{CreateMode, KeeperException, WatchedEvent}
import com.twitter.concurrent.{Broker, Offer}
import com.twitter.util.{Future, Return, Throw, Try}
import com.twitter.logging.Logger
/**
* A handle to a ZNode attached to a ZkClient
*/
trait ZNode {
/** Absolute path of ZNode */
val path: String
protected[zk] val zkClient: ZkClient
protected[this] lazy val log: Logger = zkClient.log
override def hashCode: Int = path.hashCode
override def toString: String = "ZNode(%s)".format(path)
/** ZNodes are equal if they share a path. */
override def equals(other: Any): Boolean = other match {
case z @ ZNode(_) => (z.hashCode == hashCode)
case _ => false
}
/*
* Helpers
*/
/** Return the ZkClient associated with this node. */
def client: ZkClient = zkClient
/** Get a child node. */
def apply(child: String): ZNode = ZNode(zkClient, childPath(child))
/** Build a ZNode with its metadata. */
def apply(stat: Stat): ZNode.Exists = ZNode.Exists(this, stat)
/** Build a ZNode with its metadata and children. */
def apply(stat: Stat, children: Seq[String]): ZNode.Children =
ZNode.Children(this, stat, children)
/** Build a ZNode with its metadata and data. */
def apply(stat: Stat, bytes: Array[Byte]): ZNode.Data = ZNode.Data(this, stat, bytes)
/** The 'basename' of the ZNode path. */
lazy val name: String = path.lastIndexOf('/') match {
case i if (i == -1 || i == path.length - 1) => ""
case i => path.substring(i + 1)
}
/** The parent node. The root node is its own parent. */
lazy val parent: ZNode = ZNode(zkClient, parentPath)
lazy val parentPath: String = path.lastIndexOf('/') match {
case i if (i <= 0) => "/"
case i => path.substring(0, i)
}
/** The absolute path of a child */
def childPath(child: String): String = path match {
case path if (!path.endsWith("/")) => path + "/" + child
case path => path + child
}
/** Create a copy of this ZNode with an alternate ZkClient. */
def withZkClient(zk: ZkClient): ZNode = ZNode(zk, path)
/*
* Remote node operations
*/
/**
* Create this ZNode; or if a child name is specified create that child.
*/
def create(
data: Array[Byte] = Array.empty[Byte],
acls: Seq[ACL] = zkClient.acl,
mode: CreateMode = zkClient.mode,
child: Option[String] = None
): Future[ZNode] = {
val creatingPath = child map { "%s/%s".format(path, _) } getOrElse path
zkClient.retrying { zk =>
val result = new StringCallbackPromise
zk.create(creatingPath, data, acls.asJava, mode, result, null)
result map { newPath => zkClient(newPath) }
}
}
/** Returns a Future that is satisfied with this ZNode */
def delete(version: Int = 0): Future[ZNode] = zkClient.retrying { zk =>
val result = new UnitCallbackPromise
zk.delete(path, version, result, null)
result map { _ => this }
}
/** Returns a Future that is satisfied with this ZNode with its metadata and data */
def setData(data: Array[Byte], version: Int): Future[ZNode.Data] = zkClient.retrying { zk =>
val result = new ExistsCallbackPromise(this)
zk.setData(path, data, version, result, null)
result map { _.apply(data) }
}
/** Returns a Future that is satisfied with a reference to this ZNode */
def sync(): Future[ZNode] = zkClient.retrying { zk =>
val result = new UnitCallbackPromise
zk.sync(path, result, null)
result map { _ => this }
}
/** Provides access to this node's children. */
val getChildren: ZOp[ZNode.Children] = new ZOp[ZNode.Children] {
import LiftableFuture._
/** Get this ZNode with its metadata and children */
def apply(): Future[ZNode.Children] = zkClient.retrying { zk =>
val result = new ChildrenCallbackPromise(ZNode.this)
zk.getChildren(path, false, result, null)
result
}
/**
* Get a ZNode with its metadata and children; and install a watch for changes.
*
* The returned ZNode.Watch encapsulates the return value from a ZNode operation and the
* watch that will fire when a ZNode operation completes. If the ZNode does not exist, the
* result will be a Throw containing a KeeperException.NoNodeExists, though the watch will
* fire when an event occurs. If any other errors occur when fetching the ZNode, the returned
* Future will error without returning a Watch.
*/
def watch() = zkClient.retrying { zk =>
val result = new ChildrenCallbackPromise(ZNode.this)
val update = new EventPromise
zk.getChildren(path, update, result, null)
result.liftNoNode map { ZNode.Watch(_, update) }
}
}
/** Provides access to this node's data. */
val getData: ZOp[ZNode.Data] = new ZOp[ZNode.Data] {
import LiftableFuture._
/** Get this node's data */
def apply(): Future[ZNode.Data] = zkClient.retrying { zk =>
val result = new DataCallbackPromise(ZNode.this)
zk.getData(path, false, result, null)
result
}
/**
* Get this node's metadata and data; and install a watch for changes.
*
* The returned ZNode.Watch encapsulates the return value from a ZNode operation and the
* watch that will fire when a ZNode operation completes. If the ZNode does not exist, the
* result will be a Throw containing a KeeperException.NoNodeExists, though the watch will
* fire when an event occurs. If any other errors occur when fetching the ZNode, the returned
* Future will error without returning a Watch.
*/
def watch() = zkClient.retrying { zk =>
val result = new DataCallbackPromise(ZNode.this)
val update = new EventPromise
zk.getData(path, update, result, null)
result.liftNoNode map { ZNode.Watch(_, update) }
}
}
/** Provides access to this node's metadata. */
val exists: ZOp[ZNode.Exists] = new ZOp[ZNode.Exists] {
import LiftableFuture._
/** Get this node's metadata. */
def apply() = zkClient.retrying { zk =>
val result = new ExistsCallbackPromise(ZNode.this)
zk.exists(path, false, result, null)
result
}
/** Get this node's metadata and watch for updates */
def watch() = zkClient.retrying { zk =>
val result = new ExistsCallbackPromise(ZNode.this)
val update = new EventPromise
zk.exists(path, update, result, null)
result.liftNoNode.map { ZNode.Watch(_, update) }
}
}
/**
* Continuously watch all nodes in this subtree for child updates.
*
* A ZNode.TreeUpdate is offered for each node in the tree.
*
* If this node is deleted and it had children, an offer is sent indicating that this
* node no longer has children. A watch is maintained on deleted nodes so that if the
* parent node is not monitored, the monitor continues to work when the node is restored.
*
* If an authorization failure or session expiration is encountered, the monitor will be lost
* silently. To detect these situations, receive events from ZkClient.monitorSession().
*/
def monitorTree(): Offer[ZNode.TreeUpdate] = {
val broker = new Broker[ZNode.TreeUpdate]
/** Pipe events from a subtree's monitor to this broker. */
def pipeSubTreeUpdates(next: Offer[ZNode.TreeUpdate]): Unit = {
next.sync().flatMap(broker ! _).onSuccess { _ => pipeSubTreeUpdates(next) }
}
/** Monitor a watch on this node. */
def monitorWatch(
watch: Future[ZNode.Watch[ZNode.Children]],
knownChildren: Set[ZNode]
): Unit = {
log.debug("monitoring %s with %d known children", path, knownChildren.size)
watch onFailure { e =>
// An error occurred and there's not really anything we can do about it.
log.error(e, "%s: watch could not be established".format(path))
} onSuccess {
// When a node is fetched with a watch, send a ZNode.TreeUpdate on the broker, and start
// monitoring
case ZNode.Watch(Return(zparent), eventUpdate) => {
val children = zparent.children.toSet
val treeUpdate = ZNode.TreeUpdate(
zparent,
added = children -- knownChildren,
removed = knownChildren -- children
)
log.debug("updating %s with %d children", path, treeUpdate.added.size)
broker.send(treeUpdate).sync.onSuccess { _ =>
log.debug("updated %s with %d children", path, treeUpdate.added.size)
treeUpdate.added foreach { z => pipeSubTreeUpdates(z.monitorTree()) }
eventUpdate onSuccess { event =>
log.debug("event received on %s: %s", path, event)
} onSuccess {
case MonitorableEvent() => monitorWatch(zparent.getChildren.watch(), children)
case event => log.debug("Unmonitorable event: %s: %s", path, event)
}
}
}
case ZNode.Watch(Throw(ZNode.Error(_path)), eventUpdate) => {
// Tell the broker about the children we lost; otherwise, if there were no children,
// this deletion should be reflected in a watch on the parent node, if one exists.
          if (knownChildren.nonEmpty) {
broker.send(ZNode.TreeUpdate(this, removed = knownChildren)).sync
} else {
Future.Done
} onSuccess { _ =>
eventUpdate onSuccess {
case MonitorableEvent() => monitorWatch(parent.getChildren.watch(), Set.empty[ZNode])
case event => log.debug("Unmonitorable event: %s: %s", path, event)
}
}
}
}
}
// Initially, we don't know about any children for the node.
monitorWatch(getChildren.watch(), Set.empty[ZNode])
broker.recv
}
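  // Usage sketch (illustrative, not from the original source): consuming the Offer returned by
  // monitorTree(). Assumes an in-scope `znode: ZNode`.
  //
  //   def consume(updates: Offer[ZNode.TreeUpdate]): Unit = {
  //     updates.sync() onSuccess { treeUpdate =>
  //       treeUpdate.added   foreach { z => println("added:   " + z.path) }
  //       treeUpdate.removed foreach { z => println("removed: " + z.path) }
  //       consume(updates)  // loop to keep receiving updates
  //     }
  //   }
  //   consume(znode.monitorTree())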
/** AuthFailed and Expired are unmonitorable. Everything else can be resumed. */
protected[this] object MonitorableEvent {
def unapply(event: WatchedEvent): Boolean = event match {
case StateEvent.AuthFailed() => false
case StateEvent.Expired() => false
case _ => true
}
}
}
/**
* ZNode utilities and return types.
*/
object ZNode {
/** Build a ZNode */
def apply(zk: ZkClient, _path: String): ZNode = new ZNode {
PathUtils.validatePath(_path)
protected[zk] val zkClient = zk
val path = _path
}
/** matcher */
def unapply(znode: ZNode): Some[String] = Some(znode.path)
/** A matcher for KeeperExceptions that have a non-null path. */
object Error {
def unapply(ke: KeeperException): Option[String] = Option(ke.getPath)
}
/** A ZNode with its Stat metadata. */
trait Exists extends ZNode {
val stat: Stat
override def equals(other: Any): Boolean = other match {
case Exists(p, s) => (p == path && s == stat)
case o => super.equals(o)
}
def apply(children: Seq[String]): ZNode.Children = apply(stat, children)
def apply(bytes: Array[Byte]): ZNode.Data = apply(stat, bytes)
}
object Exists {
def apply(znode: ZNode, _stat: Stat): Exists = new Exists {
val path = znode.path
protected[zk] val zkClient = znode.zkClient
val stat = _stat
}
def apply(znode: Exists): Exists = apply(znode, znode.stat)
def unapply(znode: Exists): Some[(String, Stat)] = Some((znode.path, znode.stat))
}
/** A ZNode with its Stat metadata and children znodes. */
trait Children extends Exists {
val stat: Stat
val children: Seq[ZNode]
override def equals(other: Any): Boolean = other match {
case Children(p, s, c) => (p == path && s == stat && c == children)
case o => super.equals(o)
}
}
object Children {
def apply(znode: Exists, _children: Seq[ZNode]): Children = new Children {
val path = znode.path
protected[zk] val zkClient = znode.zkClient
val stat = znode.stat
val children = _children
}
def apply(znode: ZNode, stat: Stat, children: Seq[String]): Children = {
apply(Exists(znode, stat), children.map(znode.apply))
}
def unapply(z: Children): Some[(String, Stat, Seq[ZNode])] = Some((z.path, z.stat, z.children))
}
/** A ZNode with its Stat metadata and data. */
trait Data extends Exists {
val stat: Stat
val bytes: Array[Byte]
override def equals(other: Any): Boolean = other match {
case Data(p, s, b) => (p == path && s == stat && b == bytes)
case o => super.equals(o)
}
}
object Data {
def apply(znode: ZNode, _stat: Stat, _bytes: Array[Byte]): Data = new Data {
val path = znode.path
protected[zk] val zkClient = znode.zkClient
val stat = _stat
val bytes = _bytes
}
def apply(znode: Exists, bytes: Array[Byte]): Data = apply(znode, znode.stat, bytes)
def unapply(znode: Data): Some[(String, Stat, Array[Byte])] =
Some((znode.path, znode.stat, znode.bytes))
}
case class Watch[T <: Exists](result: Try[T], update: Future[WatchedEvent]) {
/** Map this Watch to one of another type. */
def map[V <: Exists](toV: T => V): Watch[V] = new Watch(result.map(toV), update)
}
/** Describes an update to a node's children. */
case class TreeUpdate(
parent: ZNode,
added: Set[ZNode] = Set.empty[ZNode],
removed: Set[ZNode] = Set.empty[ZNode])
}
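// Usage sketch (illustrative, not from the original source): building a ZNode from a ZkClient
// and issuing basic operations. Assumes an in-scope `zkClient: ZkClient`; the path is made up.
//
//   val node = ZNode(zkClient, "/service/members")
//   node.exists() onSuccess { e => println(e.path + " @ version " + e.stat.getVersion) }
//   node.getChildren() onSuccess { c => c.children foreach { child => println(child.path) } }
//   node.getData() onSuccess { d => println(new String(d.bytes, "UTF-8")) }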
| twitter/util | util-zk/src/main/scala/com/twitter/zk/ZNode.scala | Scala | apache-2.0 | 13,405 |
//-*- coding: utf-8-unix -*-
/**
* (C) IT Sky Consulting GmbH 2014
* http://www.it-sky-consulting.com/
* Author: Karl Brodowsky
* Date: 2014-05-20
* License: GPL v2 (See https://de.wikipedia.org/wiki/GNU_General_Public_License )
*
*/
import scala.annotation.tailrec
object FactorialTailRecWeakPrivate {
def main(args : Array[String]) : Unit = {
    // Read n from the first command-line argument and print "n!=<value>".
    val n : BigInt = BigInt(args(0))
    val f : FactorialTailRecWeakPrivate = new FactorialTailRecWeakPrivate()
    val fOfN : BigInt = f.factorial(n)
    println("" + n + "!=" + fOfN)
}
}
class FactorialTailRecWeakPrivate {
  /** Computes n! by delegating to the tail-recursive helper with an accumulator of 1. */
  def factorial(n : BigInt) : BigInt = {
    factorial2(n, 1)
  }
  /** Tail-recursive helper: `partial` accumulates the product computed so far. */
  @tailrec
  private def factorial2(n : BigInt, partial : BigInt) : BigInt = {
if (n <= 1) {
partial
} else {
factorial2(n - 1, partial * n)
}
}
}
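// For contrast, a minimal sketch (hypothetical, not part of the original file): the naive
// definition below is not tail-recursive, because the multiplication happens after the
// recursive call returns, so the compiler would reject an @tailrec annotation on it.
//
//   // @tailrec  // would not compile: the recursive call is not in tail position
//   def factorialNaive(n : BigInt) : BigInt =
//     if (n <= 1) 1 else n * factorialNaive(n - 1)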
| 8l/sysprogramming-examples | scala/FactorialTailRecWeakPrivate.scala | Scala | gpl-2.0 | 827 |