code
stringlengths 5
1M
| repo_name
stringlengths 5
109
| path
stringlengths 6
208
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 5
1M
|
---|---|---|---|---|---|
package com.mishiranu.instantimage.util
import java.io.File

import scala.util.Try

import android.content.Context
import android.webkit.MimeTypeMap
object FileManager {
  /** Files older than this (24 hours, in milliseconds) are purged by [[cleanup]]. */
  private val MaxAgeMillis: Long = 24L * 60 * 60 * 1000

  /**
   * Deletes every file in the application's private files directory whose
   * last-modified timestamp is more than 24 hours in the past.
   */
  def cleanup(context: Context): Unit = {
    val threshold = System.currentTimeMillis - MaxAgeMillis
    Option(context.getFilesDir.listFiles).getOrElse(Array())
      .filter(_.lastModified < threshold)
      .foreach(_.delete())
  }

  /**
   * Builds a unique simple file name of the form "&lt;millis&gt;.&lt;extension&gt;".
   * The extension is taken from the original URI string when it looks plausible
   * (text after the last '.', with no '/' following it, non-empty, at most
   * 5 characters); otherwise "jpeg" is used.
   */
  def obtainSimpleFileName(originalUriString: String): String = {
    val extension = Option(originalUriString).flatMap { uriString =>
      val index = uriString.lastIndexOf('.')
      // Reject a '.' that belongs to a path segment rather than an extension.
      if (index >= 0 && uriString.indexOf('/', index) == -1) Some(uriString.substring(index + 1)) else None
    }.filter(e => e.nonEmpty && e.length <= 5).getOrElse("jpeg")
    System.currentTimeMillis + "." + extension
  }

  /**
   * A stored image file.
   *
   * @param contentId   numeric id embedded in the file name
   * @param displayName "&lt;contentId&gt;-&lt;simpleName&gt;" as produced by [[obtainFile]]
   * @param file        location on disk
   */
  case class FileItem(contentId: Int, displayName: String, file: File) {
    /** MIME type guessed from the display name's extension; "image/jpeg" when unknown. */
    def mimeType: String = {
      val extension = {
        val index = displayName.lastIndexOf('.')
        if (index >= 0) displayName.substring(index + 1) else "jpeg"
      }
      // Fix: keep only non-empty lookup results. The previous chain
      // (.filter(_ != null).filter(_.isEmpty)) kept only EMPTY strings,
      // discarding every successful MIME lookup.
      Option(MimeTypeMap.getSingleton.getMimeTypeFromExtension(extension))
        .filter(_.nonEmpty)
        .getOrElse("image/jpeg")
    }
  }

  /** Creates a FileItem for new content, naming it "&lt;contentId&gt;-&lt;simpleName&gt;". */
  def obtainFile(context: Context, originalUriString: String, contentId: Int): FileItem = {
    val filesDir = context.getFilesDir
    filesDir.mkdirs()
    val displayName = contentId + "-" + obtainSimpleFileName(originalUriString)
    FileItem(contentId, displayName, new File(filesDir, displayName))
  }

  /**
   * Lists all stored files whose names match "&lt;contentId&gt;-&lt;name&gt;".
   * Files with malformed names (wrong segment count or a non-numeric id)
   * are skipped instead of throwing NumberFormatException.
   */
  def listFiles(context: Context): List[FileItem] = {
    Option(context.getFilesDir.listFiles).getOrElse(Array()).toList.flatMap { file =>
      file.getName.split("-") match {
        case Array(id, name) => Try(id.toInt).toOption.map(FileItem(_, name, file))
        case _ => None
      }
    }
  }

  /** Finds the stored file with the given content id, if any. */
  def findFile(context: Context, contentId: Int): Option[FileItem] = {
    listFiles(context).find(_.contentId == contentId)
  }
}
| Mishiranu/Instant-Image | src/com/mishiranu/instantimage/util/FileManager.scala | Scala | mit | 1,998 |
package org.bitcoins.core
import org.bitcoins.core.number.Int64
// We extend AnyVal to avoid runtime allocation of new
// objects. See the Scala documentation on value classes
// and universal traits for more:
// https://docs.scala-lang.org/overviews/core/value-classes.html
package object currency {

  /** Lets an `Int` literal be written as `1.bitcoins`, `1.bitcoin` or `1.BTC`. */
  implicit class BitcoinsInt(private val underlying: Int) extends AnyVal {
    def bitcoins: Bitcoins = Bitcoins(underlying)
    def bitcoin: Bitcoins = bitcoins
    def BTC: Bitcoins = bitcoins
  }

  /** Lets a `Long` literal be written as `1L.bitcoins`, `1L.bitcoin` or `1L.BTC`. */
  implicit class BitcoinsLong(private val underlying: Long) extends AnyVal {
    def bitcoins: Bitcoins = Bitcoins(underlying)
    def bitcoin: Bitcoins = bitcoins
    def BTC: Bitcoins = bitcoins
  }

  /** Lets an `Int` literal be written as `1.satoshis`, `1.satoshi`, `1.sats` or `1.sat`. */
  implicit class SatoshisInt(private val underlying: Int) extends AnyVal {
    def satoshis: Satoshis = Satoshis(Int64(underlying))
    def satoshi: Satoshis = satoshis
    def sats: Satoshis = satoshis
    def sat: Satoshis = satoshis
  }

  /** Lets a `Long` literal be written as `1L.satoshis`, `1L.satoshi`, `1L.sats` or `1L.sat`. */
  implicit class SatoshisLong(private val underlying: Long) extends AnyVal {
    def satoshis: Satoshis = Satoshis(Int64(underlying))
    def satoshi: Satoshis = satoshis
    def sats: Satoshis = satoshis
    def sat: Satoshis = satoshis
  }
}
| bitcoin-s/bitcoin-s-core | core/src/main/scala/org/bitcoins/core/currency/package.scala | Scala | mit | 1,339 |
package com.ubirch.backend.chain.model
/**
* author: cvandrei
* since: 2016-07-28
*/
object AnchorType {
  /** Identifier for anchors stored in the ubirch block chain. */
  val ubirch: String = "ubirchBlockChain"
  /** Identifier for anchors stored in the Bitcoin block chain. */
  val bitcoin: String = "Bitcoin"
}
| ubirch/ubirch-storage-service | model/src/main/scala/com/ubirch/backend/chain/model/AnchorType.scala | Scala | apache-2.0 | 177 |
/*
* Copyright 2007-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package http
import S._
import common._
import util._
import util.Helpers._
import http.js._
import http.js.AjaxInfo
import JE._
import JsCmds._
import scala.xml._
/**
* The SHtml object defines a suite of XHTML element generator methods
* to simplify the creation of markup, particularly with forms and AJAX.
*/
object SHtml {
/**
 * Converts a value of type T into the String displayed for it in
 * Select, MultiSelect, etc.
 */
trait PairStringPromoter[T] extends Function1[T, String]

/**
 * Companion providing implicit PairStringPromoter instances for
 * common types, plus lifting of arbitrary T => String functions.
 */
object PairStringPromoter {
  /** Strings display as themselves. */
  implicit val strPromot: PairStringPromoter[String] =
    new PairStringPromoter[String] { def apply(value: String): String = value }

  /** Ints display via their decimal representation. */
  implicit val intPromot: PairStringPromoter[Int] =
    new PairStringPromoter[Int] { def apply(value: Int): String = value.toString }

  /** Any T => String function may serve as a promoter. */
  implicit def funcPromote[T](f: T => String): PairStringPromoter[T] =
    new PairStringPromoter[T] { def apply(value: T): String = f(value) }
}
/**
 * An attribute that can be applied to an `Elem`. Typically this is a
 * plain key/value pair, but some HTML5 attributes must instead be
 * simulated in JavaScript.
 */
trait ElemAttr extends Function1[Elem, Elem] {
/**
 * Apply the attribute to the element, returning the decorated element.
 */
def apply(in: Elem): Elem
}
/**
 * Companion with implicit conversions so that `(String, String)` pairs,
 * `Elem => Elem` functions and sequences of pairs may be passed wherever
 * `ElemAttr`s are expected.
 */
object ElemAttr {
// A ("name", "value") tuple becomes a basic key/value attribute.
implicit def pairToBasic(in: (String, String)): ElemAttr =
new BasicElemAttr(in._1, in._2)
// Any Elem => Elem transform can act as an attribute.
implicit def funcToElemAttr(f: Elem => Elem): ElemAttr =
new ElemAttr{def apply(in: Elem): Elem = f(in)}
// Lift a whole sequence of pairs at once.
implicit def strSeqToElemAttr(in: Seq[(String, String)]):
Seq[ElemAttr] = in.map(a => a: ElemAttr)
}
// Internal adapter adding a `%` operator that applies an ElemAttr to an Elem.
private class ApplicableElem(in: Elem) {
def %(attr: ElemAttr): Elem = attr.apply(in)
}
// Implicitly wraps an Elem so `elem % elemAttr` works inside this object.
private implicit def elemToApplicable(e: Elem): ApplicableElem =
new ApplicableElem(e)
/**
 * A plain key/value attribute.
 */
final case class BasicElemAttr(name: String, value: String) extends ElemAttr {
/**
 * Apply the attribute to the element as a `name -> value` pair.
 */
def apply(in: Elem): Elem = in % (name -> value)
}
/**
 * Build a JsExp whose evaluation invokes a Lift Ajax request, with no
 * callbacks and the default response type.
 * @param in the JsExp that returns the request data
 */
def makeAjaxCall(in: JsExp): JsExp = new JsExp {
def toJsCmd = "liftAjax.lift_ajaxHandler(" + in.toJsCmd + ", null, null, null)"
}
/**
 * Build a JsExp whose evaluation invokes a Lift Ajax request.
 * @param in the JsExp that returns the request data
 * @param context defines the response callback functions and the response type (JavaScript or JSON)
 */
def makeAjaxCall(in: JsExp, context: AjaxContext): JsExp = new JsExp {
def toJsCmd = "liftAjax.lift_ajaxHandler(" + in.toJsCmd + ", " + (context.success openOr "null") +
", " + (context.failure openOr "null") +
", " + context.responseType.toString.encJs +
")"
}
/**
 * Build a JavaScript function that will perform an AJAX call based on a value calculated in JavaScript
 *
 * @param jsCalcValue the JavaScript that will be executed on the client to calculate the value to be sent to the server
 * @param func the function to call when the data is sent
 *
 * @return the function ID and JavaScript that makes the call
 */
def ajaxCall(jsCalcValue: JsExp, func: String => JsCmd): (String, JsExp) = ajaxCall_*(jsCalcValue, SFuncHolder(func))
/**
 * Build a JavaScript function that will perform an AJAX call based on a value calculated in JavaScript
 *
 * @param jsCalcValue the JavaScript that will be executed on the client to calculate the value to be sent to the server
 * @param jsContext the context instance that defines JavaScript to be executed on call success or failure
 * @param func the function to call when the data is sent
 *
 * @return the function ID and JavaScript that makes the call
 */
def ajaxCall(jsCalcValue: JsExp, jsContext: JsContext, func: String => JsCmd): (String, JsExp) =
ajaxCall_*(jsCalcValue, jsContext, SFuncHolder(func))
/**
 * Build a JavaScript function that will perform a JSON call based on a value calculated in JavaScript.
 * The client value is JSON-parsed on the server before being passed to `func`.
 *
 * @param jsCalcValue the JavaScript to calculate the value to be sent to the server
 * @param func the function to call when the data is sent
 *
 * @return the function ID and JavaScript that makes the call
 */
def jsonCall(jsCalcValue: JsExp, func: Any => JsCmd): (String, JsExp) =
jsonCall_*(jsCalcValue, SFuncHolder(s => JSONParser.parse(s).map(func) openOr Noop))
/**
 * Build a JavaScript function that will perform a JSON call based on a value calculated in JavaScript
 *
 * @param jsCalcValue the JavaScript to calculate the value to be sent to the server
 * @param jsContext the context instance that defines JavaScript to be executed on call success or failure
 * @param func the function to call when the data is sent
 *
 * @return the function ID and JavaScript that makes the call
 */
def jsonCall(jsCalcValue: JsExp, jsContext: JsContext, func: Any => JsCmd): (String, JsExp) =
jsonCall_*(jsCalcValue, jsContext, SFuncHolder(s => JSONParser.parse(s).map(func) openOr Noop))
/**
 * Register `func` and build the JavaScript that sends it the
 * JSON.stringify-ed client value.
 * @param jsCalcValue -- the JavaScript to calculate the value to be sent to the server
 * @param func -- the function to call when the data is sent
 *
 * @return the function ID and JavaScript that makes the call
 */
private def jsonCall_*(jsCalcValue: JsExp, func: AFuncHolder): (String, JsExp) =
fmapFunc(contextFuncBuilder(func))(name =>
(name, makeAjaxCall(JsRaw("'" + name + "=' + encodeURIComponent(JSON.stringify(" + jsCalcValue.toJsCmd + "))"))))
/**
 * Register `func` and build the JavaScript that sends it the
 * JSON.stringify-ed client value, with explicit callbacks/response type.
 * @param jsCalcValue -- the JavaScript to calculate the value to be sent to the server
 * @param ajaxContext -- the context defining the javascript callback functions and the response type
 * @param func -- the function to call when the data is sent
 *
 * @return the function ID and JavaScript that makes the call
 */
private def jsonCall_*(jsCalcValue: JsExp,
ajaxContext: AjaxContext,
func: AFuncHolder): (String, JsExp) =
fmapFunc(contextFuncBuilder(func))(name =>
(name, makeAjaxCall(JsRaw("'" + name + "=' + encodeURIComponent(JSON.stringify(" + jsCalcValue.toJsCmd + "))"), ajaxContext)))
/**
 * Continuation-style variant of `ajaxCall`: registers the call and hands
 * the (function ID, JsExp) pair to `f`, returning f's result.
 */
def fajaxCall[T](jsCalcValue: JsExp, func: String => JsCmd)(f: (String, JsExp) => T): T = {
val (name, js) = ajaxCall(jsCalcValue, func)
f(name, js)
}
/**
 * Build a JSON call whose server function returns a JsObj delivered to the
 * callbacks defined by `jsonContext`.
 */
def jsonCall(jsCalcValue: JsExp,
jsonContext: JsonContext,
func: String => JsObj): (String, JsExp) = ajaxCall_*(jsCalcValue, jsonContext, SFuncHolder(func))
/**
 * Continuation-style variant of the JsonContext `jsonCall`: hands the
 * (function ID, JsExp) pair to `f` and returns f's result.
 */
def fjsonCall[T](jsCalcValue: JsExp, jsonContext: JsonContext, func: String => JsObj)(f: (String, JsExp) => T): T = {
val (name, js) = jsonCall(jsCalcValue, jsonContext, func)
f(name, js)
}
/**
 * Register `func` and build the JavaScript that sends it the raw
 * (URI-encoded) client value.
 * @param jsCalcValue -- the JavaScript to calculate the value to be sent to the server
 * @param func -- the function to call when the data is sent
 *
 * @return the function ID and JavaScript that makes the call
 */
private def ajaxCall_*(jsCalcValue: JsExp, func: AFuncHolder): (String, JsExp) =
fmapFunc(contextFuncBuilder(func))(name =>
(name, makeAjaxCall(JsRaw("'" + name + "=' + encodeURIComponent(" + jsCalcValue.toJsCmd + ")"))))
/**
 * Register `func` and build the JavaScript that sends it the raw
 * (URI-encoded) client value, with explicit callbacks/response type.
 * @param jsCalcValue -- the JavaScript to calculate the value to be sent to the server
 * @param ajaxContext -- the context defining the javascript callback functions and the response type
 * @param func -- the function to call when the data is sent
 *
 * @return the function ID and JavaScript that makes the call
 */
private def ajaxCall_*(jsCalcValue: JsExp,
ajaxContext: AjaxContext,
func: AFuncHolder): (String, JsExp) =
fmapFunc(contextFuncBuilder(func))(name =>
(name, makeAjaxCall(JsRaw("'" + name + "=' + encodeURIComponent(" + jsCalcValue.toJsCmd + ")"), ajaxContext)))
// Defer the Ajax call: wrap it in an anonymous function appended to the
// user-supplied jsFunc's parameter list, so the user code decides when to fire it.
private def deferCall(data: JsExp, jsFunc: Call): Call =
Call(jsFunc.function, (jsFunc.params ++ List(AnonFunc(makeAjaxCall(data)))): _*)
/**
 * Create an Ajax button. When it's pressed, the function is executed
 *
 * @param text -- the name/text of the button
 * @param func -- the function to execute when the button is pushed.  Return Noop if nothing changes on the browser.
 * @param attrs -- the list of node attributes
 *
 * @return a button to put on your page
 */
def ajaxButton(text: NodeSeq, func: () => JsCmd, attrs: ElemAttr*): Elem = {
attrs.foldLeft(fmapFunc(contextFuncBuilder(func))(name =>
<button onclick={makeAjaxCall(Str(name + "=true")).toJsCmd +
"; return false;"}>{text}</button>))((e, f) => f(e))
}
/**
 * Memoize the NodeSeq used in apply() and then call
 * applyAgain() in an Ajax call and you don't have to
 * explicitly capture the template
 */
def memoize(f: => NodeSeq => NodeSeq): MemoizeTransform = {
// Use the caller's stack frame as a salt so each memoize call site
// gets its own RequestVar storage.
val salt = (new Exception()).getStackTrace().apply(1).toString
new MemoizeTransform {
object latestNodeSeq extends RequestVar[NodeSeq](NodeSeq.Empty) {
override val __nameSalt = salt
}
// Remember the template, then run the transform.
def apply(ns: NodeSeq): NodeSeq = {
latestNodeSeq.set(ns)
f(ns)
}
// Re-run the transform against the remembered template.
def applyAgain(): NodeSeq = f(latestNodeSeq.get)
}
}
/**
 * Create an Ajax button that when pressed submits an Ajax request and expects back a JSON
 * construct which will be passed to the <i>success</i> function
 *
 * @param text -- the name/text of the button
 * @param func -- the function to execute when the button is pushed.  Return Noop if nothing changes on the browser.
 * @param ajaxContext -- defines the callback functions and the JSON response type
 * @param attrs -- the list of node attributes
 *
 * @return a button to put on your page
 *
 */
def jsonButton(text: NodeSeq, func: () => JsObj, ajaxContext: JsonContext, attrs: ElemAttr*): Elem = {
attrs.foldLeft(fmapFunc(contextFuncBuilder(func))(name =>
<button onclick={makeAjaxCall(Str(name + "=true"), ajaxContext).toJsCmd +
"; return false;"}>{text}</button>))((e, f) => f(e))
}
/**
 * Create an Ajax button whose click sends the client-side value of jsExp
 * to the server. When it's pressed, the function is executed.
 *
 * @param text -- the name/text of the button
 * @param jsExp -- JavaScript evaluated on the client; its value is sent to func
 * @param func -- the function to execute with the client value.  Return Noop if nothing changes on the browser.
 * @param attrs -- the list of node attributes
 *
 * @return a button to put on your page
 */
def ajaxButton(text: NodeSeq, jsExp: JsExp, func: String => JsCmd, attrs: ElemAttr*): Elem = {
attrs.foldLeft(fmapFunc(contextFuncBuilder(SFuncHolder(func)))(name =>
<button onclick={makeAjaxCall(JsRaw(name.encJs + "+'='+encodeURIComponent(" + jsExp.toJsCmd + ")")).toJsCmd +
"; return false;"}>{text}</button>))((e, f) => f(e))
}
/**
 * Create an Ajax button that when pressed submits the JSON.stringify-ed value
 * of jsExp and expects back a JSON construct which will be passed to the
 * <i>success</i> function
 *
 * @param text -- the name/text of the button
 * @param jsExp -- JavaScript evaluated on the client; its JSON value is sent to func
 * @param func -- the function to execute with the parsed value.  Return Noop if nothing changes on the browser.
 * @param ajaxContext -- defines the callback functions and the JSON response type
 * @param attrs -- the list of node attributes
 *
 * @return a button to put on your page
 *
 */
def jsonButton(text: NodeSeq, jsExp: JsExp, func: Any => JsObj, ajaxContext: JsonContext, attrs: ElemAttr*): Elem = {
attrs.foldLeft(jsonFmapFunc(func)(name =>
<button onclick={makeAjaxCall(JsRaw(name.encJs + "+'='+ encodeURIComponent(JSON.stringify(" + jsExp.toJsCmd + "))"), ajaxContext).toJsCmd +
"; return false;"}>{text}</button>))(_ % _)
}
/**
 * Create an Ajax button. When it's pressed, the function is executed
 *
 * @param text -- the name/text of the button
 * @param jsFunc -- the user function that will be executed. This function will receive as last parameter
 *                  the function that will actually do the ajax call. Hence the user function can decide when
 *                  to make the ajax request.
 * @param func -- the function to execute when the button is pushed.  Return Noop if nothing changes on the browser.
 *
 * @return a button to put on your page
 */
def ajaxButton(text: NodeSeq, jsFunc: Call, func: () => JsCmd, attrs: ElemAttr*): Elem = {
attrs.foldLeft(fmapFunc(contextFuncBuilder(func))(name =>
<button onclick={deferCall(Str(name + "=true"), jsFunc).toJsCmd + "; return false;"}>{text}</button>))(_ % _)
}
/**
 * Create an Ajax button from a plain-text label. When it's pressed, the function is executed
 *
 * @param text -- the text of the button
 * @param func -- the function to execute when the button is pushed.  Return Noop if nothing changes on the browser.
 *
 * @return a button to put on your page
 */
def ajaxButton(text: String, func: () => JsCmd, attrs: ElemAttr*): Elem =
ajaxButton(Text(text), func, attrs: _*)
/**
 * Create an Ajax button from a plain-text label. When it's pressed, the function is executed
 *
 * @param text -- the text of the button
 * @param jsFunc -- the user function that will be executed; it receives as last parameter
 *                  the function that actually makes the ajax call, so it decides when to fire it.
 * @param func -- the function to execute when the button is pushed.  Return Noop if nothing changes on the browser.
 *
 * @return a button to put on your page
 */
def ajaxButton(text: String, jsFunc: Call, func: () => JsCmd, attrs: ElemAttr*): Elem =
ajaxButton(Text(text), jsFunc, func, attrs: _*)
/**
 * This method generates an AJAX editable field.
 *
 * Normally, the displayContents will be shown, with an "Edit" button.
 * If the "Edit" button is clicked, the field will be replaced with
 * the edit form, along with an "OK" and "Cancel" button.
 * If the OK button is pressed, the form fields are submitted and the onSubmit
 * function is called, and then the displayContents are re-run to get a new display.
 * If cancel is pressed then the original displayContents are re-shown.
 *
 * Note that the editForm NodeSeq is wrapped inside of an <code>ajaxForm</code>, so it can be comprised
 * of normal (non-AJAX) <code>SHtml</code> form elements. For example:
 *
 * <pre name="code" class="scala">
 * ajaxEditable(Text("Click me"),
 *              SHtml.text("Edit me", s => println("Edited with " +s)),
 *              () => { println("submitted"); Noop })
 * </pre>
 */
def ajaxEditable (displayContents : => NodeSeq, editForm : => NodeSeq, onSubmit : () => JsCmd) : NodeSeq = {
import _root_.net.liftweb.http.js
import js.{jquery,JsCmd,JsCmds,JE}
import jquery.JqJsCmds
import JsCmds.{Noop,SetHtml}
import JE.Str
import JqJsCmds.{Hide,Show}
// Unique ids for the display and edit divs, toggled against each other.
val divName = Helpers.nextFuncName
val dispName = divName + "_display"
val editName = divName + "_edit"
// Show one div and hide the other.
def swapJsCmd (show : String, hide : String) : JsCmd = Show(show) & Hide(hide)
// Refresh the contents of `show` on the server, then perform the swap.
def setAndSwap (show : String, showContents : => NodeSeq, hide : String) : JsCmd =
(SHtml.ajaxCall(Str("ignore"), {ignore : String => SetHtml(show, showContents)})._2.cmd & swapJsCmd(show,hide))
// Read-only view plus the "edit" button that reveals the form.
def displayMarkup : NodeSeq =
displayContents ++ Text(" ") ++
<input value={S.??("edit")} type="button" onclick={setAndSwap(editName, editMarkup, dispName).toJsCmd + " return false;"} />
// The edit form wrapped in an ajaxForm with OK/Cancel controls.
def editMarkup : NodeSeq = {
val formData : NodeSeq =
editForm ++
<input type="submit" value={S.??("ok")} /> ++
hidden(onSubmit) ++
<input type="button" onclick={swapJsCmd(dispName,editName).toJsCmd + " return false;"} value={S.??("cancel")} />
ajaxForm(formData,
Noop,
setAndSwap(dispName, displayMarkup, editName))
}
<div>
<div id={dispName}>
{displayMarkup}
</div>
<div id={editName} style="display: none;">
{editMarkup}
</div>
</div>
}
/**
 * Create an anchor tag around a body which will do an AJAX call and invoke the function
 *
 * @param func - the function to invoke when the link is clicked
 * @param body - the NodeSeq to wrap in the anchor tag
 * @param attrs - the anchor node attributes
 */
def a(func: () => JsCmd, body: NodeSeq, attrs: ElemAttr*): Elem = {
val key = formFuncName
addFunctionMap(key, contextFuncBuilder((a: List[String]) => func()))
attrs.foldLeft(<lift:a key={key}>{body}</lift:a>)(_ % _)
}
/**
 * Create an anchor tag around a body which will do an AJAX call and invoke the function
 *
 * @param jsFunc -- the user function that will be executed. This function will receive as last parameter
 *                  the function that will actually do the ajax call. Hence the user function can decide when
 *                  to make the ajax request.
 * @param func - the function to invoke when the link is clicked
 * @param body - the NodeSeq to wrap in the anchor tag
 * @param attrs - the anchor node attributes
 */
def a(jsFunc: Call, func: () => JsCmd, body: NodeSeq, attrs: ElemAttr*): Elem = {
attrs.foldLeft(fmapFunc(contextFuncBuilder(func))(name =>
<a href="javascript://" onclick={deferCall(Str(name + "=true"), jsFunc).toJsCmd + "; return false;"}>{body}</a>))(_ % _)
}
/**
 * Create an anchor tag whose click sends an Ajax request and routes the
 * JsObj result through the callbacks in `jsonContext`.
 */
def a(func: () => JsObj,
jsonContext: JsonContext,
body: NodeSeq,
attrs: ElemAttr*): Elem = {
attrs.foldLeft(fmapFunc(contextFuncBuilder(func))(name =>
<a href="javascript://" onclick={makeAjaxCall(Str(name + "=true"), jsonContext).toJsCmd + "; return false;"}>{body}</a>))(_ % _)
}
/**
 * Create an anchor with a body and the function to be executed when the anchor is clicked
 */
def a(body: NodeSeq, attrs: ElemAttr*)(func: => JsCmd): Elem =
a(() => func, body, attrs: _*)
/**
 * Create an anchor with a body and the function to be executed when the anchor is clicked
 * @param jsFunc -- the user function that will be executed. This function will receive as last parameter
 *                  the function that will actually do the ajax call. Hence the user function can decide when
 *                  to make the ajax request.
 * @param body - the NodeSeq to wrap in the anchor tag
 * @param attrs - the anchor node attributes
 */
def a(jsFunc: Call, body: NodeSeq, attrs: ElemAttr*)(func: => JsCmd): Elem =
a(jsFunc, () => func, body, attrs: _*)
/**
 * Create an anchor that will run a JavaScript command when clicked
 */
def a(body: NodeSeq, cmd: JsCmd, attrs: ElemAttr*): Elem =
attrs.foldLeft(<a href="javascript://"
onclick={cmd.toJsCmd + "; return false;"}>{body}</a>)(_ % _)
/**
 * Create a span that will run a JavaScript command when clicked
 */
def span(body: NodeSeq, cmd: JsCmd, attrs: ElemAttr*): Elem =
attrs.foldLeft(<span onclick={cmd.toJsCmd}>{body}</span>)(_ % _)
/**
 * Attach a click handler to `head` that toggles the visibility of `kids`
 * on the client and notifies the server via `func`. Returns head ++ kids.
 */
def toggleKids(head: Elem, visible: Boolean, func: () => JsCmd, kids: Elem): NodeSeq = {
fmapFunc(contextFuncBuilder(func)) {
funcName =>
val (nk, id) = findOrAddId(kids)
// Start hidden unless `visible` was requested.
val rnk = if (visible) nk else nk % ("style" -> "display: none")
val nh = head %
("onclick" -> (LiftRules.jsArtifacts.toggle(id).cmd & makeAjaxCall(JsRaw("'" + funcName + "=true'")).cmd))
nh ++ rnk
}
}
/**
 * This function does not really submit a JSON request to the server; instead json is a function
 * that allows you to build a more complex JsCmd based on the JsExp <i>JE.JsRaw("this.value")</i>.
 * This function is called by the overloaded version of jsonText.
 *
 * @param value - the initial value of the text field
 * @param ignoreBlur - ignore the onblur event and only do the event if the enter key is pressed
 * @param json - takes a JsExp which describes how to recover the
 * value of the text field and returns a JsExp containing the thing
 * to execute on blur/return
 *
 * @return a text field
 */
def jsonText(value: String, ignoreBlur: Boolean, json: JsExp => JsCmd, attrs: ElemAttr*): Elem =
(attrs.foldLeft(<input type="text" value={value}/>)(_ % _)) %
("onkeypress" -> """liftUtils.lift_blurIfReturn(event)""") %
(if (ignoreBlur) Null else ("onblur" -> (json(JE.JsRaw("this.value")))))
/**
 * Same as the three-argument jsonText, with blur events honored
 * (ignoreBlur = false).
 *
 * @param value - the initial value of the text field
 * @param json - takes a JsExp which describes how to recover the
 * value of the text field and returns a JsExp containing the thing
 * to execute on blur/return
 *
 * @return a text field
 */
def jsonText(value: String, json: JsExp => JsCmd, attrs: ElemAttr*): Elem = jsonText(value, false, json, attrs :_*)
/**
 * Create a JSON text widget that makes a JSON call on blur or "return".
 *
 * @param value - the initial value of the text field
 * @param cmd - the json command name
 * @param json - the JsonCall returned from S.buildJsonFunc
 *
 * @return a text field
 */
def jsonText(value: String, cmd: String, json: JsonCall, attrs: ElemAttr*): Elem =
jsonText(value, exp => json(cmd, exp), attrs: _*)
// Ajax text field bound to a Settable: edits are pushed into the settable.
def ajaxTextElem(settable: Settable{type ValueType = String}, attrs: ElemAttr*): Elem =
ajaxText(settable.get, (b: String) => {settable.set(b); Noop}, attrs :_*)
// Ajax text field; func receives the new value on blur/return.
def ajaxText(value: String, func: String => JsCmd, attrs: ElemAttr*): Elem =
ajaxText_*(value, false, Empty, SFuncHolder(func), attrs: _*)
// Variant deferring the Ajax call through the user-supplied jsFunc.
def ajaxText(value: String, jsFunc: Call, func: String => JsCmd, attrs: ElemAttr*): Elem =
ajaxText_*(value, false, Full(jsFunc), SFuncHolder(func), attrs: _*)
// Variant allowing blur events to be ignored (only "return" triggers).
def ajaxText(value: String, ignoreBlur: Boolean, func: String => JsCmd, attrs: ElemAttr*): Elem =
ajaxText_*(value, ignoreBlur, Empty, SFuncHolder(func), attrs: _*)
// Variant with both ignoreBlur and a deferring jsFunc.
def ajaxText(value: String, ignoreBlur: Boolean, jsFunc: Call, func: String => JsCmd, attrs: ElemAttr*): Elem =
ajaxText_*(value, ignoreBlur, Full(jsFunc), SFuncHolder(func), attrs: _*)
// Shared implementation for all ajaxText overloads: registers func and
// wires onkeypress/onblur handlers that send the field's value.
private def ajaxText_*(value: String, ignoreBlur: Boolean, jsFunc: Box[Call], func: AFuncHolder, attrs: ElemAttr*): Elem = {
val raw = (funcName: String, value: String) => JsRaw("'" + funcName + "=' + encodeURIComponent(" + value + ".value)")
val key = formFuncName
fmapFunc(contextFuncBuilder(func)) {
funcName =>
(attrs.foldLeft(<input type="text" value={value}/>)(_ % _)) %
("onkeypress" -> """liftUtils.lift_blurIfReturn(event)""") %
(if (ignoreBlur) Null else
("onblur" -> (jsFunc match {
// Defer through the user function, capturing `this` in a JS var first.
case Full(f) => JsCrVar(key, JsRaw("this")) & deferCall(raw(funcName, key), f)
case _ => makeAjaxCall(raw(funcName, "this"))
})
))
}
}
/**
 * This function does not really submit a JSON request to the server; instead json is a function
 * that allows you to build a more complex JsCmd based on the JsExp <i>JE.JsRaw("this.value")</i>.
 * This function is called by the overloaded version of jsonTextarea.
 *
 * @param value - the initial value of the text area field
 * @param json - takes a JsExp which describes how to recover the
 * value of the text area field and returns a JsExp containing the thing
 * to execute on blur
 *
 * @return a text area field
 */
def jsonTextarea(value: String, json: JsExp => JsCmd, attrs: ElemAttr*): Elem =
(attrs.foldLeft(<textarea>{value}</textarea>)(_ % _)) %
("onblur" -> (json(JE.JsRaw("this.value"))))
/**
 * Create a JSON text area widget that makes a JSON call on blur
 *
 * @param value - the initial value of the text field
 * @param cmd - the json command name
 * @param json - the JsonCall returned from S.buildJsonFunc
 *
 * @return a text field
 */
def jsonTextarea(value: String, cmd: String, json: JsonCall, attrs: ElemAttr*): Elem =
jsonTextarea(value, exp => json(cmd, exp), attrs: _*)
// Ajax textarea; func receives the new value on blur.
def ajaxTextarea(value: String, func: String => JsCmd, attrs: ElemAttr*): Elem =
ajaxTextarea_*(value, Empty, SFuncHolder(func), attrs: _*)
// Variant deferring the Ajax call through the user-supplied jsFunc.
def ajaxTextarea(value: String, jsFunc: Call, func: String => JsCmd, attrs: ElemAttr*): Elem =
ajaxTextarea_*(value, Full(jsFunc), SFuncHolder(func), attrs: _*)
// Shared implementation: registers func and wires an onblur handler
// that sends the textarea's value.
private def ajaxTextarea_*(value: String, jsFunc: Box[Call], func: AFuncHolder, attrs: ElemAttr*): Elem = {
val raw = (funcName: String, value: String) => JsRaw("'" + funcName + "=' + encodeURIComponent(" + value + ".value)")
val key = formFuncName
fmapFunc(contextFuncBuilder(func)) {
funcName =>
(attrs.foldLeft(<textarea>{value}</textarea>)(_ % _)) %
("onblur" -> (jsFunc match {
// Defer through the user function, capturing `this` in a JS var first.
case Full(f) => JsCrVar(key, JsRaw("this")) & deferCall(raw(funcName, key), f)
case _ => makeAjaxCall(raw(funcName, "this"))
})
)
}
}
/** Shape of an HTML image-map `area`: supplies the shape name and its coordinate string. */
trait AreaShape {
  def shape: String
  def coords: String
}
/** Rectangular area defined by its edge coordinates. */
case class RectShape(left: Int, top: Int, right: Int, bottom: Int) extends AreaShape {
  def shape: String = "rect"
  def coords: String = s"$left, $top, $right, $bottom"
}
/** Circular area with an absolute radius. */
case class CircleShape(centerX: Int, centerY: Int, radius: Int) extends AreaShape {
  def shape: String = "circle"
  def coords: String = s"$centerX, $centerY, $radius"
}
/** Circular area whose radius is a percentage. */
case class CirclePercentShape(centerX: Int, centerY: Int, radiusPercent: Int) extends AreaShape {
  def shape: String = "circle"
  def coords: String = s"$centerX, $centerY, $radiusPercent%"
}
/** Polygonal area given by its vertex coordinates. */
case class PolyShape(polyCoords: (Int, Int)*) extends AreaShape {
  def shape: String = "poly"
  def coords: String = polyCoords.map { case (x, y) => s"$x, $y" }.mkString(", ")
}
/**
 * Generate an Area tag
 *
 * @param shape - the shape of the area (RectShape, CircleShape, CirclePercentShape, PolyShape)
 * @param alt - the contents of the alt attribute
 * @param attrs - the balance of the attributes for the tag
 */
def area(shape: AreaShape, alt: String, attrs: ElemAttr*): Elem =
attrs.foldLeft(<area alt={alt} shape={shape.shape} coords={shape.coords} />)(_ % _)
/**
 * Generate an Area tag
 *
 * @param shape - the shape of the area (RectShape, CircleShape, CirclePercentShape, PolyShape)
 * @param jsCmd - the JavaScript to execute on the client when the area is clicked
 * @param alt - the contents of the alt attribute
 * @param attrs - the balance of the attributes for the tag
 */
def area(shape: AreaShape, jsCmd: JsCmd, alt: String, attrs: ElemAttr*): Elem =
area(shape, alt, (("onclick" -> jsCmd.toJsCmd): ElemAttr) :: attrs.toList :_*)
/**
 * Generate an Area tag
 *
 * @param shape - the shape of the area (RectShape, CircleShape, CirclePercentShape, PolyShape)
 * @param func - The server side function to execute when the area is clicked on.
 * @param alt - the contents of the alt attribute
 * @param attrs - the balance of the attributes for the tag
 */
def area(shape: AreaShape, func: () => JsCmd, alt: String, attrs: ElemAttr*): Elem = {
fmapFunc(contextFuncBuilder(func)) {
funcName =>
area(shape, alt, (("onclick" -> (makeAjaxCall(Str(funcName + "=true")).toJsCmd +
"; return false;")): ElemAttr) :: attrs.toList :_*)
}
}
// Ajax checkbox bound to a Settable: toggles are pushed into the settable.
def ajaxCheckboxElem(settable: Settable{type ValueType = Boolean}, attrs: ElemAttr*): Elem =
ajaxCheckbox(settable.get, (b: Boolean) => {settable.set(b); Noop}, attrs :_*)
// Ajax checkbox; func receives the checked state on click.
def ajaxCheckbox(value: Boolean, func: Boolean => JsCmd, attrs: ElemAttr*): Elem =
ajaxCheckbox_*(value, Empty, LFuncHolder(in => func(in.exists(toBoolean(_)))), attrs: _*)
// Settable-bound variant deferring the Ajax call through jsFunc.
def ajaxCheckboxElem(settable: Settable{type ValueType = Boolean}, jsFunc: Call, attrs: ElemAttr*): Elem =
ajaxCheckbox_*(settable.get, Full(jsFunc),
LFuncHolder(in => {settable.set(in.exists(toBoolean( _)));
Noop}), attrs: _*)
// Variant deferring the Ajax call through the user-supplied jsFunc.
def ajaxCheckbox(value: Boolean, jsFunc: Call, func: Boolean => JsCmd, attrs: ElemAttr*): Elem =
ajaxCheckbox_*(value, Full(jsFunc), LFuncHolder(in => func(in.exists(toBoolean(_)))), attrs: _*)
// Shared implementation: registers func and wires an onclick handler
// that sends the checkbox's checked state.
private def ajaxCheckbox_*(value: Boolean, jsFunc: Box[Call], func: AFuncHolder, attrs: ElemAttr*): Elem = {
val raw = (funcName: String, value: String) => JsRaw("'" + funcName + "=' + " + value + ".checked")
val key = formFuncName
fmapFunc(contextFuncBuilder(func)) {
funcName =>
(attrs.foldLeft(<input type="checkbox"/>)(_ % _)) %
checked(value) %
("onclick" -> (jsFunc match {
// Defer through the user function, capturing `this` in a JS var first.
case Full(f) => JsCrVar(key, JsRaw("this")) & deferCall(raw(funcName, key), f)
case _ => makeAjaxCall(raw(funcName, "this"))
}))
}
}
/**
 * Make a set of Ajax radio buttons.  When the buttons are pressed,
 * the function is called
 *
 * @param opts -- The possible values.  These are not revealed to the browser
 * @param deflt -- the default button
 * @param ajaxFunc -- the function to invoke when the button is pressed
 */
def ajaxRadio[T](opts: Seq[T], deflt: Box[T], ajaxFunc: T => JsCmd, attrs: ElemAttr*): ChoiceHolder[T] = {
// One shared group name ties the radio buttons together; each button's
// value is an opaque nonce so the real T values never reach the browser.
val groupName = Helpers.nextFuncName
val itemList = opts.map{
v => {
ChoiceItem(v, attrs.foldLeft(<input type="radio" name={groupName}
value={Helpers.nextFuncName}/>)(_ % _) %
checked(deflt == Full(v)) %
("onclick" -> ajaxCall(Str(""),
ignore => ajaxFunc(v))._2.toJsCmd))
}
}
ChoiceHolder(itemList)
}
/**
 * Create a select box based on the list with a default value and the function
 * to be executed on form submission
 *
 * @param options -- a list of values; `f` renders each as its display String
 * @param default -- the default value (or Empty if no default value)
 * @param onSubmit -- the function to execute on form submission
 */
def ajaxSelectElem[T](options: Seq[T], default: Box[T], attrs: ElemAttr*)
(onSubmit: T => JsCmd)(implicit f: PairStringPromoter[T]):
Elem = {
  ajaxSelectObj[T](options.map(v => (v -> f(v))),
                   default, onSubmit, attrs :_*)
}

/**
 * Create a select box based on the list with a default value and the function
 * to be executed on form submission
 *
 * @param options -- a list of value and text pairs (value, text to display)
 * @param default -- the default value (or Empty if no default value)
 * @param onSubmit -- the function to execute on form submission
 */
def ajaxSelectObj[T](options: Seq[(T, String)], default: Box[T],
                     onSubmit: T => JsCmd, attrs: ElemAttr*): Elem = {
  // Pair each option with a random nonce so real values are never exposed
  // to the browser; on submission the nonce is mapped back to its T.
  val secure = options.map {case (obj, txt) => (obj, randomString(20), txt)}
  val defaultNonce = default.flatMap(d => secure.find(_._1 == d).map(_._2))
  val nonces = secure.map {case (obj, nonce, txt) => (nonce, txt)}
  def process(nonce: String): JsCmd =
    secure.find(_._2 == nonce).map(x => onSubmit(x._1)) getOrElse Noop
  // (nonces, defaultNonce, SFuncHolder(process))
  ajaxSelect_*(nonces,
               defaultNonce,
               Empty,
               SFuncHolder(process _),
               attrs: _*)
}
/**
 * Create a select box based on the list with a default value and the function
 * to be executed on form submission. The ajax call is wrapped by the
 * client-side function `jsFunc`.
 *
 * @param options -- a list of values; `f` renders each as its display String
 * @param default -- the default value (or Empty if no default value)
 * @param jsFunc -- the client-side function that wraps the ajax call
 * @param attrs -- attributes to append to the generated &lt;select&gt;
 * @param onSubmit -- the function to execute on form submission
 */
def ajaxSelectElem[T](options: Seq[T], default: Box[T],
                      jsFunc: Call,
                      attrs: ElemAttr*)
(onSubmit: T => JsCmd)
(implicit f: PairStringPromoter[T]): Elem =
{
  // Bug fix: `attrs` was previously accepted but never forwarded, so any
  // supplied attributes were silently dropped (the non-jsFunc overload
  // forwards them). Pass them through to ajaxSelectObj.
  ajaxSelectObj[T](options.map(v => (v, f(v))), default,
                   jsFunc, onSubmit, attrs :_*)
}
/**
 * Create a select box based on the list with a default value and the function
 * to be executed on form submission. The ajax call is wrapped by the
 * client-side function `jsFunc`.
 *
 * @param options -- a list of value and text pairs (value, text to display)
 * @param default -- the default value (or Empty if no default value)
 * @param jsFunc -- the client-side function that wraps the ajax call
 * @param onSubmit -- the function to execute on form submission
 */
def ajaxSelectObj[T](options: Seq[(T, String)], default: Box[T],
                     jsFunc: Call,
                     onSubmit: T => JsCmd, attrs: ElemAttr*): Elem = {
  // Same nonce scheme as the non-jsFunc overload: browser only ever sees
  // random tokens, mapped back to T server-side.
  val secure = options.map {case (obj, txt) => (obj, randomString(20), txt)}
  val defaultNonce = default.flatMap(d => secure.find(_._1 == d).map(_._2))
  val nonces = secure.map {case (obj, nonce, txt) => (nonce, txt)}
  def process(nonce: String): JsCmd =
    secure.find(_._2 == nonce).map(x => onSubmit(x._1)) getOrElse Noop
  // (nonces, defaultNonce, SFuncHolder(process))
  ajaxSelect_*(nonces,
               defaultNonce,
               Full(jsFunc),
               SFuncHolder(process _),
               attrs: _*)
}
/** Ajax select box: `func` is invoked with the selected value on change. */
def ajaxSelect(opts: Seq[(String, String)], deflt: Box[String],
               func: String => JsCmd, attrs: ElemAttr*): Elem =
  ajaxSelect_*(opts, deflt, Empty, SFuncHolder(func), attrs: _*)

/** Ajax select box whose ajax call is wrapped by the client-side `jsFunc`. */
def ajaxSelect(opts: Seq[(String, String)], deflt: Box[String],
               jsFunc: Call, func: String => JsCmd, attrs: ElemAttr*): Elem =
  ajaxSelect_*(opts, deflt, Full(jsFunc), SFuncHolder(func), attrs: _*)

/**
 * Common implementation: builds the &lt;select&gt; and attaches an onchange
 * handler that posts the currently-selected option's value via ajax.
 * Submitted values are validated against the original option list before
 * `func` is invoked.
 */
private def ajaxSelect_*(opts: Seq[(String, String)], deflt: Box[String],
                         jsFunc: Box[Call], func: AFuncHolder, attrs: ElemAttr*): Elem = {
  // JS expression serializing the selection as "funcName=<selected value>"
  val raw = (funcName: String, value: String) => JsRaw("'" + funcName + "=' + " + value + ".options[" + value + ".selectedIndex].value")
  val key = formFuncName
  val vals = opts.map(_._1)
  // reject any submitted value that was not one of the offered options
  val testFunc = LFuncHolder(in => in.filter(v => vals.contains(v)) match {case Nil => false case xs => func(xs)}, func.owner)
  fmapFunc(contextFuncBuilder(testFunc)) {
    funcName =>
    (attrs.foldLeft(<select>{opts.flatMap {case (value, text) => (<option value={value}>{text}</option>) % selected(deflt.exists(_ == value))}}</select>)(_ % _)) %
    ("onchange" -> (jsFunc match {
      case Full(f) => JsCrVar(key, JsRaw("this")) & deferCall(raw(funcName, key), f)
      case _ => makeAjaxCall(raw(funcName, "this"))
    }))
  }
}

/**
 * Register `func` and return its (funcName, ajax-call JsExp) pair, so the
 * caller can attach the invocation to any client-side event.
 */
def ajaxInvoke(func: () => JsCmd): (String, JsExp) =
  fmapFunc(contextFuncBuilder(NFuncHolder(func)))(name => (name, makeAjaxCall(name + "=true")))
/**
 * Build a swappable visual element. If the shown element is clicked on, it turns into the hidden element and when
 * the hidden element blurs, it swaps into the shown element.
 */
def swappable(shown: Elem, hidden: Elem): Elem = {
  // make sure both elements carry ids so the JS artifacts can target them
  val (rs, sid) = findOrAddId(shown)
  val (rh, hid) = findOrAddId(hidden)
  val ui = LiftRules.jsArtifacts
  (<span>{rs % ("onclick" -> (ui.hide(sid).cmd &
                              ui.showAndFocus(hid).cmd & JsRaw("return false;")))}{dealWithBlur(rh % ("style" -> "display: none"), (ui.show(sid).cmd & ui.hide(hid).cmd))}</span>)
}

/**
 * Variant where the hidden element is built by a function that receives the
 * JavaScript needed to swap back to the shown element.
 */
def swappable(shown: Elem, hidden: String => Elem): Elem = {
  val (rs, sid) = findOrAddId(shown)
  val hid = formFuncName
  val ui = LiftRules.jsArtifacts
  val rh = <span id={hid}>{hidden(ui.show(sid).toJsCmd + ";" + ui.hide(hid).toJsCmd + ";")}</span>
  (<span>{rs % ("onclick" -> (ui.hide(sid).toJsCmd + ";" + ui.show(hid).toJsCmd + "; return false;"))}{(rh % ("style" -> "display: none"))}</span>)
}

/**
 * Attach `blurCmd` to the element's onblur handler, prepending it to any
 * existing onblur code rather than replacing it.
 */
private def dealWithBlur(elem: Elem, blurCmd: String): Elem = {
  (elem \\ "@onblur").toList match {
    case Nil => elem % ("onblur" -> blurCmd)
    case x :: xs => val attrs = elem.attributes.filter(_.key != "onblur")
    Elem(elem.prefix, elem.label, new UnprefixedAttribute("onblur", Text(blurCmd + x.text), attrs), elem.scope, elem.child: _*)
  }
}
/**
 * create an anchor tag around a body
 *
 * @param to - the target
 * @param func - the function to invoke when the link is clicked
 * @param body - the NodeSeq to wrap in the anchor tag
 * @param attrs - the (optional) attributes for the HTML element
 */
def link(to: String, func: () => Any, body: NodeSeq,
         attrs: ElemAttr*): Elem = {
  // the registered function ignores its List[String] argument; "key=_" is
  // appended to the href so the server can dispatch the click
  fmapFunc((a: List[String]) => {func(); true})(key =>
    attrs.foldLeft(<a href={Helpers.appendFuncToURL(to, key + "=_")}>{body}</a>)(_ % _))
}
/** Build an &lt;input type={name}&gt; whose name attribute maps to `func`. */
private def makeFormElement(name: String, func: AFuncHolder,
                            attrs: ElemAttr*): Elem =
  fmapFunc(func)(funcName =>
    attrs.foldLeft(<input type={name} name={funcName}/>)(_ % _))

/** Text input with no ajax blur-test. */
def text_*(value: String, func: AFuncHolder, attrs: ElemAttr*): Elem =
  text_*(value, func, Empty, attrs: _*)

/** Text input that runs `ajaxTest` on blur with the current value. */
def text_*(value: String, func: AFuncHolder, ajaxTest: String => JsCmd, attrs: ElemAttr*): Elem =
  text_*(value, func, Full(ajaxTest), attrs: _*)

/** Build an onblur attribute invoking `bf` via ajax, or Null if absent. */
private def buildOnBlur(bf: Box[String => JsCmd]): MetaData = bf match {
  case Full(func) =>
    new UnprefixedAttribute("onblur", Text(ajaxCall(JsRaw("this.value"), func)._2.toJsCmd), Null)
  case _ => Null
}

/**
 * Core text-input builder. When `ignoreBlur` is false and `ajaxTest` is
 * present, an onblur ajax validation is attached.
 */
def text_*(value: String, ignoreBlur: Boolean, func: AFuncHolder, ajaxTest: Box[String => JsCmd], attrs: ElemAttr*): Elem =
  makeFormElement("text", func, attrs: _*) % new UnprefixedAttribute("value", Text(value), Null) % (
    if (ignoreBlur) Null else buildOnBlur(ajaxTest))

def text_*(value: String, func: AFuncHolder, ajaxTest: Box[String => JsCmd], attrs: ElemAttr*): Elem =
  text_*(value, false, func, ajaxTest, attrs :_*)

/** Password input backed by `func`. */
def password_*(value: String, func: AFuncHolder, attrs: ElemAttr*): Elem =
  makeFormElement("password", func, attrs: _*) % ("value" -> value)

/** Hidden input (value "true") that triggers `func` on form submission. */
def hidden_*(func: AFuncHolder, attrs: ElemAttr*): Elem =
  makeFormElement("hidden", func, attrs: _*) % ("value" -> "true")

/**
 * Submit button backed by `func`. Wrapped in formGroup(1) by default so
 * the submit function runs after the other form-field functions.
 */
def submit_*(value: String, func: AFuncHolder, attrs: ElemAttr*): Elem =
{
  def doit = makeFormElement("submit", func, attrs: _*) % ("value" -> value)
  _formGroup.is match {
    case Empty => formGroup(1)(doit)
    case _ => doit
  }
}
/**
 * Return a copy of `elem` whose unprefixed "name" attribute is set to
 * `name`; any pre-existing unprefixed "name" is removed first, while
 * prefixed attributes are always preserved.
 */
private def dupWithName(elem: Elem, name: String): Elem = {
  // every attribute except an unprefixed "name"
  val keptAttributes = elem.attributes.filter {
    case up: UnprefixedAttribute => up.key != "name"
    case _ => true
  }
  new Elem(elem.prefix,
           elem.label,
           new UnprefixedAttribute("name", name, keptAttributes),
           elem.scope,
           elem.child :_*)
}
/** True when the attribute set declares type="radio" (case-insensitive). */
private def isRadio(in: MetaData): Boolean =
  in.get("type").exists(_.text equalsIgnoreCase "radio")

/** True when the attribute set declares type="checkbox" (case-insensitive). */
private def isCheckbox(in: MetaData): Boolean =
  in.get("type").exists(_.text equalsIgnoreCase "checkbox")
/**
 * If you want to update the href of an <a> tag,
 * this method returns a function that mutates the href
 * by adding a function that will be executed when the link
 * is clicked:
 * <code>
 * "#my_link" #> SHtml.hrefFunc(() => println("howdy"))
 * </code>
 */
def hrefFunc(func: () => Any): NodeSeq => NodeSeq = {
  val allEvent = List("href")
  ns => {
    // walk the NodeSeq, rewriting the href of every Elem (Groups recursed)
    def runNodes(in: NodeSeq): NodeSeq =
      in.flatMap {
        case Group(g) => runNodes(g)
        // button
        case e: Elem => {
          // capture the current href value (if any)…
          val oldAttr: Map[String, String] =
            Map(allEvent.
                flatMap(a => e.attribute(a).
                        map(v => a -> (v.text))) :_*)
          // …and strip it from the attribute list so we can re-add it
          val newAttr = e.attributes.filter{
            case up: UnprefixedAttribute => !oldAttr.contains(up.key)
            case _ => true
          }
          fmapFunc(func) {
            funcName =>
            new Elem(e.prefix, e.label,
                     allEvent.foldLeft(newAttr){
                       case (meta, attr) =>
                         // append "funcName=_" to the original URL
                         new UnprefixedAttribute(attr,
                                                 Helpers.
                                                 appendFuncToURL(oldAttr.
                                                                 getOrElse(attr, ""),
                                                                 funcName+"=_"),
                                                 meta)
                     }, e.scope, e.child :_*)
          }
        }
        case x => x
      }
    runNodes(ns)
  }
}
/**
 * Create something that's bindable to an event attribute
 * and when the event happens, the command will fire:
 * <code>
 * "input [onblur]" #> SHtml.onEvent(s => Alert("Thanks: "+s))
 * </code>
 */
def onEvent(func: String => JsCmd): (String, JsExp) =
  ajaxCall(JsRaw("this.value"), func)
/**
 * Specify the events (e.g., onblur, onchange, etc.)
 * and the function to execute on those events. Returns
 * a NodeSeq => NodeSeq that will add the events to all
 * the Elements
 * <code>
 * ":text" #> SHtml.onEvents("onchange", "onblur")(s => Alert("yikes "+s))
 * </code>
 */
def onEvents(event: String, events: String*)(func: String => JsCmd):
NodeSeq => NodeSeq = {
  val allEvent = event :: events.toList
  ns => {
    // walk the NodeSeq, appending the ajax call to each listed event
    // attribute of every Elem (Groups recursed)
    def runNodes(in: NodeSeq): NodeSeq =
      in.flatMap {
        case Group(g) => runNodes(g)
        // button
        case e: Elem => {
          // existing handler text for each event, with "; " ready for append
          val oldAttr: Map[String, String] =
            Map(allEvent.
                flatMap(a => e.attribute(a).
                        map(v => a -> (v.text+"; "))) :_*)
          val newAttr = e.attributes.filter{
            case up: UnprefixedAttribute => !oldAttr.contains(up.key)
            case _ => true
          }
          val cmd = ajaxCall(JsRaw("this.value"), func)._2.toJsCmd
          new Elem(e.prefix, e.label,
                   allEvent.foldLeft(newAttr){
                     case (meta, attr) =>
                       new UnprefixedAttribute(attr,
                                               oldAttr.getOrElse(attr, "") +
                                               cmd,
                                               meta)
                   }, e.scope, e.child :_*)
        }
        case x => x
      }
    runNodes(ns)
  }
}
/**
 * execute the function when the form is submitted.
 * This method returns a function that can be applied to
 * form fields (input, button, textarea, select) and the
 * function is executed when the form containing the field is submitted.
 */
def onSubmitUnit(func: () => Any): NodeSeq => NodeSeq =
  onSubmitImpl(func: AFuncHolder)

/**
 * execute the String function when the form is submitted.
 * This method returns a function that can be applied to
 * form fields (input, button, textarea, select) and the
 * function is executed when the form containing the field is submitted.
 */
def onSubmit(func: String => Any): NodeSeq => NodeSeq = {
  // the ascription (func: AFuncHolder) selects the implicit conversion
  onSubmitImpl(func: AFuncHolder)
}

/**
 * execute the List[String] function when the form is submitted.
 * This method returns a function that can be applied to
 * form fields (input, button, textarea, select) and the
 * function is executed when the form containing the field is submitted.
 */
def onSubmitList(func: List[String] => Any): NodeSeq => NodeSeq =
  onSubmitImpl(func: AFuncHolder)

/**
 * Execute the Boolean function when the form is submitted.
 * This method returns a function that can be applied to
 * form fields (input, button, textarea, select) and the
 * function is executed when the form containing the field is submitted.
 */
def onSubmitBoolean(func: Boolean => Any): NodeSeq => NodeSeq =
  onSubmitImpl(func: AFuncHolder)
/**
 * Execute the function when the form is submitted.
 * This method returns a function that can be applied to
 * form fields (input, button, textarea, select) and the
 * function is executed when the form containing the field is submitted.
 *
 * All radio buttons share one registered name; all checkboxes share one
 * registered name; a lone checkbox gets a hidden "false" companion so an
 * unchecked box still submits a value.
 */
def onSubmitImpl(func: AFuncHolder): NodeSeq => NodeSeq =
  (in: NodeSeq) => {
    // state threaded through the traversal (one name per radio/checkbox group)
    var radioName: Box[String] = Empty
    var checkBoxName: Box[String] = Empty
    var checkBoxCnt = 0
    def runNodes(in: NodeSeq): NodeSeq =
      in.flatMap {
        case Group(g) => runNodes(g)
        // button
        case e: Elem if e.label == "button" =>
          fmapFunc(func) {dupWithName(e, _)}
        // textarea
        case e: Elem if e.label == "textarea" =>
          fmapFunc(func) {dupWithName(e, _)}
        // select
        case e: Elem if e.label == "select" =>
          fmapFunc(func) {dupWithName(e, _)}
        // radio: register func once, reuse the name for the whole group
        case e: Elem if e.label == "input" && isRadio(e.attributes) =>
          radioName match {
            case Full(name) => dupWithName(e, name)
            case _ =>
              fmapFunc(func) {
                name => {
                  radioName = Full(name)
                  dupWithName(e, name)
                }
              }
          }
        // checkbox: same one-name-per-group scheme, plus a count for the
        // single-checkbox hidden-false trick below
        case e: Elem if e.label == "input" && isCheckbox(e.attributes) =>
          checkBoxName match {
            case Full(name) =>
              checkBoxCnt += 1
              dupWithName(e, name)
            case _ =>
              fmapFunc(func) {
                name => {
                  checkBoxName = Full(name)
                  checkBoxCnt += 1
                  dupWithName(e, name)
                }
              }
          }
        // generic input
        case e: Elem if e.label == "input" =>
          fmapFunc(func) {dupWithName(e, _)}
        case x => x
      }
    val ret = runNodes(in)
    checkBoxName match {
      // if we've got a single checkbox, add a hidden false checkbox
      case Full(name) if checkBoxCnt == 1 => {
        ret ++ <input type="hidden" name={name} value="false"/>
      }
      case _ => ret
    }
  }
/** Text input: `func` receives the submitted value. */
def text(value: String, func: String => Any, attrs: ElemAttr*): Elem =
  text_*(value, SFuncHolder(func), attrs: _*)

/**
 * Generate an input element for the Settable
 */
def textElem(settable: Settable{type ValueType = String}, attrs: ElemAttr*): Elem =
  text_*(settable.get, SFuncHolder(s => settable.set(s)), attrs: _*)

/**
 * Generate an input field with type email. At some point,
 * there will be graceful fallback for non-HTML5 browsers. FIXME
 */
def email(value: String, func: String => Any, attrs: ElemAttr*): Elem =
  email_*(value, SFuncHolder(func), attrs: _*)

/**
 * Generate an email input element for the Settable. At some point
 * there will be graceful fallback for non-HTML5 browsers. FIXME
 */
def email(settable: Settable{type ValueType = String},
          attrs: ElemAttr*): Elem =
  email_*(settable.get, SFuncHolder(s => settable.set(s)), attrs: _*)

/** Shared email-input builder. */
private def email_*(value: String, func: AFuncHolder, attrs: ElemAttr*): Elem =
  makeFormElement("email", func, attrs: _*) %
  new UnprefixedAttribute("value", Text(value), Null)

/**
 * Generate an input field with type url. At some point,
 * there will be graceful fallback for non-HTML5 browsers. FIXME
 */
def url(value: String, func: String => Any, attrs: ElemAttr*): Elem =
  url_*(value, SFuncHolder(func), attrs: _*)

/**
 * Generate a url input element for the Settable. At some point
 * there will be graceful fallback for non-HTML5 browsers. FIXME
 */
def url(settable: Settable{type ValueType = String},
        attrs: ElemAttr*): Elem =
  url_*(settable.get, SFuncHolder(s => settable.set(s)), attrs: _*)

/** Shared url-input builder. */
private def url_*(value: String, func: AFuncHolder, attrs: ElemAttr*): Elem =
  makeFormElement("url", func, attrs: _*) %
  new UnprefixedAttribute("value", Text(value), Null)
/**
 * Generate an input field with type number. At some point,
 * there will be graceful fallback for non-HTML5 browsers. FIXME
 *
 * Note: `func` is only invoked when the submitted string parses as an Int.
 */
def number(value: Int, func: Int => Any,
           min: Int, max: Int, attrs: ElemAttr*): Elem =
  number_*(value,
           min, max,
           SFuncHolder(s => Helpers.asInt(s).map(func)), attrs: _*)

/**
 * Generate a number input element for the Settable. At some point
 * there will be graceful fallback for non-HTML5 browsers. FIXME
 */
def number(settable: Settable{type ValueType = Int},
           min: Int, max: Int,
           attrs: ElemAttr*): Elem =
  number_*(settable.get, min, max,
           SFuncHolder(s => Helpers.asInt(s).map(s => settable.set(s))),
           attrs: _*)

/** Shared number-input builder; adds value/min/max attributes. */
private def number_*(value: Int,
                     min: Int, max: Int,
                     func: AFuncHolder, attrs: ElemAttr*): Elem = {
  import Helpers._
  makeFormElement("number",
                  func,
                  attrs: _*) %
  ("value" -> value.toString) %
  ("min" -> min.toString) %
  ("max" -> max.toString)
}

/**
 * Generate an input field with type range. At some point,
 * there will be graceful fallback for non-HTML5 browsers. FIXME
 *
 * Note: `func` is only invoked when the submitted string parses as an Int.
 */
def range(value: Int, func: Int => Any,
          min: Int, max: Int, attrs: ElemAttr*): Elem =
  range_*(value,
          min, max,
          SFuncHolder(s => Helpers.asInt(s).map(func)), attrs: _*)

/**
 * Generate a range input element for the Settable. At some point
 * there will be graceful fallback for non-HTML5 browsers. FIXME
 */
def range(settable: Settable{type ValueType = Int},
          min: Int, max: Int,
          attrs: ElemAttr*): Elem =
  range_*(settable.get, min, max,
          SFuncHolder(s => Helpers.asInt(s).map(s => settable.set(s))),
          attrs: _*)

/** Shared range-input builder; adds value/min/max attributes. */
private def range_*(value: Int,
                    min: Int, max: Int,
                    func: AFuncHolder, attrs: ElemAttr*): Elem = {
  import Helpers._
  makeFormElement("range",
                  func,
                  attrs: _*) %
  ("value" -> value.toString) %
  ("min" -> min.toString) %
  ("max" -> max.toString)
}
/** Text input with a client-side ajax test run on blur. */
def textAjaxTest(value: String, func: String => Any, ajaxTest: String => JsCmd, attrs: ElemAttr*): Elem =
  text_*(value, SFuncHolder(func), ajaxTest, attrs: _*)

/** Text input with an optional client-side ajax test run on blur. */
def textAjaxTest(value: String, func: String => Any, ajaxTest: Box[String => JsCmd], attrs: ElemAttr*): Elem =
  text_*(value, SFuncHolder(func), ajaxTest, attrs: _*)

/** Password input: `func` receives the submitted value. */
def password(value: String, func: String => Any, attrs: ElemAttr*): Elem =
  makeFormElement("password", SFuncHolder(func), attrs: _*) % new UnprefixedAttribute("value", Text(value), Null)

/** Password input bound to a Settable. */
def passwordElem(settable: Settable{type ValueType = String}, attrs: ElemAttr*): Elem =
  makeFormElement("password", SFuncHolder(s => settable.set(s)), attrs: _*) % new UnprefixedAttribute("value", Text(settable.get), Null)

/** Hidden field (value "true") that fires `func` on submission. */
def hidden(func: () => Any, attrs: ElemAttr*): Elem =
  makeFormElement("hidden", NFuncHolder(func), attrs: _*) % ("value" -> "true")

/** Hidden field carrying `defaultlValue`; `func` receives the value. */
def hidden(func: (String) => Any, defaultlValue: String, attrs: ElemAttr*): Elem =
  makeFormElement("hidden", SFuncHolder(func), attrs: _*) % ("value" -> defaultlValue)

/**
 * Create an HTML button with strOrNodeSeq as the body. The
 * button will be type submit.
 *
 * @param strOrNodeSeq -- the String or NodeSeq (either will work just fine)
 * to put into the body of the button
 * @param func -- the function to execute when the form containing the button
 * is posted
 * @param attrs -- the attributes to append to the button
 * @return a button HTML Element b
 */
def button(strOrNodeSeq: StringOrNodeSeq, func: () => Any, attrs: ElemAttr*): Elem = {
  def doit: Elem = {
    attrs.foldLeft(fmapFunc(contextFuncBuilder(func))(name =>
      <button type="submit" name={name} value="_">{
        strOrNodeSeq.nodeSeq}</button>))(_ % _)
  }
  // run inside formGroup(1) so the button function fires after field functions
  _formGroup.is match {
    case Empty => formGroup(1)(doit)
    case _ => doit
  }
}

/**
 * Generates a form submission button.
 *
 * @param value The label for the button
 * @param func The function that will be executed on form submission
 * @param attrs Optional XHTML element attributes that will be applied to the button
 */
def submit(value: String, func: () => Any, attrs: ElemAttr*): Elem = {
  def doit = {
    makeFormElement("submit", NFuncHolder(func), attrs: _*) %
    new UnprefixedAttribute("value", Text(value), Null)
  }
  _formGroup.is match {
    case Empty => formGroup(1)(doit)
    case _ => doit
  }
}
/**
 * Constructs an Ajax submit button that can be used inside ajax forms.
 * Multiple buttons can be used in the same form.
 *
 * @param value - the button text
 * @param func - the ajax function to be called
 * @param attrs - button attributes
 *
 */
def ajaxSubmit(value: String, func: () => JsCmd, attrs: ElemAttr*): Elem = {
  val funcName = "z" + Helpers.nextFuncName
  addFunctionMap(funcName, contextFuncBuilder(func))
  (attrs.foldLeft(<input type="submit" name={funcName}/>)(_ % _)) %
  new UnprefixedAttribute("value", Text(value), Null) %
  // tag the ajax submission with this button's function name so the server
  // knows which of several submit buttons was pressed
  ("onclick" -> ("liftAjax.lift_uriSuffix = '"+funcName+"=_'; return true;"))
}

/**
 * Generates a form submission button with a default label.
 *
 * @param func The function that will be executed on form submission
 * @param attrs Optional XHTML element attributes that will be applied to the button
 */
def submitButton(func: () => Any, attrs: ElemAttr*): Elem = makeFormElement("submit", NFuncHolder(func), attrs: _*)

/**
 * Takes a form and wraps it so that it will be submitted via AJAX.
 *
 * @param body The form body. This should not include the <form> tag.
 */
def ajaxForm(body: NodeSeq) = (<lift:form>{body}</lift:form>)

/**
 * Takes a form and wraps it so that it will be submitted via AJAX.
 *
 * @param body The form body. This should not include the <form> tag.
 * @param onSubmit JavaScript code to execute on the client prior to submission
 *
 * @deprecated Use ajaxForm(NodeSeq,JsCmd) instead
 */
def ajaxForm(onSubmit: JsCmd, body: NodeSeq) = (<lift:form onsubmit={onSubmit.toJsCmd}>{body}</lift:form>)

/**
 * Takes a form and wraps it so that it will be submitted via AJAX.
 *
 * @param body The form body. This should not include the <form> tag.
 * @param onSubmit JavaScript code to execute on the client prior to submission
 */
def ajaxForm(body: NodeSeq, onSubmit: JsCmd) = (<lift:form onsubmit={onSubmit.toJsCmd}>{body}</lift:form>)

/**
 * Takes a form and wraps it so that it will be submitted via AJAX. This also
 * takes a parameter for script code that will be executed after the form has been submitted.
 *
 * @param body The form body. This should not include the <form> tag.
 * @param postSubmit Code that should be executed after a successful submission
 */
def ajaxForm(body : NodeSeq, onSubmit : JsCmd, postSubmit : JsCmd) = (<lift:form onsubmit={onSubmit.toJsCmd} postsubmit={postSubmit.toJsCmd}>{body}</lift:form>)
/**
 * Takes a form and wraps it so that it will be submitted via AJAX and processed by
 * a JSON handler. This can be useful if you may have dynamic client-side modification
 * of the form (addition or removal).
 *
 * @param jsonHandler The handler that will process the form
 * @param body The form body. This should not include the <form> tag.
 */
def jsonForm(jsonHandler: JsonHandler, body: NodeSeq): NodeSeq = jsonForm(jsonHandler, Noop, body)

/**
 * Takes a form and wraps it so that it will be submitted via AJAX and processed by
 * a JSON handler. This can be useful if you may have dynamic client-side modification
 * of the form (addition or removal).
 *
 * @param jsonHandler The handler that will process the form
 * @param onSubmit JavaScript code that will be executed on the client prior to submitting
 * the form
 * @param body The form body. This should not include the <form> tag.
 */
def jsonForm(jsonHandler: JsonHandler, onSubmit: JsCmd, body: NodeSeq): NodeSeq = {
  val id = formFuncName
  <form onsubmit={(onSubmit & jsonHandler.call("processForm", FormToJSON(id)) & JsReturn(false)).toJsCmd} id={id}>{body}</form>
}

/**
 * Having a regular form, this method can be used to send the content of the form as JSON.
 * the request will be processed by the jsonHandler
 *
 * @param jsonHandler - the handler that process this request
 * @param formId - the id of the form
 */
def submitJsonForm(jsonHandler: JsonHandler, formId: String):JsCmd = jsonHandler.call("processForm", FormToJSON(formId))

/**
 * Having a regular form, this method can be used to send the serialized content of the form.
 *
 * @param formId - the id of the form
 */
def submitAjaxForm(formId: String):JsCmd = SHtml.makeAjaxCall(LiftRules.jsArtifacts.serialize(formId))
/**
 * Vend a function that will take all of the form elements and turns them
 * into Ajax forms
 */
def makeFormsAjax: NodeSeq => NodeSeq = "form" #> ((ns: NodeSeq) =>
  (ns match {
    case e: Elem => {
      // ensure the form has an id so it can be serialized from JS
      val id: String = e.attribute("id").map(_.text) getOrElse
      Helpers.nextFuncName
      // strip the attributes we are about to replace
      val newMeta = e.attributes.filter{
        case up: UnprefixedAttribute =>
          up.key match {
            case "id" => false
            case "action" => false
            case "onsubmit" => false
            case "method" => false
            case _ => true
          }
        case _ => true
      }
      new Elem(e.prefix, e.label,
               newMeta, e.scope, e.child :_*) % ("id" -> id) %
      ("action" -> "javascript://") %
      ("onsubmit" ->
       (SHtml.makeAjaxCall(LiftRules.jsArtifacts.serialize(id)).toJsCmd +
        "; return false;"))
    }
    case x => x
  }): NodeSeq)
/**
 * Submits a form denominated by a formId and execute the func function
 * after form fields functions are executed.
 */
def submitAjaxForm(formId: String, func: () => JsCmd): JsCmd = {
  // "Z" prefix sorts this function after the field functions ("F"/"z" names)
  val funcName = "Z" + Helpers.nextFuncName
  addFunctionMap(funcName, contextFuncBuilder(func))
  makeAjaxCall(JsRaw(
    LiftRules.jsArtifacts.serialize(formId).toJsCmd + " + " +
    Str("&" + funcName + "=true").toJsCmd))
}
/**
 * Having a regular form, this method can be used to send the serialized content of the form.
 *
 * @param formId - the id of the form
 * @param postSubmit - the function that needs to be called after a successful request
 */
def submitAjaxForm(formId: String, postSubmit: Call):JsCmd =
  SHtml.makeAjaxCall(LiftRules.jsArtifacts.serialize(formId), AjaxContext.js(Full(postSubmit.toJsCmd)))
/**
 * Map each option to a random nonce so the real values never reach the
 * browser; returns (nonce/text pairs, nonce of the default, and a holder
 * that translates a submitted nonce back to its T and calls onSubmit).
 */
private def secureOptions[T](options: Seq[(T, String)], default: Box[T],
                             onSubmit: T => Any): (Seq[(String, String)], Box[String], AFuncHolder) = {
  val secure = options.map {case (obj, txt) => (obj, randomString(20), txt)}
  val defaultNonce = default.flatMap(d => secure.find(_._1 == d).map(_._2))
  val nonces = secure.map {case (obj, nonce, txt) => (nonce, txt)}
  // unknown nonces are silently ignored
  def process(nonce: String): Unit =
    secure.find(_._2 == nonce).map(x => onSubmit(x._1))
  (nonces, defaultNonce, SFuncHolder(process))
}
/**
 * Create a select box based on the list with a default value and the function to be executed on
 * form submission
 *
 * @param opts -- the options. A list of value and text pairs (value, text to display)
 * @param deflt -- the default value (or Empty if no default value)
 * @param func -- the function to execute on form submission
 */
def select(opts: Seq[(String, String)], deflt: Box[String], func: String => Any, attrs: ElemAttr*): Elem =
  select_*(opts, deflt, SFuncHolder(func), attrs: _*)

/**
 * Create a select box based on the list with a default value and the function
 * to be executed on form submission
 *
 * @param options -- a list of values
 * @param default -- the default value (or Empty if no default value)
 * @param attrs -- the attributes to append to the resulting Elem,
 * these may be name-value pairs (static attributes) or special
 * HTML5 ElemAtts
 * @param onSubmit -- the function to execute on form submission
 * @param f -- the function that converts a T to a Display String.
 */
def selectElem[T](options: Seq[T], default: Box[T], attrs: ElemAttr*)
(onSubmit: T => Any)
(implicit f: PairStringPromoter[T]):
Elem = {
  selectObj[T](options.map(v => (v, f(v))), default, onSubmit, attrs :_*)
}
/**
 * Create a select box based on the list, bound to a LiftValue: its current
 * value is the default and it is updated on form submission.
 *
 * @param options -- a list of values
 * @param settable -- the LiftValue supplying the default and receiving the selection
 * @param attrs -- the attributes to append to the resulting Elem,
 * these may be name-value pairs (static attributes) or special
 * HTML5 ElemAtts
 * @param f -- the function that converts a T to a Display String.
 */
def selectElem[T](options: Seq[T],
                  settable: LiftValue[T],
                  attrs: ElemAttr*)
(implicit f: PairStringPromoter[T]):
Elem = {
  selectObj[T](options.map(v => (v, f(v))), Full(settable.get),
               s => settable.set(s), attrs :_*)
}
/**
 * Create a select box based on the list with a default value and the function
 * to be executed on form submission
 *
 * @param options -- a list of value and text pairs (value, text to display)
 * @param default -- the default value (or Empty if no default value)
 * @param onSubmit -- the function to execute on form submission
 */
def selectObj[T](options: Seq[(T, String)], default: Box[T],
                 onSubmit: T => Any, attrs: ElemAttr*): Elem = {
  // browser only ever sees random nonces, never the T values
  val (nonces, defaultNonce, secureOnSubmit) =
    secureOptions(options, default, onSubmit)
  select_*(nonces, defaultNonce, secureOnSubmit, attrs: _*)
}

/**
 * Create a select box based on the list with a default value and the function to be executed on
 * form submission
 *
 * @param opts -- the options. A list of value and text pairs
 * @param deflt -- the default value (or Empty if no default value)
 * @param func -- the function to execute on form submission
 */
def select_*(opts: Seq[(String, String)], deflt: Box[String],
             func: AFuncHolder, attrs: ElemAttr*): Elem = {
  val vals = opts.map(_._1)
  // reject any submitted value not present in the offered options
  val testFunc = LFuncHolder(in => in.filter(v => vals.contains(v)) match {case Nil => false case xs => func(xs)}, func.owner)
  attrs.foldLeft(fmapFunc(testFunc)(fn => <select name={fn}>{opts.flatMap {case (value, text) => (<option value={value}>{text}</option>) % selected(deflt.exists(_ == value))}}</select>))(_ % _)
}
/**
 * Create a select box based on the list with a default value and the function to be executed on
 * form submission. No check is made to see if the resulting value was in the original list.
 * For use with DHTML form updating.
 *
 * @param opts -- the options. A list of value and text pairs
 * @param deflt -- the default value (or Empty if no default value)
 * @param func -- the function to execute on form submission
 */
def untrustedSelect(opts: Seq[(String, String)], deflt: Box[String],
                    func: String => Any, attrs: ElemAttr*): Elem =
  untrustedSelect_*(opts, deflt, SFuncHolder(func), attrs: _*)

/**
 * Create a select box based on the list with a default value and the function to be executed on
 * form submission. No check is made to see if the resulting value was in the original list.
 * For use with DHTML form updating.
 *
 * @param opts -- the options. A list of value and text pairs
 * @param deflt -- the default value (or Empty if no default value)
 * @param func -- the function to execute on form submission
 */
def untrustedSelect_*(opts: Seq[(String, String)], deflt: Box[String],
                      func: AFuncHolder, attrs: ElemAttr*): Elem =
  fmapFunc(func)(funcName =>
    attrs.foldLeft(<select name={funcName}>{opts.flatMap {case (value, text) => (<option value={value}>{text}</option>) % selected(deflt.exists(_ == value))}}</select>)(_ % _))
/**
 * Create a multiple select box based on the list with a default value and the function to be executed on
 * form submission. No check is made to see if the resulting value was in the original list.
 * For use with DHTML form updating.
 *
 * @param opts -- the options. A list of value and text pairs
 * @param deflt -- the default value (or Empty if no default value)
 * @param func -- the function to execute on form submission
 */
def untrustedMultiSelect(opts: Seq[(String, String)], deflt: Seq[String],
                         func: List[String] => Any, attrs: ElemAttr*): NodeSeq =
  untrustedMultiSelect_*(opts, deflt, LFuncHolder(func), attrs: _*)

/**
 * Create a multiple select box based on the list with a default value and the function to be executed on
 * form submission. No check is made to see if the resulting value was in the original list.
 * For use with DHTML form updating.
 *
 * @param opts -- the options. A list of value and text pairs
 * @param deflt -- the default value (or Empty if no default value)
 * @param lf -- the function to execute on form submission
 */
def untrustedMultiSelect_*(opts: Seq[(String, String)], deflt: Seq[String],
                           lf: AFuncHolder, attrs: ElemAttr*): NodeSeq = {
  // companion hidden field guarantees the parameter is submitted even when
  // nothing is selected; its sentinel value is filtered back out here
  val hiddenId = Helpers.nextFuncName
  fmapFunc(LFuncHolder(l => lf(l.filter(_ != hiddenId)))) {
    funcName => NodeSeq.fromSeq(
      List(
        attrs.foldLeft(<select multiple="true" name={funcName}>{opts.flatMap {case (value, text) => (<option value={value}>{text}</option>) % selected(deflt.contains(value))}}</select>)(_ % _),
        <input type="hidden" value={hiddenId} name={funcName}/>
      )
    )
  }
}
private def selected(in: Boolean) = if (in) new UnprefixedAttribute("selected", "selected", Null) else Null
def multiSelect(opts: Seq[(String, String)], deflt: Seq[String],
func: List[String] => Any, attrs: ElemAttr*): Elem =
multiSelect_*(opts, deflt, LFuncHolder(func), attrs: _*)
  /**
   * Create a select box based on the list with a default value and the function
   * to be executed on form submission
   *
   * @param options -- the values; each is rendered to its display text via the implicit PairStringPromoter
   * @param default -- the default value (or Empty if no default value)
   * @param onSubmit -- the function to execute on form submission
   */
  def multiSelectElem[T](options: Seq[T], default: Seq[T], attrs: ElemAttr*)
                        (onSubmit: List[T] => Any)
                        (implicit f: PairStringPromoter[T]): Elem = {
    // Pair each value with its display string, then defer to multiSelectObj.
    multiSelectObj[T](options.map(v => (v, f(v))), default,
                      onSubmit, attrs :_*)
  }
  /**
   * Create a select box based on the list with a default value and the function
   * to be executed on form submission
   *
   * @param options -- a list of value and text pairs (value, text to display)
   * @param default -- the default value (or Empty if no default value)
   * @param onSubmit -- the function to execute on form submission
   */
  def multiSelectObj[T](options: Seq[(T, String)], default: Seq[T],
                        onSubmit: List[T] => Any, attrs: ElemAttr*): Elem = {
    // Replace the real values with server-generated nonces so the client can
    // only submit values that were actually offered (see secureMultiOptions).
    val (nonces, defaultNonce, secureOnSubmit) =
      secureMultiOptions(options, default, onSubmit)
    multiSelect_*(nonces, defaultNonce, secureOnSubmit, attrs: _*)
  }
private[http] def secureMultiOptions[T](options: Seq[(T, String)], default: Seq[T],
onSubmit: List[T] => Any): (Seq[(String, String)],
Seq[String], AFuncHolder) =
{
val o2 = options.toList
val secure: List[(T, String, String)] = o2.map {case (obj, txt) => (obj, randomString(20), txt)}
val sm: Map[String, T] = Map(secure.map(v => (v._2, v._1)): _*)
val defaultNonce: Seq[String] = default.flatMap(d => secure.find(_._1 == d).map(_._2))
val nonces: List[(String, String)] = secure.map {case (obj, nonce, txt) => (nonce, txt)}.toList
def process(info: List[String]): Unit = onSubmit(info.flatMap(sm.get))
(nonces, defaultNonce, LFuncHolder(process))
}
  /**
   * Backing implementation: registers the function holder and renders a
   * multiple select element, marking options contained in `deflt` as selected.
   */
  def multiSelect_*(opts: Seq[(String, String)],
                    deflt: Seq[String],
                    func: AFuncHolder, attrs: ElemAttr*): Elem =
    fmapFunc(func)(funcName =>
      attrs.foldLeft(<select multiple="true" name={funcName}>{opts.flatMap(o => (<option value={o._1}>{o._2}</option>) % selected(deflt.contains(o._1)))}</select>)(_ % _))
  // Create a textarea with the given initial value; func receives the submitted text.
  def textarea(value: String, func: String => Any, attrs: ElemAttr*): Elem =
    textarea_*(value, SFuncHolder(func), attrs: _*)
  // Create a textarea bound to a Settable: initialized from settable.get and
  // writing the submitted text back via settable.set.
  def textareaElem(settable: Settable{type ValueType = String},
                   attrs: ElemAttr*):
  Elem = textarea_*(settable.get, SFuncHolder(s => settable.set(s)), attrs: _*)
  // Backing implementation: registers the function holder and renders the textarea.
  def textarea_*(value: String, func: AFuncHolder, attrs: ElemAttr*): Elem =
    fmapFunc(func)(funcName =>
      attrs.foldLeft(<textarea name={funcName}>{value}</textarea>)(_ % _))
  // Create a group of radio buttons for the given options; func receives the
  // selected value on form submission.
  def radio(opts: Seq[String], deflt: Box[String], func: String => Any,
            attrs: ElemAttr*): ChoiceHolder[String] =
    radio_*(opts, deflt, SFuncHolder(func), attrs: _*)
  /**
   * Generate a collection or radio box items from a sequence of
   * things. The submitted selection is mapped back to the original value and
   * passed to onSubmit; if nothing was selected, onSubmit receives Empty.
   */
  def radioElem[T](opts: Seq[T], deflt: Box[T], attrs: ElemAttr*)
                  (onSubmit: Box[T] => Any): ChoiceHolder[T] = {
    // Pair each value with a generated opaque id; only these ids go to the client.
    val possible = opts.map(v => Helpers.nextFuncName -> v).toList
    // Sentinel posted by the hidden input below so the field is submitted even
    // when no radio button is selected.
    val hiddenId = Helpers.nextFuncName
    fmapFunc(LFuncHolder(lst => lst.filter(_ != hiddenId) match {
      case Nil => onSubmit(Empty)
      case x :: _ => onSubmit(possible.filter(_._1 == x).
                              headOption.map(_._2))
    })) {
      name => {
        val items = possible.zipWithIndex.map {
          case ((id, value), idx) => {
            val radio =
              attrs.foldLeft(<input type="radio"
                name={name} value={id}/>)(_ % _) %
              checked(deflt.filter(_ == value).isDefined)
            // The hidden sentinel input is attached to the first item only.
            val elem = if (idx == 0) {
              radio ++ <input type="hidden" value={hiddenId} name={name}/>
            } else {
              radio
            }
            ChoiceItem(value, elem)
          }
        }
        ChoiceHolder(items)
      }
    }
  }
  // Backing implementation for radio: renders one ChoiceItem per option,
  // marking the option equal to the default (if any) as checked.
  def radio_*(opts: Seq[String], deflt: Box[String],
              func: AFuncHolder, attrs: ElemAttr*): ChoiceHolder[String] = {
    fmapFunc(func) {
      name =>
        val itemList = opts.map(v => ChoiceItem(v,
          attrs.foldLeft(<input type="radio" name={name} value={v}/>)(_ % _) %
          checked(deflt.filter((s: String) => s == v).isDefined)))
        ChoiceHolder(itemList)
    }
  }
/**
* Defines a form element for a file upload that will call the
* specified function when the file is uploaded if the file size
* is greater than zero. Note that in order to use the fileUpload
* element you need to specify the multipart attribute on your
* snippet tag:
*
* <pre name="code" class="xml">
* <lift:Some.snippet form="POST" multipart="true">
* ...
* </lift:Some.snippet>
* </pre>
*/
def fileUpload(func: FileParamHolder => Any, attrs: ElemAttr*): Elem = {
val f2: FileParamHolder => Any = fp => if (fp.length > 0) func(fp)
fmapFunc(BinFuncHolder(f2)) { name =>
attrs.foldLeft(<input type="file" name={ name }/>) { _ % _ }
}
}
  /**
   * Holds a form control as HTML along with some user defined value.
   *
   * @param key the user-defined value associated with this control
   * @param xhtml the rendered form control
   */
  final case class ChoiceItem[T](key: T, xhtml: NodeSeq)
/** Holds a series of choices: HTML for input controls alongside some user defined value */
final case class ChoiceHolder[T](items: Seq[ChoiceItem[T]]) {
/** Retrieve the ChoiceItem that has the given key, throwing NoSuchElementException if there is no matching ChoiceItem */
def apply(in: T): NodeSeq = items.filter(_.key == in).first.xhtml
/** Retrieve the nth ChoiceItem, 0-based */
def apply(in: Int): NodeSeq = items(in).xhtml
/** Apply a function to each ChoiceItem, collecting the results */
def map[A](f: ChoiceItem[T] => A) = items.map(f)
/** Apply a function to each ChoiceItem, concatenating the results */
def flatMap[A](f: ChoiceItem[T] => Iterable[A]) = items.flatMap(f)
/** Return the ChoiceItems that the given function returns true for */
def filter(f: ChoiceItem[T] => Boolean) = items.filter(f)
/** Generate a simple form by calling ChoiceItem.htmlize on each ChoiceItem and concatenating the resulting HTML */
def toForm: NodeSeq = flatMap(ChoiceHolder.htmlize)
}
  object ChoiceHolder {
    /**
     * Convert a ChoiceItem into a span containing the control and the toString of the key.
     * Note: this is a mutable, application-global hook; replacing it changes the
     * rendering of every ChoiceHolder.toForm call.
     */
    var htmlize: ChoiceItem[_] => NodeSeq = c => (<span>{c.xhtml} {c.key.toString}<br/> </span>)
  }
private def checked(in: Boolean) = if (in) new UnprefixedAttribute("checked", "checked", Null) else Null
private def setId(in: Box[String]) = in match {case Full(id) => new UnprefixedAttribute("id", Text(id), Null); case _ => Null}
  /**
   * Generate a ChoiceHolder of possible checkbox type inputs that calls back to the given function when the form is submitted.
   *
   * @param possible complete sequence of possible values, each a separate checkbox when rendered
   * @param actual values to be preselected
   * @param func function to receive all values corresponding to the checked boxes
   * @param attrs sequence of attributes to apply to each checkbox input element
   * @return ChoiceHolder containing the checkboxes and values in order
   */
  def checkbox[T](possible: Seq[T], actual: Seq[T], func: Seq[T] => Any, attrs: ElemAttr*): ChoiceHolder[T] = {
    val len = possible.length
    // Each checkbox submits its index into `possible`; out-of-range values
    // (including the hidden "-1" below) are dropped before func is called.
    fmapFunc(LFuncHolder((strl: List[String]) => {func(strl.map(toInt(_)).filter(x => x >= 0 && x < len).map(possible(_))); true})) {
      name =>
        // The hidden "-1" input attached to the first item guarantees the
        // field is submitted even when no box is checked.
        ChoiceHolder(possible.toList.zipWithIndex.map(p =>
          ChoiceItem(p._1,
            attrs.foldLeft(<input type="checkbox" name={name} value={p._2.toString}/>)(_ % _) %
            checked(actual.contains(p._1)) ++ (if (p._2 == 0) (<input type="hidden" name={name} value="-1"/>) else Nil))))
    }
  }
  /**
   * Defines a new checkbox for the Settable: initialized from settable.get and
   * writing the submitted state back via settable.set.
   */
  def checkboxElem(settable: Settable{type ValueType = Boolean}, attrs: ElemAttr*): NodeSeq = {
    checkbox_id(settable.get, s => settable.set(s), Empty, attrs: _*)
  }
  /**
   * Defines a new checkbox set to { @code value } and running { @code func } when the
   * checkbox is submitted.
   */
  def checkbox(value: Boolean, func: Boolean => Any, attrs: ElemAttr*): NodeSeq = {
    checkbox_id(value, func, Empty, attrs: _*)
  }
/**
* Defines a new checkbox for the Settable
*/
def checkbox_id(settable: Settable{type ValueType = Boolean},
id: Box[String], attrs: ElemAttr*): NodeSeq = {
def from(f: Boolean => Any): List[String] => Boolean = (in: List[String]) => {
f(in.exists(toBoolean(_)))
true
}
checkbox_*(settable.get, LFuncHolder(from(s => settable.set(s))), id, attrs: _*)
}
/**
* Defines a new checkbox set to { @code value } and running { @code func } when the
* checkbox is submitted. Has an id of { @code id }.
*/
def checkbox_id(value: Boolean, func: Boolean => Any,
id: Box[String], attrs: ElemAttr*): NodeSeq = {
def from(f: Boolean => Any): List[String] => Boolean = (in: List[String]) => {
f(in.exists(toBoolean(_)))
true
}
checkbox_*(value, LFuncHolder(from(func)), id, attrs: _*)
}
  /**
   * Backing implementation for the checkbox generators: renders the checkbox
   * plus a hidden "false" input with the same name, so the field is always
   * submitted whether or not the box is checked.
   */
  def checkbox_*(value: Boolean, func: AFuncHolder, id: Box[String],
                 attrs: ElemAttr*): NodeSeq = {
    fmapFunc(func)(name =>
      (attrs.foldLeft(<input type="checkbox" name={name} value="true"/>)(_ % _) % checked(value) % setId(id)) ++
      (<input type="hidden" name={name} value="false"/>)
    )
  }
}
/**
 * Enumeration of Ajax response types.
 * NOTE(review): the Enumeration(names*) constructor used here assigns the
 * string names "javascript" and "json" to the values; that constructor is
 * deprecated/removed in later Scala versions — confirm before upgrading Scala.
 */
object AjaxType extends Enumeration("javascript", "json") {
  val JavaScript, JSON = Value
}
/** Factory methods for building Ajax contexts. */
object AjaxContext {
  /** JavaScript context with both success and failure callbacks. */
  def js(success: Box[String], failure: Box[String]) = new JsContext(success, failure)

  /** JavaScript context with only a success callback. */
  def js(success: Box[String]): JsContext = js(success, Empty)

  /** JSON context with both success and failure callbacks. */
  def json(success: Box[String], failure: Box[String]) = new JsonContext(success, failure)

  /** JSON context with only a success callback. */
  def json(success: Box[String]): JsonContext = json(success, Empty)
}
case class AjaxContext(success: Box[String], failure: Box[String], responseType: AjaxType.Value)
case class JsContext(override val success: Box[String], override val failure: Box[String]) extends AjaxContext(success, failure, AjaxType.JavaScript)
case class JsonContext(override val success: Box[String], override val failure: Box[String]) extends AjaxContext(success, failure, AjaxType.JSON)
/** ElemAttr implementations for HTML5-specific attributes. */
object Html5ElemAttr {
  /**
   * The autofocus attribute
   */
  final case object Autofocus extends SHtml.ElemAttr {
    // FIXME detect HTML5 browser and do the right thing
    def apply(in: Elem): Elem = in % ("autofocus" -> "true")
  }

  /**
   * The required attribute
   */
  final case object Required extends SHtml.ElemAttr {
    // FIXME detect HTML5 browser and do the right thing
    def apply(in: Elem): Elem = in % ("required" -> "true")
  }

  /**
   * The placeholder attribute for HTML5.
   *
   * @param text - a String or () => String that will be the
   * placeholder property in the attribute
   */
  final case class Placeholder(text: StringFunc) extends SHtml.ElemAttr {
    // FIXME detect HTML5 browser and do the right thing
    def apply(in: Elem): Elem = in % ("placeholder" -> text.func())
  }
}
/**
 * Mix this trait into a snippet class so that you have a convenient
 * value to redirect back to (whence).
 * When you're done with the snippet, <code>S.redirectTo(whence)</code>
 */
trait Whence {
  // Captured once when the snippet is instantiated: the referring URL,
  // or "/" when no referer is available.
  protected val whence = S.referer openOr "/"
}
/**
 * Memoize the CSS Selector Transform and the most recent
 * NodeSeq sent to the NodeSeq => NodeSeq so that when
 * applyAgain() is called, the NodeSeq most recently used
 * in apply() is used.
 */
trait MemoizeTransform extends Function1[NodeSeq, NodeSeq] {

  /** Re-run the transform against the NodeSeq most recently passed to apply(). */
  def applyAgain(): NodeSeq
}
| wsaccaco/lift | framework/lift-base/lift-webkit/src/main/scala/net/liftweb/http/SHtml.scala | Scala | apache-2.0 | 80,739 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.cluster.master
import org.apache.gearpump.cluster.worker.WorkerId
import scala.util.Success
import akka.actor._
import akka.testkit.TestProbe
import com.typesafe.config.Config
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.apache.gearpump.cluster.AppMasterToMaster.RequestResource
import org.apache.gearpump.cluster.AppMasterToWorker.{LaunchExecutor, ShutdownExecutor}
import org.apache.gearpump.cluster.MasterToAppMaster.ResourceAllocated
import org.apache.gearpump.cluster.MasterToClient.SubmitApplicationResult
import org.apache.gearpump.cluster.WorkerToAppMaster.ExecutorLaunchRejected
import org.apache.gearpump.cluster.scheduler.{Resource, ResourceAllocation, ResourceRequest}
import org.apache.gearpump.cluster.{MasterHarness, TestUtil}
import org.apache.gearpump.util.ActorSystemBooter._
/**
 * Verifies AppMasterLauncher behavior: it must request a resource from the
 * master, ask the allocated worker to launch an executor, and then either
 * report success to the client or retry/fail cleanly when the launch is
 * rejected. Probe interactions are order-sensitive.
 */
class AppMasterLauncherSpec extends FlatSpec with Matchers
  with BeforeAndAfterEach with MasterHarness {
  override def config: Config = TestUtil.DEFAULT_CONFIG

  val appId = 1
  val executorId = 2
  // Probes standing in for the master, submitting client, worker, and a
  // death-watcher on the launcher under test; re-created for every test.
  var master: TestProbe = null
  var client: TestProbe = null
  var worker: TestProbe = null
  var watcher: TestProbe = null
  var appMasterLauncher: ActorRef = null

  // Boots the actor system, wires the probes, and drives the launcher up to
  // the point where the worker has been asked to launch an executor.
  override def beforeEach(): Unit = {
    startActorSystem()
    master = createMockMaster()
    client = TestProbe()(getActorSystem)
    worker = TestProbe()(getActorSystem)
    watcher = TestProbe()(getActorSystem)
    appMasterLauncher = getActorSystem.actorOf(AppMasterLauncher.props(appId, executorId,
      TestUtil.dummyApp, None, "username", master.ref, Some(client.ref)))
    watcher watch appMasterLauncher
    master.expectMsg(RequestResource(appId, ResourceRequest(Resource(1), WorkerId.unspecified)))
    val resource = ResourceAllocated(
      Array(ResourceAllocation(Resource(1), worker.ref, WorkerId(0, 0L))))
    master.reply(resource)
    worker.expectMsgType[LaunchExecutor]
  }

  override def afterEach(): Unit = {
    shutdownActorSystem()
  }

  "AppMasterLauncher" should "launch appmaster correctly" in {
    worker.reply(RegisterActorSystem("systempath"))
    worker.expectMsgType[ActorSystemRegistered]

    worker.expectMsgType[CreateActor]
    worker.reply(ActorCreated(master.ref, "appmaster"))

    // Successful launch: the client sees Success(appId) and the launcher stops.
    client.expectMsg(SubmitApplicationResult(Success(appId)))
    watcher.expectTerminated(appMasterLauncher)
  }

  "AppMasterLauncher" should "reallocate resource if executor launch rejected" in {
    // A rejection triggers a fresh resource request and launch attempt...
    worker.reply(ExecutorLaunchRejected(""))
    master.expectMsg(RequestResource(appId, ResourceRequest(Resource(1), WorkerId.unspecified)))

    val resource = ResourceAllocated(
      Array(ResourceAllocation(Resource(1), worker.ref, WorkerId(0, 0L))))
    master.reply(resource)
    worker.expectMsgType[LaunchExecutor]

    worker.reply(RegisterActorSystem("systempath"))
    worker.expectMsgType[ActorSystemRegistered]

    worker.expectMsgType[CreateActor]
    // ...and a failure to create the actor shuts the executor down and
    // reports a failed submission to the client before the launcher stops.
    worker.reply(CreateActorFailed("", new Exception))
    worker.expectMsgType[ShutdownExecutor]
    assert(client.receiveN(1).head.asInstanceOf[SubmitApplicationResult].appId.isFailure)
    watcher.expectTerminated(appMasterLauncher)
  }
}
| manuzhang/incubator-gearpump | core/src/test/scala/org/apache/gearpump/cluster/master/AppMasterLauncherSpec.scala | Scala | apache-2.0 | 3,963 |
/*
* Copyright 2017 PayPal
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.squbs.unicomplex
import akka.actor.ActorSystem
import akka.pattern._
import akka.testkit.TestKit
import com.typesafe.config.ConfigFactory
import org.scalatest.OptionValues._
import org.scalatest.flatspec.AsyncFlatSpecLike
import org.scalatest.matchers.should.Matchers
import scala.util.Failure
/**
 * Fixture for FailedFlow1Spec: boots a Unicomplex scanning only the
 * intentionally broken DummyFailedFlowSvc1 cube so the system enters the
 * Failed state.
 */
object FailedFlow1Spec {

  // Classpath entry pointing at the broken cube's squbs metadata.
  val dummyJarsDir = getClass.getClassLoader.getResource("classpaths").getPath
  val classPath = dummyJarsDir + "/DummyFailedFlowSvc1/META-INF/squbs-meta.conf"

  val config = ConfigFactory.parseString(
    s"""
       |squbs {
       |  actorsystem-name = FailedFlow1Spec
       |  ${JMX.prefixConfig} = true
       |}
       |default-listener.bind-port = 0
       |akka.http.server.remote-address-header = on
    """.stripMargin
  )

  import Timeouts._

  // Boot with only the broken cube on the (non-classloader) scan path.
  val boot = UnicomplexBoot(config)
    .createUsing {(name, config) => ActorSystem(name, config)}
    .scanResources(withClassPath = false, classPath)
    .start(startupTimeout)
}
/**
 * Asserts that booting with the broken flow service leaves the Unicomplex in
 * the Failed state and that the InstantiationException is surfaced in the
 * status report.
 */
class FailedFlow1Spec extends TestKit(FailedFlow1Spec.boot.actorSystem) with AsyncFlatSpecLike with Matchers {

  "The DummyFailedFlowSvc1" should "fail" in {
    import Timeouts._
    Unicomplex(system).uniActor ? SystemState map { state =>
      state shouldBe Failed
    }
  }

  "The DummyFailedFlowSvc1" should "expose errors" in {
    import Timeouts._
    (Unicomplex(system).uniActor ? ReportStatus).mapTo[StatusReport] map { report =>
      report.state shouldBe Failed
      // Drill into the first cube's first init report; its Try must hold the
      // InstantiationException raised while creating the flow service.
      val initTry = report.cubes.values.head._2.value.reports.values.head.value
      initTry should matchPattern { case Failure(e: InstantiationException) => }
    }
  }
}
| akara/squbs | squbs-unicomplex/src/test/scala/org/squbs/unicomplex/FailedFlow1Spec.scala | Scala | apache-2.0 | 2,214 |
/*
* Copyright 2013 agwlvssainokuni
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package batches
import java.util.Calendar
import java.util.Calendar.DATE
import java.util.Calendar.HOUR
import org.specs2.execute.AsResult
import org.specs2.execute.Result
import org.specs2.mutable.Specification
import batches.common.BatchStatus
import models.AsyncTask
import models.Member
import play.api._
import play.api.db._
import play.api.test._
/**
 * Batch test for AsyncTaskCleaner: seeds members 1..150 where member i owns
 * i completed tasks, runs the cleaner, and verifies that only the newest
 * `keep` tasks per member survive.
 */
class AsyncTaskCleanerSpec extends Specification {

  // A date offset from now by the given number of days and hours.
  def newDate(date: Int, hour: Int) = Calendar.getInstance() match {
    case cal =>
      cal.add(DATE, date)
      cal.add(HOUR, hour)
      cal.getTime()
  }

  // Test fixture: within one transaction, creates 150 members; member i gets
  // i finished tasks with synthetic start/end times and counters.
  abstract class TestApp extends WithApplication {
    override def around[T: AsResult](t: => T): Result = super.around {
      DB.withTransaction { implicit c =>
        for {
          i <- 1 to 150
          email = "user" + i + "@domain"
          nickname = "ニックネーム" + i
          id <- Member.create(Member(email, nickname, None))
          j <- 1 to i
          name = "タスク名/" + i + "/" + j
          startDtm = Some(newDate(i, 0))
          endDtm = Some(newDate(i, j))
          totalCount = Some((i + j).toLong)
          okCount = Some(i.toLong)
          ngCount = Some(j.toLong)
        } {
          val task = AsyncTask(name, AsyncTask.OkEnd, startDtm, endDtm, totalCount, okCount, ngCount)
          AsyncTask.create(id, task)
        }
      }
      t
    }
  }

  // Fetches up to 150 tasks for the member, in the model's list order.
  def taskList(memberId: Long)(implicit app: Application): Seq[AsyncTask] =
    DB.withTransaction { implicit c =>
      AsyncTask.list(memberId, 0L, 150L)
    }

  "AsyncTaskCleaner" should {
    // "no arguments (keep 100 entries)" — the cleaner's default retention.
    "引数なし (100件残し)" in new TestApp {
      val keep = 100
      val before = for (i <- 1 to 150) yield taskList(i.toLong)
      (new AsyncTaskCleaner)(Seq()) must_== BatchStatus.Ok
      for ((list, i) <- before.zipWithIndex) {
        taskList((i + 1).toLong) must_== list.take(keep)
      }
    }
    // "keep 110 via argument (larger than the default)"
    "引数指定で110件残し (デフォルトより大きい)" in new TestApp {
      val keep = 110
      val before = for (i <- 1 to 150) yield taskList(i.toLong)
      (new AsyncTaskCleaner)(Seq(keep.toString)) must_== BatchStatus.Ok
      for ((list, i) <- before.zipWithIndex) {
        taskList((i + 1).toLong) must_== list.take(keep)
      }
    }
    // "keep 90 via argument (smaller than the default)"
    "引数指定で90件残し (デフォルトより小さい)" in new TestApp {
      val keep = 90
      val before = for (i <- 1 to 150) yield taskList(i.toLong)
      (new AsyncTaskCleaner)(Seq(keep.toString)) must_== BatchStatus.Ok
      for ((list, i) <- before.zipWithIndex) {
        taskList((i + 1).toLong) must_== list.take(keep)
      }
    }
    // "nothing to delete" — empty database yields a warning status.
    "削除対象なし" in new WithApplication {
      (new AsyncTaskCleaner)(Seq()) must_== BatchStatus.Warn
    }
  }
}
| agwlvssainokuni/lifelog | lifelog-batch/test/batches/AsyncTaskCleanerSpec.scala | Scala | apache-2.0 | 3,320 |
package logreceiver.routes
import akka.actor.{ActorRefFactory, ActorSystem}
import com.github.vonnagy.service.container.http.routing.RoutedEndpoints
import com.github.vonnagy.service.container.log.LoggingAdapter
import com.github.vonnagy.service.container.metrics.{Meter, Counter}
import logreceiver.processor.LogBatch
import logreceiver.{logplexFrameId, logplexMsgCount, logplexToken}
import spray.http.{HttpHeaders, StatusCodes}
import scala.util.Try
/**
* Created by ivannagy on 4/9/15.
*/
/**
 * HTTP endpoint that accepts Logplex log batches.
 *
 * POST /logs with media type application/logplex-1 publishes a [[LogBatch]]
 * (token, frame id, message count, raw payload) to the actor system's event
 * stream and replies 204 No Content; any failure is logged, counted, and
 * answered with 500 Internal Server Error.
 */
class LogEndpoints(implicit system: ActorSystem,
                   actorRefFactory: ActorRefFactory) extends RoutedEndpoints with LoggingAdapter {

  // Metrics: successfully received batches, their rate, and failures.
  val logCount = Counter("http.log.receive")
  val logMeter = Meter("http.log.receive.meter")
  val logFailedCount = Counter("http.log.receive.failed")

  val route = {
    post {
      path("logs") {
        logRequest("log-received", akka.event.Logging.DebugLevel) {
          acceptableMediaTypes(logreceiver.`application/logplex-1`) {
            requestEntityPresent {
              logplexMsgCount { msgCount =>
                logplexToken { token =>
                  logplexFrameId { frameId =>
                    entity(as[String]) { payload =>
                      noop { ctx =>
                        Try({
                          // Publish the batch to the waiting processor(s)
                          system.eventStream.publish(LogBatch(token, frameId, msgCount, payload))

                          // Increment the counter
                          logCount.incr
                          logMeter.mark

                          // Mark the request as complete
                          ctx.complete(StatusCodes.NoContent)
                        }) recover {
                          case e =>
                            // Fix: interpolate the extracted frame id value,
                            // not the logplexFrameId directive itself.
                            log.error(s"Unable to handle the log: $frameId", e)
                            // Track failures on the previously-unused counter.
                            logFailedCount.incr
                            ctx.complete(StatusCodes.InternalServerError)
                        }
                      }
                    }
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}
} | vonnagy/log-receiver | src/main/scala/logreceiver/routes/LogEndpoints.scala | Scala | apache-2.0 | 2,108 |
// code-examples/TypeLessDoMore/package-example2.scala
// Demonstrates nested package declarations: each `package X { ... }` block
// opens a scope, sibling packages are reachable by relative path (pkg2 refers
// to pkg1.Class11 without the full com.example prefix), and a chained form
// `package pkg3.pkg31.pkg311 { ... }` declares several levels at once.
package com {
  package example {
    package pkg1 {
      class Class11 {
        def m = "m11"
      }
      class Class12 {
        def m = "m12"
      }
    }
    package pkg2 {
      class Class21 {
        def m = "m21"
        def makeClass11 = {
          new pkg1.Class11
        }
        def makeClass12 = {
          new pkg1.Class12
        }
      }
    }
    package pkg3.pkg31.pkg311 {
      class Class311 {
        // NOTE(review): returns "m21", same as Class21.m — possibly intended
        // to be "m311"; confirm against the book text before changing.
        def m = "m21"
      }
    }
  }
}
| XClouded/t4f-core | scala/src/tmp/TypeLessDoMore/package-example2.scala | Scala | apache-2.0 | 534 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
/**
* Utility functions for working with DataTypes in tests.
* 在测试中使用数据类型的实用功能
*/
object DataTypeTestUtils {

  /**
   * Instances of all [[IntegralType]]s.
   */
  val integralType: Set[IntegralType] = Set(
    ByteType, ShortType, IntegerType, LongType
  )

  /**
   * Instances of all [[FractionalType]]s, including both fixed- and unlimited-precision
   * decimal types.
   */
  val fractionalTypes: Set[FractionalType] = Set(
    DecimalType.USER_DEFAULT,
    DecimalType(20, 5),
    DecimalType.SYSTEM_DEFAULT,
    DoubleType,
    FloatType
  )

  /**
   * Instances of all [[NumericType]]s.
   */
  val numericTypes: Set[NumericType] = integralType ++ fractionalTypes

  // TODO: remove this once we find out how to handle decimal properly in property check
  val numericTypeWithoutDecimal: Set[DataType] = integralType ++ Set(DoubleType, FloatType)

  /**
   * Instances of all [[NumericType]]s and [[CalendarIntervalType]]
   */
  val numericAndInterval: Set[DataType] = numericTypeWithoutDecimal + CalendarIntervalType

  /**
   * All the types that support ordering
   */
  val ordered: Set[DataType] =
    numericTypeWithoutDecimal + BooleanType + TimestampType + DateType + StringType + BinaryType

  /**
   * All the types that we can use in a property check
   */
  val propertyCheckSupported: Set[DataType] = ordered

  /**
   * Instances of all [[AtomicType]]s.
   */
  val atomicTypes: Set[DataType] = numericTypes ++ Set(
    BinaryType,
    BooleanType,
    DateType,
    StringType,
    TimestampType
  )

  /**
   * Instances of [[ArrayType]] for all [[AtomicType]]s. Arrays of these types may contain null.
   */
  val atomicArrayTypes: Set[ArrayType] = atomicTypes.map(ArrayType(_, containsNull = true))
}
| tophua/spark1.52 | sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeTestUtils.scala | Scala | apache-2.0 | 2,934 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.parrot.server
import java.net.InetSocketAddress
import org.jboss.netty.buffer.ChannelBuffers
import org.junit.runner.RunWith
import org.scalatest.WordSpec
import org.scalatest.junit.JUnitRunner
import org.scalatest.matchers.MustMatchers
import com.twitter.conversions.time.intToTimeableNumber
import com.twitter.finagle.kestrel.protocol.Abort
import com.twitter.finagle.kestrel.protocol.Close
import com.twitter.finagle.kestrel.protocol.CloseAndOpen
import com.twitter.finagle.kestrel.protocol.Get
import com.twitter.finagle.kestrel.protocol.Open
import com.twitter.finagle.kestrel.protocol.Peek
import com.twitter.finagle.kestrel.protocol.Response
import com.twitter.finagle.kestrel.protocol.Set
import com.twitter.finagle.kestrel.protocol.Stored
import com.twitter.finagle.kestrel.protocol.Value
import com.twitter.finagle.kestrel.protocol.Values
import com.twitter.finagle.memcached.{ Server => MemcacheServer }
import com.twitter.io.TempFile
import com.twitter.parrot.config.ParrotServerConfig
import com.twitter.util.Eval
import com.twitter.util.RandomSocket
import com.twitter.util.Time
@RunWith(classOf[JUnitRunner])
class KestrelTransportSpec extends WordSpec with MustMatchers {
  // Implicit conversion so plain strings can be used where the memcache/kestrel
  // protocol expects a ChannelBuffer.
  implicit def stringToChannelBuffer(s: String) = ChannelBuffers.wrappedBuffer(s.getBytes)

  // The end-to-end transport tests bind real sockets, so they are skipped when
  // running under CI (SBT_CI env var or system property set).
  if (System.getenv.get("SBT_CI") == null && System.getProperty("SBT_CI") == null) "Kestrel Transport" should {
    "work inside a server config" in {
      val victimPort = RandomSocket.nextPort()
      val serverConfig = makeServerConfig(victimPort)
      val server: ParrotServer[ParrotRequest, Response] = new ParrotServerImpl(serverConfig)
      server must not be null
    }
    "send requests to a 'Kestrel' service" in {
      val victimPort = RandomSocket.nextPort()
      val serverConfig = makeServerConfig(victimPort)
      val transport = serverConfig.transport.getOrElse(fail("no transport configured"))
      val server = new MemcacheServer(new InetSocketAddress(victimPort))
      server.start()

      // Each raw protocol line paired with the response the in-memory
      // memcache server is expected to produce.
      val script = List[(String, Response)](
        "set mah-key 0 0 8\\r\\nDEADBEEF" -> Stored(),
        "set mah-other-key 0 0 8\\r\\nABADCAFE" -> Stored(),
        "get mah-key\\r\\n" -> Values(List(Value("mah-key", "DEADBEEF"))))
      script.foreach {
        case (rawCommand, expectedResponse) =>
          val request = new ParrotRequest(rawLine = rawCommand)
          val rep: Response = transport.sendRequest(request).get()
          rep must be(expectedResponse)
      }
      server.stop()
    }
  }

  // Pure parser tests for the raw-line -> kestrel command extractor.
  "KestrelCommandExtractor" should {
    "parse GET commands" in {
      KestrelCommandExtractor.unapply("GET FOO\\r\\n") must be(Some(Get("FOO", None)))
      KestrelCommandExtractor.unapply("get foo\\r\\n") must be(Some(Get("foo", None)))
      KestrelCommandExtractor.unapply("get foo") must be(Some(Get("foo", None)))
      KestrelCommandExtractor.unapply("get foo \\r\\n") must be(Some(Get("foo", None)))

      KestrelCommandExtractor.unapply("get foo/t=100\\r\\n") must be(Some(Get("foo", Some(100.milliseconds))))

      KestrelCommandExtractor.unapply("get foo bar\\r\\n") must be(None)
      KestrelCommandExtractor.unapply("get") must be(None)
      KestrelCommandExtractor.unapply("get ") must be(None)
    }

    "parse GET command flags" in {
      KestrelCommandExtractor.unapply("get q/abort") must be(Some(Abort("q", None)))
      KestrelCommandExtractor.unapply("get q/close") must be(Some(Close("q", None)))
      KestrelCommandExtractor.unapply("get q/open") must be(Some(Open("q", None)))
      KestrelCommandExtractor.unapply("get q/peek") must be(Some(Peek("q", None)))
      KestrelCommandExtractor.unapply("get q/close/open") must be(Some(CloseAndOpen("q", None)))
      KestrelCommandExtractor.unapply("get q/open/close") must be(Some(CloseAndOpen("q", None)))

      val timeout = Some(100.milliseconds)
      // Note: for abort/close the extractor drops the timeout flag.
      KestrelCommandExtractor.unapply("get q/abort/t=100") must be(Some(Abort("q", None)))
      KestrelCommandExtractor.unapply("get q/close/t=100") must be(Some(Close("q", None)))
      KestrelCommandExtractor.unapply("get q/open/t=100") must be(Some(Open("q", timeout)))
      KestrelCommandExtractor.unapply("get q/peek/t=100") must be(Some(Peek("q", timeout)))
      KestrelCommandExtractor.unapply("get q/close/open/t=100") must be(Some(CloseAndOpen("q", timeout)))
      KestrelCommandExtractor.unapply("get q/open/close/t=100") must be(Some(CloseAndOpen("q", timeout)))

      KestrelCommandExtractor.unapply("get q/say-what-now") must be(None)
    }

    "parse SET commands" in {
      KestrelCommandExtractor.unapply("SET FOO 0 0 8\\r\\n12345678") must
        be(Some(Set("FOO", Time.fromSeconds(0), "12345678")))

      KestrelCommandExtractor.unapply("set foo 123 100 8\\r\\n12345678") must
        be(Some(Set("foo", Time.fromSeconds(100), "12345678")))

      KestrelCommandExtractor.unapply("set foo 123 100 10\\r\\n1234\\r\\n5678") must
        be(Some(Set("foo", Time.fromSeconds(100), "1234\\r\\n5678")))

      // Declared length disagreeing with the payload, or missing length, is rejected.
      KestrelCommandExtractor.unapply("set foo 0 0 100\\r\\n12345678") must be(None)
      KestrelCommandExtractor.unapply("set foo 0 0\\r\\n1234") must be(None)
    }
  }

  // Builds a ParrotServerConfig from the test resource, pointed at the given
  // victim port, with a random parrot port and a kestrel transport attached.
  def makeServerConfig(victimPort: Int) = {
    val result = new Eval().apply[ParrotServerConfig[ParrotRequest, Response]](
      TempFile.fromResourcePath("/test-kestrel.scala"))
    result.parrotPort = RandomSocket().getPort
    result.victim = result.HostPortListVictim("localhost:" + victimPort)
    result.transport = Some(KestrelTransportFactory(result))
    result
  }
}
| twitter/iago | src/test/scala/com/twitter/parrot/server/KestrelTransportSpec.scala | Scala | apache-2.0 | 6,073 |
package tuner.test.unit
import org.scalatest._
import org.scalatest.Matchers._
import tuner.Table
import tuner.gui.util.Histogram
/**
 * Placeholder specification for GpModel point estimation; every example is
 * still pending implementation.
 */
class GpModelSpec extends WordSpec {

  "estimating a point" when {
    "estimating at a sample point" must {
      // At a training sample the estimate should be exact.
      "have an error of 0" in (pending)
    }
    "estimating away from a sample point" must {
      "have an error of >0" in (pending)
      "have an error of <=1" in (pending)
    }
  }
}
| gabysbrain/tuner | src/test/scala/tuner/unit/GpModelSpec.scala | Scala | mit | 433 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import org.apache.hadoop.fs.Path
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.ml.{Estimator, Model, Pipeline, PipelineModel, PipelineStage, Transformer}
import org.apache.spark.ml.attribute.AttributeGroup
import org.apache.spark.ml.linalg.{Vector, VectorUDT}
import org.apache.spark.ml.param.{BooleanParam, Param, ParamMap, ParamValidators}
import org.apache.spark.ml.param.shared.{HasFeaturesCol, HasHandleInvalid, HasLabelCol}
import org.apache.spark.ml.util._
import org.apache.spark.sql.{DataFrame, Dataset}
import org.apache.spark.sql.types._
/**
* Base trait for [[RFormula]] and [[RFormulaModel]].
*/
// Shared params and helpers for the RFormula estimator and its fitted model.
private[feature] trait RFormulaBase extends HasFeaturesCol with HasLabelCol with HasHandleInvalid {
  /**
   * R formula parameter. The formula is provided in string form.
   * @group param
   */
  @Since("1.5.0")
  val formula: Param[String] = new Param(this, "formula", "R model formula")
  /** @group getParam */
  @Since("1.5.0")
  def getFormula: String = $(formula)
  /**
   * Force to index label whether it is numeric or string type.
   * Usually we index label only when it is string type.
   * If the formula was used by classification algorithms,
   * we can force to index label even it is numeric type by setting this param with true.
   * Default: false.
   * @group param
   */
  @Since("2.1.0")
  val forceIndexLabel: BooleanParam = new BooleanParam(this, "forceIndexLabel",
    "Force to index label whether it is numeric or string")
  setDefault(forceIndexLabel -> false)
  /** @group getParam */
  @Since("2.1.0")
  def getForceIndexLabel: Boolean = $(forceIndexLabel)
  /**
   * Param for how to handle invalid data (unseen or NULL values) in features and label column
   * of string type. Options are 'skip' (filter out rows with invalid data),
   * 'error' (throw an error), or 'keep' (put invalid data in a special additional
   * bucket, at index numLabels).
   * Default: "error"
   * @group param
   */
  @Since("2.3.0")
  override val handleInvalid: Param[String] = new Param[String](this, "handleInvalid",
    "How to handle invalid data (unseen or NULL values) in features and label column of string " +
    "type. Options are 'skip' (filter out rows with invalid data), error (throw an error), " +
    "or 'keep' (put invalid data in a special additional bucket, at index numLabels).",
    ParamValidators.inArray(StringIndexer.supportedHandleInvalids))
  setDefault(handleInvalid, StringIndexer.ERROR_INVALID)
  /**
   * Param for how to order categories of a string FEATURE column used by `StringIndexer`.
   * The last category after ordering is dropped when encoding strings.
   * Supported options: 'frequencyDesc', 'frequencyAsc', 'alphabetDesc', 'alphabetAsc'.
   * The default value is 'frequencyDesc'. When the ordering is set to 'alphabetDesc', `RFormula`
   * drops the same category as R when encoding strings.
   *
   * The options are explained using an example `'b', 'a', 'b', 'a', 'c', 'b'`:
   * {{{
   * +-----------------+---------------------------------------+----------------------------------+
   * | Option | Category mapped to 0 by StringIndexer | Category dropped by RFormula |
   * +-----------------+---------------------------------------+----------------------------------+
   * | 'frequencyDesc' | most frequent category ('b') | least frequent category ('c') |
   * | 'frequencyAsc' | least frequent category ('c') | most frequent category ('b') |
   * | 'alphabetDesc' | last alphabetical category ('c') | first alphabetical category ('a')|
   * | 'alphabetAsc' | first alphabetical category ('a') | last alphabetical category ('c') |
   * +-----------------+---------------------------------------+----------------------------------+
   * }}}
   * Note that this ordering option is NOT used for the label column. When the label column is
   * indexed, it uses the default descending frequency ordering in `StringIndexer`.
   *
   * @group param
   */
  @Since("2.3.0")
  final val stringIndexerOrderType: Param[String] = new Param(this, "stringIndexerOrderType",
    "How to order categories of a string FEATURE column used by StringIndexer. " +
    "The last category after ordering is dropped when encoding strings. " +
    s"Supported options: ${StringIndexer.supportedStringOrderType.mkString(", ")}. " +
    "The default value is 'frequencyDesc'. When the ordering is set to 'alphabetDesc', " +
    "RFormula drops the same category as R when encoding strings.",
    ParamValidators.inArray(StringIndexer.supportedStringOrderType))
  setDefault(stringIndexerOrderType, StringIndexer.frequencyDesc)
  /** @group getParam */
  @Since("2.3.0")
  def getStringIndexerOrderType: String = $(stringIndexerOrderType)
  /** Whether `schema` already contains the configured label column. */
  protected def hasLabelCol(schema: StructType): Boolean = {
    schema.map(_.name).contains($(labelCol))
  }
}
/**
* :: Experimental ::
* Implements the transforms required for fitting a dataset against an R model formula. Currently
* we support a limited subset of the R operators, including '~', '.', ':', '+', and '-'. Also see
* the R formula docs here: http://stat.ethz.ch/R-manual/R-patched/library/stats/html/formula.html
*
* The basic operators are:
* - `~` separate target and terms
* - `+` concat terms, "+ 0" means removing intercept
* - `-` remove a term, "- 1" means removing intercept
* - `:` interaction (multiplication for numeric values, or binarized categorical values)
* - `.` all columns except target
*
* Suppose `a` and `b` are double columns, we use the following simple examples
* to illustrate the effect of `RFormula`:
* - `y ~ a + b` means model `y ~ w0 + w1 * a + w2 * b` where `w0` is the intercept and `w1, w2`
* are coefficients.
* - `y ~ a + b + a:b - 1` means model `y ~ w1 * a + w2 * b + w3 * a * b` where `w1, w2, w3`
* are coefficients.
*
* RFormula produces a vector column of features and a double or string column of label.
* Like when formulas are used in R for linear regression, string input columns will be one-hot
* encoded, and numeric columns will be cast to doubles.
* If the label column is of type string, it will be first transformed to double with
* `StringIndexer`. If the label column does not exist in the DataFrame, the output label column
* will be created from the specified response variable in the formula.
*/
@Experimental
@Since("1.5.0")
class RFormula @Since("1.5.0") (@Since("1.5.0") override val uid: String)
  extends Estimator[RFormulaModel] with RFormulaBase with DefaultParamsWritable {
  @Since("1.5.0")
  def this() = this(Identifiable.randomUID("rFormula"))
  /**
   * Sets the formula to use for this transformer. Must be called before use.
   * @group setParam
   * @param value an R formula in string form (e.g. "y ~ x + z")
   */
  @Since("1.5.0")
  def setFormula(value: String): this.type = set(formula, value)
  /** @group setParam */
  @Since("2.3.0")
  def setHandleInvalid(value: String): this.type = set(handleInvalid, value)
  /** @group setParam */
  @Since("1.5.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)
  /** @group setParam */
  @Since("1.5.0")
  def setLabelCol(value: String): this.type = set(labelCol, value)
  /** @group setParam */
  @Since("2.1.0")
  def setForceIndexLabel(value: Boolean): this.type = set(forceIndexLabel, value)
  /** @group setParam */
  @Since("2.3.0")
  def setStringIndexerOrderType(value: String): this.type = set(stringIndexerOrderType, value)
  /** Whether the formula specifies fitting an intercept. */
  private[ml] def hasIntercept: Boolean = {
    require(isDefined(formula), "Formula must be defined first.")
    RFormulaParser.parse($(formula)).hasIntercept
  }
  @Since("2.0.0")
  override def fit(dataset: Dataset[_]): RFormulaModel = {
    transformSchema(dataset.schema, logging = true)
    require(isDefined(formula), "Formula must be defined first.")
    val parsedFormula = RFormulaParser.parse($(formula))
    val resolvedFormula = parsedFormula.resolve(dataset.schema)
    // Accumulators for the feature-encoding pipeline assembled below:
    // stages to run, string columns to one-hot encode, attribute-name prefixes to
    // rewrite back to user-facing term names, and temp columns to prune at the end.
    val encoderStages = ArrayBuffer[PipelineStage]()
    val oneHotEncodeColumns = ArrayBuffer[(String, String)]()
    val prefixesToRewrite = mutable.Map[String, String]()
    val tempColumns = ArrayBuffer[String]()
    // Generates a unique temporary column name and records it for later pruning.
    def tmpColumn(category: String): String = {
      val col = Identifiable.randomUID(category)
      tempColumns += col
      col
    }
    // First we index each string column referenced by the input terms.
    val indexed: Map[String, String] = resolvedFormula.terms.flatten.distinct.map { term =>
      dataset.schema(term).dataType match {
        case _: StringType =>
          val indexCol = tmpColumn("stridx")
          encoderStages += new StringIndexer()
            .setInputCol(term)
            .setOutputCol(indexCol)
            .setStringOrderType($(stringIndexerOrderType))
            .setHandleInvalid($(handleInvalid))
          prefixesToRewrite(indexCol + "_") = term + "_"
          (term, indexCol)
        case _: VectorUDT =>
          // Vector columns get an explicit size hint; if the metadata lacks a size,
          // it is taken from the first row of the dataset.
          val group = AttributeGroup.fromStructField(dataset.schema(term))
          val size = if (group.size < 0) {
            dataset.select(term).first().getAs[Vector](0).size
          } else {
            group.size
          }
          encoderStages += new VectorSizeHint(uid)
            .setHandleInvalid("optimistic")
            .setInputCol(term)
            .setSize(size)
          (term, term)
        case _ =>
          (term, term)
      }
    }.toMap
    // Then we handle one-hot encoding and interactions between terms.
    var keepReferenceCategory = false
    val encodedTerms = resolvedFormula.terms.map {
      case Seq(term) if dataset.schema(term).dataType == StringType =>
        val encodedCol = tmpColumn("onehot")
        // Formula w/o intercept, one of the categories in the first category feature is
        // being used as reference category, we will not drop any category for that feature.
        if (!hasIntercept && !keepReferenceCategory) {
          encoderStages += new OneHotEncoder(uid)
            .setInputCols(Array(indexed(term)))
            .setOutputCols(Array(encodedCol))
            .setDropLast(false)
          keepReferenceCategory = true
        } else {
          oneHotEncodeColumns += indexed(term) -> encodedCol
        }
        prefixesToRewrite(encodedCol + "_") = term + "_"
        encodedCol
      case Seq(term) =>
        term
      case terms =>
        // Multi-term entry: an interaction (e.g. a:b) across the indexed columns.
        val interactionCol = tmpColumn("interaction")
        encoderStages += new Interaction()
          .setInputCols(terms.map(indexed).toArray)
          .setOutputCol(interactionCol)
        prefixesToRewrite(interactionCol + "_") = ""
        interactionCol
    }
    // Encode the remaining string-indexed columns in one stage, dropping the last category.
    if (oneHotEncodeColumns.nonEmpty) {
      val (inputCols, outputCols) = oneHotEncodeColumns.toArray.unzip
      encoderStages += new OneHotEncoder(uid)
        .setInputCols(inputCols)
        .setOutputCols(outputCols)
        .setDropLast(true)
    }
    // Assemble all encoded terms into the features vector, restore user-facing
    // attribute names, and drop the temporary columns.
    encoderStages += new VectorAssembler(uid)
      .setInputCols(encodedTerms.toArray)
      .setOutputCol($(featuresCol))
      .setHandleInvalid($(handleInvalid))
    encoderStages += new VectorAttributeRewriter($(featuresCol), prefixesToRewrite.toMap)
    encoderStages += new ColumnPruner(tempColumns.toSet)
    // Index the label if it is a string column, or if indexing was forced.
    if ((dataset.schema.fieldNames.contains(resolvedFormula.label) &&
      dataset.schema(resolvedFormula.label).dataType == StringType) || $(forceIndexLabel)) {
      encoderStages += new StringIndexer()
        .setInputCol(resolvedFormula.label)
        .setOutputCol($(labelCol))
        .setHandleInvalid($(handleInvalid))
    }
    val pipelineModel = new Pipeline(uid).setStages(encoderStages.toArray).fit(dataset)
    copyValues(new RFormulaModel(uid, resolvedFormula, pipelineModel).setParent(this))
  }
  @Since("1.5.0")
  // optimistic schema; does not contain any ML attributes
  override def transformSchema(schema: StructType): StructType = {
    require(!hasLabelCol(schema) || !$(forceIndexLabel),
      "If label column already exists, forceIndexLabel can not be set with true.")
    if (hasLabelCol(schema)) {
      StructType(schema.fields :+ StructField($(featuresCol), new VectorUDT, true))
    } else {
      StructType(schema.fields :+ StructField($(featuresCol), new VectorUDT, true) :+
        StructField($(labelCol), DoubleType, true))
    }
  }
  @Since("1.5.0")
  override def copy(extra: ParamMap): RFormula = defaultCopy(extra)
  @Since("2.0.0")
  override def toString: String = s"RFormula(${get(formula).getOrElse("")}) (uid=$uid)"
}
/** Companion providing `read`/`load` support via [[DefaultParamsReadable]]. */
@Since("2.0.0")
object RFormula extends DefaultParamsReadable[RFormula] {
  @Since("2.0.0")
  override def load(path: String): RFormula = super.load(path)
}
/**
* :: Experimental ::
* Model fitted by [[RFormula]]. Fitting is required to determine the factor levels of
* formula terms.
*
* @param resolvedFormula the fitted R formula.
* @param pipelineModel the fitted feature model, including factor to index mappings.
*/
@Experimental
@Since("1.5.0")
class RFormulaModel private[feature](
    @Since("1.5.0") override val uid: String,
    private[ml] val resolvedFormula: ResolvedRFormula,
    private[ml] val pipelineModel: PipelineModel)
  extends Model[RFormulaModel] with RFormulaBase with MLWritable {
  @Since("2.0.0")
  override def transform(dataset: Dataset[_]): DataFrame = {
    checkCanTransform(dataset.schema)
    // Feature pipeline first, then label handling on its output.
    transformLabel(pipelineModel.transform(dataset))
  }
  @Since("1.5.0")
  override def transformSchema(schema: StructType): StructType = {
    checkCanTransform(schema)
    val withFeatures = pipelineModel.transformSchema(schema)
    if (resolvedFormula.label.isEmpty || hasLabelCol(withFeatures)) {
      withFeatures
    } else if (schema.exists(_.name == resolvedFormula.label)) {
      // Numeric/boolean responses cast to a non-nullable double label column.
      val nullable = schema(resolvedFormula.label).dataType match {
        case _: NumericType | BooleanType => false
        case _ => true
      }
      StructType(withFeatures.fields :+ StructField($(labelCol), DoubleType, nullable))
    } else {
      // Ignore the label field. This is a hack so that this transformer can also work on test
      // datasets in a Pipeline.
      withFeatures
    }
  }
  @Since("1.5.0")
  override def copy(extra: ParamMap): RFormulaModel = {
    val copied = new RFormulaModel(uid, resolvedFormula, pipelineModel).setParent(parent)
    copyValues(copied, extra)
  }
  @Since("2.0.0")
  override def toString: String = s"RFormulaModel($resolvedFormula) (uid=$uid)"
  /**
   * Materializes the label column: casts a numeric/boolean response to double, leaves the
   * dataset unchanged if a label column already exists (or the formula has no label), and
   * rejects unsupported response types.
   */
  private def transformLabel(dataset: Dataset[_]): DataFrame = {
    val labelName = resolvedFormula.label
    if (labelName.isEmpty || hasLabelCol(dataset.schema)) {
      dataset.toDF
    } else if (dataset.schema.exists(_.name == labelName)) {
      dataset.schema(labelName).dataType match {
        case _: NumericType | BooleanType =>
          dataset.withColumn($(labelCol), dataset(labelName).cast(DoubleType))
        case other =>
          throw new IllegalArgumentException("Unsupported type for label: " + other)
      }
    } else {
      // Ignore the label field. This is a hack so that this transformer can also work on test
      // datasets in a Pipeline.
      dataset.toDF
    }
  }
  /** Validates the input schema: features column must not pre-exist, and any pre-existing
   *  label column must be numeric. */
  private def checkCanTransform(schema: StructType) {
    val columnNames = schema.map(_.name)
    require(!columnNames.contains($(featuresCol)), "Features column already exists.")
    require(
      !columnNames.contains($(labelCol)) || schema($(labelCol)).dataType.isInstanceOf[NumericType],
      s"Label column already exists and is not of type ${NumericType.simpleString}.")
  }
  @Since("2.0.0")
  override def write: MLWriter = new RFormulaModel.RFormulaModelWriter(this)
}
/** Companion with ML persistence support (writer/reader pair) for [[RFormulaModel]]. */
@Since("2.0.0")
object RFormulaModel extends MLReadable[RFormulaModel] {
  @Since("2.0.0")
  override def read: MLReader[RFormulaModel] = new RFormulaModelReader
  @Since("2.0.0")
  override def load(path: String): RFormulaModel = super.load(path)
  /** [[MLWriter]] instance for [[RFormulaModel]] */
  private[RFormulaModel] class RFormulaModelWriter(instance: RFormulaModel) extends MLWriter {
    override protected def saveImpl(path: String): Unit = {
      // Save metadata and Params
      DefaultParamsWriter.saveMetadata(instance, path, sc)
      // Save model data: resolvedFormula
      val dataPath = new Path(path, "data").toString
      sparkSession.createDataFrame(Seq(instance.resolvedFormula))
        .repartition(1).write.parquet(dataPath)
      // Save pipeline model
      val pmPath = new Path(path, "pipelineModel").toString
      instance.pipelineModel.save(pmPath)
    }
  }
  private class RFormulaModelReader extends MLReader[RFormulaModel] {
    /** Checked against metadata when loading model */
    private val className = classOf[RFormulaModel].getName
    override def load(path: String): RFormulaModel = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      // Rebuild the ResolvedRFormula from the single-row parquet written by saveImpl.
      val dataPath = new Path(path, "data").toString
      val data = sparkSession.read.parquet(dataPath).select("label", "terms", "hasIntercept").head()
      val label = data.getString(0)
      val terms = data.getAs[Seq[Seq[String]]](1)
      val hasIntercept = data.getBoolean(2)
      val resolvedRFormula = ResolvedRFormula(label, terms, hasIntercept)
      val pmPath = new Path(path, "pipelineModel").toString
      val pipelineModel = PipelineModel.load(pmPath)
      val model = new RFormulaModel(metadata.uid, resolvedRFormula, pipelineModel)
      metadata.getAndSetParams(model)
      model
    }
  }
}
/**
* Utility transformer for removing temporary columns from a DataFrame.
* TODO(ekl) make this a public transformer
*/
private class ColumnPruner(override val uid: String, val columnsToPrune: Set[String])
  extends Transformer with MLWritable {

  // Convenience constructor that generates a random uid.
  def this(columnsToPrune: Set[String]) =
    this(Identifiable.randomUID("columnPruner"), columnsToPrune)

  // Selects every column except those listed in columnsToPrune.
  override def transform(dataset: Dataset[_]): DataFrame = {
    val survivors = dataset.columns.filterNot(columnsToPrune)
    dataset.select(survivors.map(name => dataset.col(name)): _*)
  }

  // Schema mirrors transform: pruned fields are removed, order otherwise preserved.
  override def transformSchema(schema: StructType): StructType = {
    val keptFields = schema.fields.filterNot(field => columnsToPrune.contains(field.name))
    StructType(keptFields)
  }

  override def copy(extra: ParamMap): ColumnPruner = defaultCopy(extra)

  override def write: MLWriter = new ColumnPruner.ColumnPrunerWriter(this)
}
/** Companion with ML persistence support (writer/reader pair) for [[ColumnPruner]]. */
private object ColumnPruner extends MLReadable[ColumnPruner] {
  override def read: MLReader[ColumnPruner] = new ColumnPrunerReader
  override def load(path: String): ColumnPruner = super.load(path)
  /** [[MLWriter]] instance for [[ColumnPruner]] */
  private[ColumnPruner] class ColumnPrunerWriter(instance: ColumnPruner) extends MLWriter {
    // Serializable row shape for the pruned-column set.
    private case class Data(columnsToPrune: Seq[String])
    override protected def saveImpl(path: String): Unit = {
      // Save metadata and Params
      DefaultParamsWriter.saveMetadata(instance, path, sc)
      // Save model data: columnsToPrune
      val data = Data(instance.columnsToPrune.toSeq)
      val dataPath = new Path(path, "data").toString
      sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
    }
  }
  private class ColumnPrunerReader extends MLReader[ColumnPruner] {
    /** Checked against metadata when loading model */
    private val className = classOf[ColumnPruner].getName
    override def load(path: String): ColumnPruner = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val dataPath = new Path(path, "data").toString
      val data = sparkSession.read.parquet(dataPath).select("columnsToPrune").head()
      val columnsToPrune = data.getAs[Seq[String]](0).toSet
      val pruner = new ColumnPruner(metadata.uid, columnsToPrune)
      metadata.getAndSetParams(pruner)
      pruner
    }
  }
}
/**
* Utility transformer that rewrites Vector attribute names via prefix replacement. For example,
* it can rewrite attribute names starting with 'foo_' to start with 'bar_' instead.
*
* @param vectorCol name of the vector column to rewrite.
* @param prefixesToRewrite the map of string prefixes to their replacement values. Each attribute
* name defined in vectorCol will be checked against the keys of this
* map. When a key prefixes a name, the matching prefix will be replaced
* by the value in the map.
*/
private class VectorAttributeRewriter(
    override val uid: String,
    val vectorCol: String,
    val prefixesToRewrite: Map[String, String])
  extends Transformer with MLWritable {
  // Convenience constructor that generates a random uid.
  def this(vectorCol: String, prefixesToRewrite: Map[String, String]) =
    this(Identifiable.randomUID("vectorAttrRewriter"), vectorCol, prefixesToRewrite)
  override def transform(dataset: Dataset[_]): DataFrame = {
    // Build new metadata for the vector column with each named attribute run
    // through every prefix replacement; unnamed attributes pass through unchanged.
    val metadata = {
      val group = AttributeGroup.fromStructField(dataset.schema(vectorCol))
      val attrs = group.attributes.get.map { attr =>
        if (attr.name.isDefined) {
          val name = prefixesToRewrite.foldLeft(attr.name.get) { case (curName, (from, to)) =>
            curName.replace(from, to)
          }
          attr.withName(name)
        } else {
          attr
        }
      }
      new AttributeGroup(vectorCol, attrs).toMetadata()
    }
    // Re-select with the rewritten vector column moved to the end.
    val otherCols = dataset.columns.filter(_ != vectorCol).map(dataset.col)
    val rewrittenCol = dataset.col(vectorCol).as(vectorCol, metadata)
    dataset.select(otherCols :+ rewrittenCol : _*)
  }
  override def transformSchema(schema: StructType): StructType = {
    // Mirrors transform's column ordering: vectorCol is moved to the end.
    StructType(
      schema.fields.filter(_.name != vectorCol) ++
        schema.fields.filter(_.name == vectorCol))
  }
  override def copy(extra: ParamMap): VectorAttributeRewriter = defaultCopy(extra)
  override def write: MLWriter = new VectorAttributeRewriter.VectorAttributeRewriterWriter(this)
}
/** Companion with ML persistence support (writer/reader pair) for [[VectorAttributeRewriter]]. */
private object VectorAttributeRewriter extends MLReadable[VectorAttributeRewriter] {
  override def read: MLReader[VectorAttributeRewriter] = new VectorAttributeRewriterReader
  override def load(path: String): VectorAttributeRewriter = super.load(path)
  /** [[MLWriter]] instance for [[VectorAttributeRewriter]] */
  private[VectorAttributeRewriter]
  class VectorAttributeRewriterWriter(instance: VectorAttributeRewriter) extends MLWriter {
    // Serializable row shape for the rewriter's configuration.
    private case class Data(vectorCol: String, prefixesToRewrite: Map[String, String])
    override protected def saveImpl(path: String): Unit = {
      // Save metadata and Params
      DefaultParamsWriter.saveMetadata(instance, path, sc)
      // Save model data: vectorCol, prefixesToRewrite
      val data = Data(instance.vectorCol, instance.prefixesToRewrite)
      val dataPath = new Path(path, "data").toString
      sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
    }
  }
  private class VectorAttributeRewriterReader extends MLReader[VectorAttributeRewriter] {
    /** Checked against metadata when loading model */
    private val className = classOf[VectorAttributeRewriter].getName
    override def load(path: String): VectorAttributeRewriter = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val dataPath = new Path(path, "data").toString
      val data = sparkSession.read.parquet(dataPath).select("vectorCol", "prefixesToRewrite").head()
      val vectorCol = data.getString(0)
      val prefixesToRewrite = data.getAs[Map[String, String]](1)
      val rewriter = new VectorAttributeRewriter(metadata.uid, vectorCol, prefixesToRewrite)
      metadata.getAndSetParams(rewriter)
      rewriter
    }
  }
}
| WindCanDie/spark | mllib/src/main/scala/org/apache/spark/ml/feature/RFormula.scala | Scala | apache-2.0 | 24,544 |
package com.twitter.scrooge.backend
/**
* Copyright 2011 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.twitter.scrooge.ast._
import com.twitter.scrooge.frontend.ResolvedService
import com.twitter.scrooge.mustache.Dictionary
import com.twitter.scrooge.mustache.Dictionary._
import scala.collection.mutable
import scala.util.Properties
/**
 * Builds the mustache [[Dictionary]] values used to render generated Thrift service code.
 * Mixed into a [[TemplateGenerator]] (self-type) to access genID/genType/templates helpers.
 */
trait ServiceTemplate { self: TemplateGenerator =>
  /**
   * Dictionary for a single service function.
   * @param generic optional wrapper type name (e.g. "Future" or "MM") exposed as "generic".
   */
  def functionDictionary(function: Function, generic: Option[String]): Dictionary = {
    val hasThrows = function.throws.size > 0
    val throwsDictionaries =
      if (hasThrows) {
        function.throws map { ex =>
          Dictionary(
            "throwType" -> genType(ex.fieldType),
            "throwName" -> genID(ex.sid))
        }
      } else {
        Nil
      }
    val argNames = function.args.map { field => genID(field.sid).toData }
    Dictionary(
      "generic" -> v(generic.map(v)),
      "docstring" -> v(function.docstring.getOrElse("")),
      "hasThrows" -> v(hasThrows),
      "throws" -> v(throwsDictionaries),
      "funcName" -> genID(function.funcName.toCamelCase),
      "originalFuncName" -> v(function.originalName),
      "funcObjectName" -> genID(functionObjectName(function)),
      "typeName" -> genType(function.funcType),
      "fieldParams" -> genFieldParams(function.args), // A list of parameters with types: (a: A, b: B...)
      "argNames" -> v(argNames.mkString(", ")),
      "argsFieldNames" -> {
        // Arg names qualified by the Args struct instance: "args.a, args.b".
        val code = argNames.map { field => s"args.$field" }.mkString(", ")
        v(code)
      },
      "argTypes" -> {
        // Unit for no args, the bare type for one arg, a tuple type otherwise.
        function.args match {
          case Nil => v("Unit")
          case singleArg :: Nil => genType(singleArg.fieldType)
          case args =>
            val typesString = args.map { arg => genType(arg.fieldType) }.mkString(", ")
            v(s"($typesString)")
        }
      },
      "args" -> v(function.args.map { arg =>
        Dictionary("arg" -> genID(arg.sid))
      }),
      "isVoid" -> v(function.funcType == Void || function.funcType == OnewayVoid),
      "is_oneway" -> v(function.funcType == OnewayVoid),
      "functionType" -> {
        val returnType = s"Future[${genType(function.funcType)}]"
        val types = s"[Args,${returnType}]"
        v(s"Function1$types")
      },
      "moreThan22Args" -> v(function.args.size > 22)
    )
  }
  /** Thrift Args struct (named "Args") wrapping the function's parameters. */
  def functionArgsStruct(f:Function): FunctionArgs = {
    FunctionArgs(SimpleID("Args"),
      internalArgsStructNameForWire(f),
      f.args)
  }
  /**
   * Thrift Result struct that includes success or exceptions returned.
   */
  def resultStruct(f:Function): FunctionResult = {
    // Both the success value and every declared exception become optional fields.
    val throws = f.throws map {
      _.copy(requiredness = Requiredness.Optional)
    }
    val success = f.funcType match {
      case Void => None
      case OnewayVoid => None
      case fieldType: FieldType =>
        Some(Field(0, SimpleID("success"), "success", fieldType, None, Requiredness.Optional))
    }
    FunctionResult(
      SimpleID("Result"),
      resultStructNameForWire(f),
      success, throws
    )
  }
  /** Object name for a function's companion (TitleCase form of the function name). */
  def functionObjectName(f: Function): SimpleID = f.funcName.toTitleCase
  /**
   * The name used in RPC request, this needs to be same as Apache compiler
   */
  def internalArgsStructNameForWire(f: Function): String =
    f.funcName.name + "_args"
  /**
   * The name used in RPC request, this needs to be same as Apache compiler
   */
  private def resultStructNameForWire(f: Function): String =
    f.funcName.name + "_result"
  /** Dictionary for rendering the Finagle client of a service. */
  def finagleClient(
    service: Service,
    namespace: Identifier
  ) =
    Dictionary(
      "package" -> genID(namespace),
      "ServiceName" -> genID(service.sid.toTitleCase),
      "docstring" -> v(service.docstring.getOrElse("")),
      "hasParent" -> v(service.parent.isDefined),
      "parent" -> v(service.parent.map { p =>
        genID(getServiceParentID(p))
      }),
      "finagleClientParent" ->
        service.parent.map(getParentFinagleClient).getOrElse(v("")),
      "functions" -> v(service.functions.map {
        f =>
          Dictionary(
            "function" -> v(templates("function")),
            "functionInfo" -> v(functionDictionary(f, Some("Future"))),
            "clientFuncNameForWire" -> v(f.originalName),
            "__stats_name" -> genID(f.funcName.toCamelCase.prepend("__stats_")),
            "type" -> genType(f.funcType),
            "isVoid" -> v(f.funcType == Void || f.funcType == OnewayVoid),
            "argNames" -> {
              val code = f.args.map { field => genID(field.sid).toData }.mkString(", ")
              v(code)
            }
          )
      }),
      "finagleClientFunction" -> v(templates("finagleClientFunction"))
    )
  /** Dictionary for rendering the Finagle service (server side) of a service. */
  def finagleService(
    service: Service,
    namespace: Identifier
  ) =
    Dictionary(
      "package" -> genID(namespace),
      "ServiceName" -> genID(service.sid.toTitleCase),
      "docstring" -> v(service.docstring.getOrElse("")),
      "hasParent" -> v(service.parent.isDefined),
      "finagleServiceParent" ->
        service.parent.map(getParentFinagleService).getOrElse(genBaseFinagleService),
      "function" -> v(templates("finagleServiceFunction")),
      "functions" -> v(service.functions map {
        f =>
          Dictionary(
            "serviceFuncNameForCompile" -> genID(f.funcName.toCamelCase),
            "serviceFuncNameForWire" -> v(f.originalName),
            "__stats_name" -> genID(f.funcName.toCamelCase.prepend("__stats_")),
            "funcObjectName" -> genID(functionObjectName(f)),
            "argNames" ->
              v(f.args.map { field =>
                "args." + genID(field.sid).toData
              }.mkString(", ")),
            "typeName" -> genType(f.funcType),
            "isVoid" -> v(f.funcType == Void || f.funcType == OnewayVoid),
            "resultNamedArg" ->
              v(if (f.funcType != Void && f.funcType != OnewayVoid) "success = Some(value)" else ""),
            "exceptions" -> v(f.throws map {
              t =>
                Dictionary(
                  "exceptionType" -> genType(t.fieldType),
                  "fieldName" -> genID(t.sid)
                )
            })
          )
      })
    )
  /** Code snippet unpacking a tuple of `arity` args: "" / "args" / "args._1, args._2, ...". */
  def unwrapArgs(arity: Int): String =
    arity match {
      case 0 => ""
      case 1 => "args"
      case _ =>
        (1 to arity).map { i =>
          s"args._$i"
        }.mkString(", ")
    }
  /** Top-level dictionary for rendering a whole service file. */
  def serviceDict(
    service: Service,
    namespace: Identifier,
    includes: Seq[Include],
    options: Set[ServiceOption]
  ) = {
    val withFinagle = options.contains(WithFinagle)
    Dictionary(
      "function" -> v(templates("function")),
      "package" -> genID(namespace),
      "ServiceName" -> genID(service.sid.toTitleCase),
      "docstring" -> v(service.docstring.getOrElse("")),
      "syncParent" -> v(service.parent.map { p =>
        genID(getServiceParentID(p)).append(".Iface")
      }),
      "parent" -> v(service.parent.map { p =>
        genQualifiedID(getServiceParentID(p), namespace)
      }),
      "futureIfaceParent" -> v(service.parent.map { p =>
        genQualifiedID(getServiceParentID(p), namespace).append(".FutureIface")
      }),
      "genericParent" -> service.parent.map { p =>
        genID(getServiceParentID(p)).append("[MM]")
      }.getOrElse(v("ThriftService")),
      "syncFunctions" -> v(service.functions.map {
        f => functionDictionary(f, None)
      }),
      "asyncFunctions" -> v(service.functions.map {
        f => functionDictionary(f, Some("Future"))
      }),
      "genericFunctions" -> v(service.functions.map {
        f => functionDictionary(f, Some("MM"))
      }),
      "struct" -> v(templates("struct")),
      "thriftFunctions" -> v(service.functions.map { f =>
        Dictionary(
          "functionArgsStruct" ->
            v(structDict(
              functionArgsStruct(f),
              Some(namespace),
              includes,
              options)),
          "internalResultStruct" -> {
            val functionResult = resultStruct(f)
            v(structDict(
              functionResult,
              Some(namespace),
              includes,
              options) +
              Dictionary(
                "successFieldType" -> getSuccessType(functionResult),
                "successFieldValue" -> getSuccessValue(functionResult),
                "exceptionValues" -> getExceptionFields(functionResult)
              )
            )
          },
          "funcObjectName" -> genID(functionObjectName(f)),
          "unwrapArgs" -> v(unwrapArgs(f.args.length))
        ) + functionDictionary(f, Some("Future"))
      }),
      "finagleClients" -> v(
        if (withFinagle) Seq(finagleClient(service, namespace)) else Seq()
      ),
      "finagleServices" -> v(
        if (withFinagle) Seq(finagleService(service, namespace)) else Seq()
      ),
      "disableCaseClass" -> {
        // Scala 2.10 case classes cannot exceed 22 fields; disable then.
        val isScala210 = Properties.releaseVersion.exists(_.startsWith("2.10"))
        val over22functions = {
          val numParentFunctions = resolvedDoc.collectParentServices(service).map {
            case (_, service) => service.functions.length
          }.sum
          val totalFunctions = service.functions.length + numParentFunctions
          totalFunctions > 22
        }
        v(isScala210 && over22functions)
      },
      // scalac 2.11 fails to compile classes with more than 254 method arguments
      // due to https://issues.scala-lang.org/browse/SI-7324
      // We skip generation of ServiceIfaces for thrift services with 255+ methods.
      "generateServiceIface" -> {
        val numParentFunctions = resolvedDoc.collectParentServices(service).map {
          case (_, service) => service.functions.length
        }.sum
        val totalFunctions = service.functions.length + numParentFunctions
        v(totalFunctions <= 254)
      },
      "withFinagle" -> v(withFinagle),
      "inheritedFunctions" -> {
        // For service-per-endpoint, we generate a class with a value for each method, so
        // method names must be unique.
        val deduper = new NameDeduplicator()
        val inheritedFunctions: Seq[Dictionary] =
          // Note: inherited functions must be deduped first, so we walk the parent chain
          // from the topmost parent down (hence the reverse).
          resolvedDoc.resolveParentServices(service, namespaceLanguage, defaultNamespace).reverse.flatMap {
            result: ResolvedService =>
              result.service.functions.map { function =>
                Dictionary(
                  "ParentServiceName" -> genID(result.serviceID),
                  "funcName" -> genID(deduper.dedupe(function.funcName.toCamelCase)),
                  "funcObjectName" -> genID(functionObjectName(function))
                )
              }
          }
        val ownFunctions: Seq[Dictionary] = service.functions.map {
          function => Dictionary(
            "ParentServiceName" -> v("self"),
            "funcName" -> genID(deduper.dedupe(function.funcName.toCamelCase)),
            "funcObjectName" -> genID(functionObjectName(function))
          )
        }
        v(ownFunctions ++ inheritedFunctions)
      },
      "dedupedOwnFunctions" -> {
        val deduper = new NameDeduplicator()
        // We only generate own functions, but need to dedupe them from the inherited functions,
        // so fill those in first.
        resolvedDoc.collectParentServices(service).foreach { case (_, service) =>
          service.functions.foreach { function =>
            deduper.dedupe(function.funcName.toCamelCase)
          }
        }
        val ownFunctions: Seq[Dictionary] = service.functions.map { function =>
          functionDictionary(function, Some("Future")) ++=
            (("dedupedFuncName" -> genID(deduper.dedupe(function.funcName.toCamelCase))))
        }
        v(ownFunctions)
      }
    )
  }
  /** Stateful helper that makes camelCased function names unique by appending '_'. */
  private[this] class NameDeduplicator() {
    private[this] val seenIDs = new mutable.HashSet[String]
    /**
     * Append a '_' to deduplicate function names for the case class members.
     * This also stores the new ID in the set of seen IDs.
     */
    def dedupe(id: SimpleID): SimpleID = {
      var currentID = id
      while (seenIDs.contains(currentID.toString)) {
        currentID = currentID.append("_")
      }
      seenIDs.add(currentID.toString)
      currentID
    }
  }
}
| thirstycrow/scrooge | scrooge-generator/src/main/scala/com/twitter/scrooge/backend/ServiceTemplate.scala | Scala | apache-2.0 | 12,868 |
package model
import play.api.libs.json._
/**
* Represents the Swagger definition for PipelineBranchesitemlatestRun.
* @param additionalProperties Any additional properties this model may have.
*/
@javax.annotation.Generated(value = Array("org.openapitools.codegen.languages.ScalaPlayFrameworkServerCodegen"), date = "2022-02-13T02:38:35.589632Z[Etc/UTC]")
case class PipelineBranchesitemlatestRun(
  durationInMillis: Option[Int],
  estimatedDurationInMillis: Option[Int],
  enQueueTime: Option[String],
  endTime: Option[String],
  id: Option[String],
  organization: Option[String],
  pipeline: Option[String],
  result: Option[String],
  runSummary: Option[String],
  startTime: Option[String],
  state: Option[String],
  `type`: Option[String],
  commitId: Option[String],
  `class`: Option[String],
  // Fix: the original was a syntax error (missing comma after `class` and no
  // type on additionalProperties). Catch-all for JSON properties not declared
  // above; populated and flattened by the companion's custom Format.
  additionalProperties: JsObject
)
object PipelineBranchesitemlatestRun {
  implicit lazy val pipelineBranchesitemlatestRunJsonFormat: Format[PipelineBranchesitemlatestRun] = {
    val realJsonFormat = Json.format[PipelineBranchesitemlatestRun]
    // Fix: backticks in `type`/`class` are only Scala identifier escaping --
    // Json.format reads/writes the plain JSON keys "type" and "class". The old
    // set listed "`type`"/"`class`", so those keys never matched and were
    // wrongly routed into additionalProperties on reads.
    val declaredPropNames = Set("durationInMillis", "estimatedDurationInMillis", "enQueueTime", "endTime", "id", "organization", "pipeline", "result", "runSummary", "startTime", "state", "type", "commitId", "class")
    Format(
      Reads {
        case JsObject(xs) =>
          // Split incoming keys into declared fields and a catch-all JsObject
          // bound to the additionalProperties member, then delegate.
          val declaredProps = xs.filterKeys(declaredPropNames)
          val additionalProps = JsObject(xs -- declaredPropNames)
          val restructuredProps = declaredProps + ("additionalProperties" -> additionalProps)
          val newObj = JsObject(restructuredProps)
          realJsonFormat.reads(newObj)
        case _ =>
          JsError("error.expected.jsobject")
      },
      Writes { pipelineBranchesitemlatestRun =>
        // Inverse of reads: flatten additionalProperties back onto the object.
        val jsObj = realJsonFormat.writes(pipelineBranchesitemlatestRun)
        val additionalProps = jsObj.value("additionalProperties").as[JsObject]
        val declaredProps = jsObj - "additionalProperties"
        val newObj = declaredProps ++ additionalProps
        newObj
      }
    )
  }
}
| cliffano/swaggy-jenkins | clients/scala-play-server/generated/app/model/PipelineBranchesitemlatestRun.scala | Scala | mit | 2,064 |
package models.rest
import play.api.libs.json._
/**
* author: cvandrei
* since: 2016-02-03
*/
/**
 * Notification payload sent to Webjazz about a video.
 *
 * @param auth       authentication token carried with the notification
 * @param vimeoId    id of the video on Vimeo
 * @param hmsId      id of the video in HMS
 * @param width      video width (presumably pixels -- confirm with consumer)
 * @param height     video height (presumably pixels -- confirm with consumer)
 * @param thumbnails thumbnails available for the video
 */
case class WebjazzNotification(auth: String,
                               vimeoId: Long,
                               hmsId: Long,
                               width: Int,
                               height: Int,
                               thumbnails: List[Thumbnail])
object WebjazzNotification {
  /**
   * JSON (de)serializer for [[WebjazzNotification]]. Hand-written because the
   * wire format uses dashed keys ("vimeo-id", "hms-id") while the Scala fields
   * are camelCased, so the Json.format macro cannot be used directly.
   * NOTE(review): despite the name "...Reads", this is a full Format.
   */
  implicit object WebjazzNotificationReads extends Format[WebjazzNotification] {
    override def reads(json: JsValue): JsResult[WebjazzNotification] = {
      // NOTE(review): `.as[...]` throws on a missing/mistyped field instead of
      // producing a JsError -- confirm callers expect an exception here.
      val webjazzNotification = WebjazzNotification(
        (json \\ "auth").as[String],
        (json \\ "vimeo-id").as[Long],
        (json \\ "hms-id").as[Long],
        (json \\ "width").as[Int],
        (json \\ "height").as[Int],
        (json \\ "thumbnails").as[List[Thumbnail]]
      )
      JsSuccess(webjazzNotification)
    }
    // Mirrors reads: camelCased fields back to the dashed wire keys.
    def writes(wn: WebjazzNotification): JsValue = {
      Json.obj(
        "auth" -> wn.auth,
        "vimeo-id" -> wn.vimeoId,
        "hms-id" -> wn.hmsId,
        "width" -> wn.width,
        "height" -> wn.height,
        "thumbnails" -> wn.thumbnails
      )
    }
  }
}
| indarium/hbbTVPlugin | app/models/rest/WebjazzNotification.scala | Scala | agpl-3.0 | 1,187 |
package play.api.libs
/**
* OAuth integration helpers.
*/
package object oauth | michaelahlers/team-awesome-wedding | vendor/play-2.2.1/framework/src/play/src/main/scala/play/api/libs/oauth/package.scala | Scala | mit | 81 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play
import akka.util.Timeout
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import play.api.test.Helpers.contentAsString
import play.twirl.api.Content
package object views {
  /**
   * Renders the given Twirl [[Content]] to a string and parses it into a
   * Jsoup [[Document]] so tests can make DOM-level assertions.
   */
  def jsoupDocument(from: Content)(implicit timeout: Timeout): Document =
    Jsoup.parse(contentAsString(from))
}
| hmrc/play-ui | src/test/scala/uk/gov/hmrc/play/views/package.scala | Scala | apache-2.0 | 927 |
package collins.util
import play.api.mvc.Headers
import ApiVersion._
import org.specs2._
class VersionRouterSpec extends mutable.Specification {
  // Maps two API versions to distinct markers so each test can tell which
  // branch the router selected. Deliberately partial: other versions miss.
  val map: PartialFunction[ApiVersion,String] = {
    case `1.1` => "A"
    case `1.2` => "B"
  }
  // Minimal Headers stub backed by a plain Map; only the members the router
  // touches are overridden.
  case class FakeHeaders(headers: Map[String, Seq[String]], val data: Seq[(String, Seq[String])] = Seq.empty) extends Headers {
    override def getAll(key: String) = headers(key)
    override def keys = headers.keys.toSet
  }
  "version router" should {
    "route to correct version" in {
      // Version is extracted from the vendor media type in the Accept header.
      val heads = FakeHeaders(Map("Accept" -> List("application/com.tumblr.collins;version=1.2", "foo", "com.tumblr.collins")))
      VersionRouter.route(heads)(map) must_== "B"
    }
    "default route on missing header" in {
      VersionRouter.route(FakeHeaders(Map[String, Seq[String]]()))(map) must_== map(ApiVersion.defaultVersion)
    }
    "default route on malformed header" in {
      VersionRouter.route(FakeHeaders(Map("Accept" -> List("HASFIAFSHAF"))))(map) must_== map(ApiVersion.defaultVersion)
    }
    "throw exception on invalid version" in {
      // A well-formed header naming an unknown version must fail loudly.
      val heads = FakeHeaders(Map("Accept" -> List("application/com.tumblr.collins;version=26.12", "foo", "com.tumblr.collins")))
      VersionRouter.route(heads)(map) must throwA[VersionException]
    }
  }
}
| byxorna/collins | test/collins/util/VersionRouterSpec.scala | Scala | apache-2.0 | 1,321 |
package io.github.mandar2812.dynaml.kernels
import io.github.mandar2812.dynaml.pipes.{ProductReducer, Reducer, SumReducer}
/**
* Represents a kernel on a product space [[R]] × [[S]]
*
* @param firstK The first covariance
* @param secondK The second covariance
* @param reducer An implicit parameter indicating how to combine the
* kernel values; it can only be [[Reducer.:+:]] or [[Reducer.:*:]]
* */
class TensorCombinationKernel[R, S](
  firstK: LocalScalarKernel[R],
  secondK: LocalScalarKernel[S])(implicit reducer: Reducer = Reducer.:*:)
  extends CompositeCovariance[(R,S)] {

  // Kernel identifiers derived from the runtime class's simple name; used to
  // namespace hyper-parameters as "<id>/<param>".
  // NOTE(review): if both kernels are instances of the same class, fID == sID
  // and their hyper-parameters collide -- confirm callers never do this.
  val fID = firstK.toString.split("\\\\.").last
  val sID = secondK.toString.split("\\\\.").last

  override val hyper_parameters: List[String] =
    firstK.hyper_parameters.map(h => fID+"/"+h) ++ secondK.hyper_parameters.map(h => sID+"/"+h)

  blocked_hyper_parameters =
    firstK.blocked_hyper_parameters.map(h => fID+"/"+h) ++ secondK.blocked_hyper_parameters.map(h => sID+"/"+h)

  state =
    firstK.state.map(h => (fID+"/"+h._1, h._2)) ++ secondK.state.map(h => (sID+"/"+h._1, h._2))

  // Splits a namespaced configuration into the two kernels' own (truncated)
  // configurations, keyed by whether the name contains the kernel id.
  private def getKernelConfigs(config: Map[String, Double]) = (
    config.filter(_._1.contains(fID)).map(CompositeCovariance.truncateState),
    config.filter(_._1.contains(sID)).map(CompositeCovariance.truncateState)
  )

  // Same split for plain hyper-parameter name lists.
  protected def getKernelHyp(s: Seq[String]) = (
    s.filter(_.contains(fID)).map(CompositeCovariance.truncateHyp),
    s.filter(_.contains(sID)).map(CompositeCovariance.truncateHyp)
  )

  // K((x1,x2),(y1,y2)) = reduce(K1(x1,y1), K2(x2,y2)), reduce being + or *.
  override def evaluateAt(config: Map[String, Double])(x: (R, S), y: (R, S)): Double = {
    val (firstKernelConfig, secondKernelConfig) = getKernelConfigs(config)
    reducer(
      Array(
        firstK.evaluateAt(firstKernelConfig)(x._1, y._1),
        secondK.evaluateAt(secondKernelConfig)(x._2, y._2)
      )
    )
  }

  override def setHyperParameters(h: Map[String, Double]): TensorCombinationKernel.this.type = {
    //Sanity Check
    assert(effective_hyper_parameters.forall(h.contains),
      "All hyper parameters must be contained in the arguments")
    //group the hyper params by kernel id
    h.toSeq.filterNot(_._1.split("/").length == 1).map(kv => {
      val idS = kv._1.split("/")
      (idS.head, (idS.tail.mkString("/"), kv._2))
    }).groupBy(_._1).map(hypC => {
      val kid = hypC._1
      val hyper_params = hypC._2.map(_._2).toMap
      if(kid == fID) firstK.setHyperParameters(hyper_params) else secondK.setHyperParameters(hyper_params)
    })
    super.setHyperParameters(h)
  }

  // Blocking a namespaced name blocks it on the owning child kernel too.
  override def block(h: String*) = {
    val (firstKernelHyp, secondKernelHyp) = getKernelHyp(h)
    firstK.block(firstKernelHyp:_*)
    secondK.block(secondKernelHyp:_*)
    super.block(h:_*)
  }

  override def gradientAt(config: Map[String, Double])(x: (R, S), y: (R, S)): Map[String, Double] = {
    val (firstKernelConfig, secondKernelConfig) = getKernelConfigs(config)
    reducer match {
      case SumReducer =>
        // d(K1+K2)/dtheta: each child's gradient passes through unchanged.
        firstK.gradientAt(firstKernelConfig)(x._1, y._1).map(h => (fID+"/"+h._1, h._2)) ++
          secondK.gradientAt(secondKernelConfig)(x._2, y._2).map(h => (sID+"/"+h._1, h._2))
      case ProductReducer =>
        // Product rule: each child's gradient is scaled by the other's value.
        firstK.gradientAt(firstKernelConfig)(x._1, y._1).map(k =>
          (fID+"/"+k._1, k._2*secondK.evaluateAt(secondKernelConfig)(x._2, y._2))) ++
          secondK.gradientAt(secondKernelConfig)(x._2, y._2).map(k =>
            (sID+"/"+k._1, k._2*firstK.evaluateAt(firstKernelConfig)(x._1, y._1)))
      case _ =>
        // Unknown reducer: fall back to the generic (e.g. numerical) gradient.
        super.gradientAt(config)(x, y)
    }
  }
}
/** Tensor combination fixed to the product reducer: K = K1 * K2 on the product space. */
class KroneckerProductKernel[R, S](firstK: LocalScalarKernel[R], secondK: LocalScalarKernel[S])
  extends TensorCombinationKernel(firstK, secondK)(Reducer.:*:)
package com.omis.client.services
import com.omis.{EmpDetails, UserReg}
import org.scalajs.dom.ext.Ajax
import org.scalajs.dom.window
import play.api.libs.json.Json
import scala.concurrent.Future
import org.scalajs.dom.window
import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
/**
* Created by shubham.k on 22-03-2017.
*/
object CoreApi {
  // POSTs `requestContent` as JSON to `apiUrl`, forwarding the auth token held
  // in browser localStorage; resolves with the raw response body.
  private def ajaxPost(requestContent: String, apiUrl: String): Future[String] = {
    Ajax.post(
      url = apiUrl,
      data = requestContent,
      headers = Map("Content-Type" -> "application/json;charset=UTF-8", "X-Auth-Token" -> window.localStorage.getItem("X-Auth-Token"))
    ).map(_.responseText)
  }
  // GET with the same auth header. NOTE(review): unlike ajaxPost, this
  // prefixes "/" to the url -- confirm the asymmetry is intended.
  private def ajaxGet(url: String): Future[String] = {
    Ajax.get(
      url = s"/${url}",
      headers = Map("X-Auth-Token" -> window.localStorage.getItem("X-Auth-Token"))
    ).map(_.responseText)
  }
  def login(userReg: UserReg) = {
    ajaxPost(Json.stringify(Json.toJson[UserReg](userReg)), "login")
  }
  def signUp(userReg: UserReg) = {
    ajaxPost(Json.stringify(Json.toJson[UserReg](userReg)), "signup")
  }
  def addEmployee(empDetails: EmpDetails) = {
    ajaxPost(Json.stringify(Json.toJson[EmpDetails](empDetails)), "createemp")
  }
  def logout() = ajaxGet("logout")
  def getAllEmp = ajaxGet("allemp")
  def getEmp = ajaxGet("emp")
  // NOTE(review): being a val, this fires the request once, eagerly, when the
  // object is first touched, and callers all share that single Future --
  // confirm this caching is intended (a def would request per call).
  val authenticate = ajaxGet("authenticate")
}
| iriddhi/mis | client/src/main/scala/com/omis/client/services/CoreApi.scala | Scala | apache-2.0 | 1,380 |
package com.twitter.scrooge
import java.lang.reflect.Method
import org.apache.thrift.protocol.{TField, TProtocol}
import scala.collection.mutable.StringBuilder
/**
* A simple class for generic introspection on ThriftStruct classes.
*/
final class ThriftStructMetaData[T <: ThriftStruct](val codec: ThriftStructCodec[T]) {
  // Converts a thrift snake_case field name to the camelCase accessor name
  // used by the generated struct. Leading underscores are preserved; an
  // all-uppercase segment is lowercased after its first character.
  private[this] def toCamelCase(str: String): String = {
    str.takeWhile(_ == '_') + str.
      split('_').
      filterNot(_.isEmpty).
      zipWithIndex.map { case (part, ind) =>
        val first = if (ind == 0) part(0).toLower else part(0).toUpper
        val isAllUpperCase = part.forall(_.isUpper)
        val rest = if (isAllUpperCase) part.drop(1).toLowerCase else part.drop(1)
        new StringBuilder(part.size).append(first).append(rest)
      }.
      mkString
  }

  /**
   * The Class object for the ThriftStructCodec subclass.
   */
  val codecClass = codec.getClass

  /**
   * The fully qualified name of the ThriftStruct sublcass.
   */
  val structClassName = codecClass.getName.dropRight(1) // drop '$' from object name

  /**
   * Gets the unqualified name of the struct.
   */
  val structName = structClassName.split("\\\\.").last

  /**
   * The Class object for ThriftStruct subclass.
   */
  val structClass = codecClass.getClassLoader.loadClass(structClassName).asInstanceOf[Class[T]]

  /**
   * A Seq of ThriftStructFields representing the fields defined in the ThriftStruct.
   */
  val fields: Seq[ThriftStructField[T]] =
    // Fields are discovered as the codec's zero-arg methods returning TField.
    codecClass.getMethods.toList filter { m =>
      m.getParameterTypes.size == 0 && m.getReturnType == classOf[TField]
    } map { m =>
      val tfield = m.invoke(codec).asInstanceOf[TField]
      // Optional companion "<field>Manifest" method; absent on older codecs.
      // NOTE(review): the blanket `catch Throwable => None` also swallows
      // unrelated reflection failures -- confirm that is acceptable here.
      val manifest: scala.Option[Manifest[_]] = try {
        Some {
          codecClass
            .getMethod(m.getName + "Manifest")
            .invoke(codec)
            .asInstanceOf[Manifest[_]]
        }
      } catch { case _: Throwable => None }
      val method = structClass.getMethod(toCamelCase(tfield.name))
      new ThriftStructField[T](tfield, method, manifest)
    }
}
/** Reflective handle to one field of a generated ThriftStruct: its TField
  * descriptor, the struct's accessor Method, and an optional Manifest. */
final class ThriftStructField[T <: ThriftStruct](val tfield: TField, val method: Method, val manifest: scala.Option[Manifest[_]]) {
  /**
   * The TField field name, same as the method name on the ThriftStruct for the value.
   */
  def name = tfield.name

  /**
   * The TField field id, as defined in the source thrift file.
   */
  def id = tfield.id

  /**
   * The TField field type. See TType for possible values.
   */
  def `type` = tfield.`type`

  /**
   * Gets the value of the field from the struct. You can specify the expected return
   * type, rather than casting explicitly.
   */
  def getValue[R](struct: T): R = method.invoke(struct).asInstanceOf[R]
}
/**
* Field information to be embedded in a generated struct's companion class.
* Allows for reflection on field types.
*/
final class ThriftStructFieldInfo(
  val tfield: TField,                      // thrift field descriptor (name/id/type)
  val isOptional: Boolean,                 // whether the field is optional in the IDL
  val manifest: Manifest[_],               // manifest of the field's Scala type
  val keyManifest: scala.Option[Manifest[_]],   // map key type, when the field is a map
  val valueManifest: scala.Option[Manifest[_]], // map/collection element type, when applicable
  val typeAnnotations: Map[String, String],     // annotations on the field's type
  val fieldAnnotations: Map[String, String]     // annotations on the field itself
) {
  /**
   * Secondary constructor provided for backwards compatibility:
   * Older scrooge-generator does not produce annotations.
   */
  def this(
    tfield: TField,
    isOptional: Boolean,
    manifest: Manifest[_],
    keyManifest: scala.Option[Manifest[_]],
    valueManifest: scala.Option[Manifest[_]]
  ) =
    this(
      tfield,
      isOptional,
      manifest,
      keyManifest,
      valueManifest,
      Map.empty[String, String],
      Map.empty[String, String]
    )
}
| nshkrob/scrooge | scrooge-core/src/main/scala/com/twitter/scrooge/ThriftStructMetaData.scala | Scala | apache-2.0 | 3,649 |
import language.experimental.macros
object Macros {
  // Compiler test fixture: macro defs whose implementations are `???`.
  // Exercises the compiler's handling of unimplemented macro bodies.
  def foo1: Nothing = macro ???
  def foo2(x: Int): Nothing = macro ???
  def foo3[T]: Nothing = macro ???
}
| lrytz/scala | test/files/pos/macro-qmarkqmarkqmark.scala | Scala | apache-2.0 | 162 |
/**
* Illustrates loading JSON data using Spark SQL
*/
package com.git.huanghaifeng.spark.load
import org.apache.spark._
import org.apache.spark.sql.SQLContext
object LoadFileJsonWithSparkSQL {
  /**
   * Entry point: loads a JSON file through Spark SQL, prints the inferred
   * schema and every row.
   *
   * args(0): Spark master URL; args(1): path of the JSON input file.
   */
  def main(args: Array[String]) {
    if (args.length != 2) {
      println("Usage: [sparkmaster] [inputFile]")
      // Fix: bare `exit` (Predef.exit) was removed in Scala 2.11; sys.exit is
      // the supported, equivalent call.
      sys.exit(1)
    }
    val master = args(0)
    val inputFile = args(1)
    val sc = new SparkContext(master, "LoadJsonWithSparkSQL", System.getenv("SPARK_HOME"))
    val sqlCtx = new SQLContext(sc)
    val input = sqlCtx.jsonFile(inputFile)
    input.printSchema()
    input.collect().foreach { println }
  }
}
/*
* Copyright 2010 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.naggati
package test
import scala.collection.mutable
import org.jboss.netty.buffer.ChannelBuffer
import org.jboss.netty.channel._
class Counter {
  // Running byte totals observed by the codec under test; incremented by the
  // read/write callbacks installed in TestCodec.apply. Mutable by design.
  var readBytes = 0
  var writtenBytes = 0
}
object TestCodec {
  /** Builds a TestCodec around a Codec wired to a fresh Counter, so tests can
    * assert on how many bytes were read and written. */
  def apply(firstStage: Stage, encoder: PartialFunction[Any, ChannelBuffer]) = {
    val counter = new Counter()
    val codec = new Codec(firstStage, encoder, { n => counter.readBytes += n },
      { n => counter.writtenBytes += n })
    val testCodec = new TestCodec(codec)
    (testCodec, counter)
  }
}
/**
* Netty doesn't appear to have a good set of fake objects yet, so this wraps a Codec in a fake
* environment that collects emitted objects and returns them.
*/
class TestCodec(codec: Codec) {
  // Objects emitted downstream (writes) and upstream (decoded messages).
  val downstreamOutput = new mutable.ListBuffer[AnyRef]
  val upstreamOutput = new mutable.ListBuffer[AnyRef]

  // Records an event's message; ChannelBuffers are drained into byte arrays.
  private def log(e: MessageEvent, list: mutable.ListBuffer[AnyRef]) {
    e.getMessage match {
      case buffer: ChannelBuffer =>
        val bytes = new Array[Byte](buffer.readableBytes)
        buffer.readBytes(bytes)
        list += bytes
      case x =>
        list += x
    }
  }

  // Renders captured items as UTF-8 strings for easy assertions.
  private def toStrings(wrapped: Seq[Any]): Seq[String] = wrapped.map { item =>
    item match {
      case x: Array[Byte] => new String(x, "UTF-8")
      case x => x.toString
    }
  }

  // Terminal handlers that just capture whatever reaches the pipeline ends.
  val upstreamTerminus = new SimpleChannelUpstreamHandler() {
    override def messageReceived(c: ChannelHandlerContext, e: MessageEvent) {
      log(e, upstreamOutput)
    }
  }
  val downstreamTerminus = new SimpleChannelDownstreamHandler() {
    override def writeRequested(c: ChannelHandlerContext, e: MessageEvent) {
      log(e, downstreamOutput)
    }
  }

  // Fake pipeline: downstream terminus <- codec -> upstream terminus.
  val pipeline = Channels.pipeline()
  pipeline.addLast("downstreamTerminus", downstreamTerminus)
  pipeline.addLast("decoder", codec)
  pipeline.addLast("upstreamTerminus", upstreamTerminus)
  val context = pipeline.getContext(codec)

  // Sink and channel stubs: events are dropped, the channel pretends to be
  // connected and bound so the codec behaves as if on a live connection.
  val sink = new AbstractChannelSink() {
    def eventSunk(pipeline: ChannelPipeline, event: ChannelEvent) { }
  }
  val channel = new AbstractChannel(null, null, pipeline, sink) {
    def getRemoteAddress() = null
    def getLocalAddress() = null
    def isConnected() = true
    def isBound() = true
    def getConfig() = new DefaultChannelConfig()
  }

  /** Feeds raw bytes into the codec and returns everything decoded upstream. */
  def apply(buffer: ChannelBuffer) = {
    upstreamOutput.clear()
    codec.messageReceived(context, new UpstreamMessageEvent(pipeline.getChannel, buffer, null))
    upstreamOutput.toList
  }

  /** Encodes `obj` through the codec and returns the written bytes as strings. */
  def send(obj: Any) = {
    downstreamOutput.clear()
    codec.handleDownstream(context, new DownstreamMessageEvent(pipeline.getChannel, Channels.future(pipeline.getChannel), obj, null))
    toStrings(downstreamOutput.toList)
  }
}
| taihsun/Gitest | src/main/scala/com/twitter/naggati/test/TestCodec.scala | Scala | apache-2.0 | 3,306 |
package spray.can.server
import akka.actor.ActorRef
import akka.actor.Props
import akka.io.Tcp
import spray.can.Http
import spray.can.HttpExt
import spray.can.parsing.SSLSessionInfoSupport
import spray.can.server.StatsSupport.StatsHolder
import spray.io.BackPressureHandling
import spray.io.ConnectionTimeouts
import spray.io.PreventHalfClosedConnections
import spray.io.TickGenerator
import spray.io.SslTlsSupportPatched
/**
* The only diff from HttpListener is:
* private val pipelineStage = UpgradableHttpListener.pipelineStage(settings, statsHolder)
*/
class UpgradableHttpListener(
    bindCommander: ActorRef,
    bind: Http.Bind,
    httpSettings: HttpExt#Settings
) extends HttpListener(bindCommander, bind, httpSettings) {
  import context.system
  import bind._

  // Child connection actors are named with a monotonically increasing counter.
  private val connectionCounter = Iterator from 0
  private val settings = bind.settings getOrElse ServerSettings(system)
  private val statsHolder = if (settings.statsSupport) Some(new StatsHolder) else None
  // The only difference from stock HttpListener: the upgrade-capable pipeline.
  private val pipelineStage = UpgradableHttpListener.pipelineStage(settings, statsHolder)

  override def connected(tcpListener: ActorRef): Receive = {
    // Each accepted TCP connection gets its own HttpServerConnection actor
    // running the upgradable pipeline.
    case Tcp.Connected(remoteAddress, localAddress) ⇒
      val conn = sender()
      context.actorOf(
        props = Props(new HttpServerConnection(conn, listener, pipelineStage, remoteAddress, localAddress, settings))
          .withDispatcher(httpSettings.ConnectionDispatcher),
        name = connectionCounter.next().toString
      )
    case Http.GetStats ⇒ statsHolder foreach { holder ⇒ sender() ! holder.toStats }
    case Http.ClearStats ⇒ statsHolder foreach { _.clear() }
    case Http.Unbind(timeout) ⇒ unbind(tcpListener, Set(sender()), timeout)
    case _: Http.ConnectionClosed ⇒
    // ignore, we receive this event when the user didn't register the handler within the registration timeout period
  }
}
/**
* Could be the replacement for HttpServerConnection.pipelineStage
*/
/**
 * Could be the replacement for HttpServerConnection.pipelineStage
 */
private object UpgradableHttpListener {
  // Builds the server pipeline, wrapping the HTTP stages in UpgradeSupport so
  // a connection can be switched to another protocol (e.g. websockets).
  // `stage ? cond` includes the stage only when cond holds.
  def pipelineStage(settings: ServerSettings, statsHolder: Option[StatsHolder]) = {
    import settings._
    import timeouts._
    UpgradeSupport(settings) {
      ServerFrontend(settings) >>
        RequestChunkAggregation(requestChunkAggregationLimit) ? (requestChunkAggregationLimit > 0) >>
        PipeliningLimiter(pipeliningLimit) ? (pipeliningLimit > 0) >>
        StatsSupport(statsHolder.get) ? statsSupport >>
        RemoteAddressHeaderSupport ? remoteAddressHeader >>
        RemoteIpPortHeaderSupport ? remoteAddressHeader >>
        SSLSessionInfoSupport ? parserSettings.sslSessionInfoHeader >>
        RequestParsing(settings) >>
        ResponseRendering(settings)
    } >>
      ConnectionTimeouts(idleTimeout) ? (reapingCycle.isFinite && idleTimeout.isFinite) >>
      PreventHalfClosedConnections(sslEncryption) >>
      SslTlsSupportPatched(maxEncryptionChunkSize, parserSettings.sslSessionInfoHeader) ? sslEncryption >>
      TickGenerator(reapingCycle) ? (reapingCycle.isFinite && (idleTimeout.isFinite || requestTimeout.isFinite)) >>
      // NOTE(review): backpressureSettings.get is evaluated eagerly even when
      // the stage is excluded -- presumably autoBackPressureEnabled implies the
      // settings are defined; confirm, else this throws on None.
      BackPressureHandling(backpressureSettings.get.noAckRate, backpressureSettings.get.readingLowWatermark) ? autoBackPressureEnabled
  }
}
package edu.tum.cs.isabelle.api
import scala.collection.mutable.ListBuffer
import acyclic.file
// FIXME code mostly copied from xml.scala and yxml.scala
object XML {
  // YXML transport markers: X separates chunks, Y separates fields inside a
  // chunk (non-printing control characters, so they cannot occur in text).
  private val X = '\\u0005'
  private val Y = '\\u0006'

  // Minimal XML entity escaping for pretty-printing. '&' must be replaced
  // first so already-produced entities are not double-escaped.
  private def prettyEscape(string: String) = string
    .replace("&", "&amp;")
    .replace("<", "&lt;")
    .replace(">", "&gt;")
    .replace("\\"", "&quot;")
    .replace("'", "&apos;")

  sealed abstract class Tree {
    // Serializes this single tree as a YXML string.
    def toYXML: String = bodyToYXML(List(this))
    def pretty(indent: Int = 0): String
    // Zero-arg overload delegates to pretty(indent = 0) via the default.
    final def pretty: String = pretty()
  }
  final case class Elem(markup: Markup, body: Body) extends Tree {
    // Pretty-prints as <name attrs/> when empty, otherwise with indented
    // children. Attribute values and text content are entity-escaped.
    def pretty(indent: Int = 0) = {
      val attrs = (if (markup._2.isEmpty) "" else " ") + markup._2.map { case (k, v) => s"$k='${prettyEscape(v)}'" }.mkString(" ")
      if (body.isEmpty) {
        " " * indent + "<" + markup._1 + attrs + " />"
      }
      else {
        val head = " " * indent + "<" + markup._1 + attrs + ">"
        val rows = body.map(_.pretty(indent + 2)).mkString("\\n", "\\n", "\\n")
        val foot = " " * indent + "</" + markup._1 + ">"
        head + rows + foot
      }
    }
  }
  final case class Text(content: String) extends Tree {
    def pretty(indent: Int = 0) =
      " " * indent + prettyEscape(content)
  }

  type Body = List[Tree]

  @inline
  def elem(markup: Markup, body: Body): Tree = Elem(markup, body)

  @inline
  def text(content: String): Tree = Text(content)

  // Splits "key=value" at the first '=' only; a missing or leading '=' is
  // a malformed attribute.
  private def parse_attrib(source: CharSequence) = {
    val s = source.toString
    val i = s.indexOf('=')
    if (i <= 0) sys.error("bad attribute")
    (s.substring(0, i), s.substring(i + 1))
  }

  // Parses a YXML document expected to contain exactly one tree; an empty
  // document yields Text("").
  def fromYXML(source: String): Tree = bodyFromYXML(source) match {
    case List(result) => result
    case Nil => Text("")
    case _ => sys.error("multiple results")
  }

  def bodyFromYXML(source: String): Body = {
    def buffer(): ListBuffer[Tree] = new ListBuffer[Tree]
    // Stack of open elements; the sentinel ("", Nil) marker is the document
    // root and must be the only entry left at the end.
    var stack: List[(Markup, ListBuffer[Tree])] = List((("", Nil), buffer()))

    def add(x: Tree) = (stack: @unchecked) match {
      case ((_, body) :: _) => body += x; ()
    }

    def push(name: String, atts: List[(String, String)])
    {
      if (name == "") sys.error("bad element")
      else stack = ((name, atts), buffer()) :: stack
    }

    def pop()
    {
      (stack: @unchecked) match {
        case ((("", Nil), _) :: _) => sys.error("unbalanced element")
        case ((markup, body) :: pending) =>
          stack = pending
          add(Elem(markup, body.toList))
      }
    }

    // A chunk of exactly one Y is an element close; a chunk beginning with Y
    // is an element open (empty field, name, attributes); anything else is
    // literal text.
    for (chunk <- source.split(X) if chunk.length != 0) {
      if (chunk.length == 1 && chunk.charAt(0) == Y) pop()
      else {
        chunk.split(Y).toList match {
          case ch :: name :: atts if ch.length == 0 =>
            push(name.toString, atts.map(parse_attrib))
          case txts => for (txt <- txts) add(Text(txt.toString))
        }
      }
    }
    (stack: @unchecked) match {
      case List((("", Nil), body)) => body.toList
      case ((name, _), _) :: _ => sys.error("unbalanced element")
    }
  }

  // Inverse of bodyFromYXML: element open is X Y name (Y key=value)* X,
  // element close is X Y X, text is emitted verbatim.
  def bodyToYXML(body: Body): String = {
    val s = new StringBuilder
    def attrib(p: (String, String)) = { s += Y; s ++= p._1; s += '='; s ++= p._2; () }
    def tree(t: Tree): Unit =
      t match {
        case Elem((name, atts), ts) =>
          s += X; s += Y; s ++= name; atts.foreach(attrib); s += X
          ts.foreach(tree)
          s += X; s += Y; s += X
          ()
        case Text(text) =>
          s ++= text
          ()
      }
    body.foreach(tree)
    s.toString
  }
}
| wneuper/libisabelle | pide-interface/src/main/scala/XML.scala | Scala | mit | 3,564 |
package dotty.tools.scaladoc
import java.nio.file.{Path, Paths}
/**
 * Associates values of T with optional path prefixes; `get` returns the value
 * whose prefix matches the queried path most specifically.
 */
case class PathBased[T](entries: List[PathBased.Entry[T]], projectRoot: Path):
  def get(path: Path): Option[PathBased.Result[T]] =
    // Absolute paths under the project root are first relativized to it.
    if path.isAbsolute && path.startsWith(projectRoot) then get(projectRoot.relativize(path))
    // Entries with no prefix always match (forall on None); among matches the
    // longest prefix wins (None sorts below any Some in the Option ordering).
    else entries.filter(_.path.forall(p => path.startsWith(p))).maxByOption(_.path.map(_.toString.length)).map(entry =>
      PathBased.Result(entry.path.fold(path)(_.relativize(path)), entry.elem)
    )
/** Parses one raw argument value into a T, or Left(error message) on failure. */
trait ArgParser[T]:
  def parse(s: String): Either[String, T]
object PathBased:
  /** A value bound to an optional path prefix (None means "applies everywhere"). */
  case class Entry[T](path: Option[Path], elem: T)
  /** Outcome of `parse`: per-argument error messages plus the parsed entries. */
  case class ParsingResult[T](errors: List[String], result: PathBased[T])
  /** Lookup result: the query path relativized to the matched prefix, and the value. */
  case class Result[T](path: Path, elem: T)

  // "<path>=<value>" argument shape; arguments without '=' are path-less.
  private val PathExtractor = "([^=]+)=(.+)".r

  /**
   * Parses raw arguments of the form "value" or "path=value" into a
   * PathBased[T], collecting parse errors instead of failing fast.
   */
  def parse[T](args: Seq[String], projectRoot: Path = Paths.get("").toAbsolutePath())(using parser: ArgParser[T]): ParsingResult[T] = {
    val parsed = args.map {
      case PathExtractor(path, arg) => parser.parse(arg).map(elem => Entry(Some(Paths.get(path)), elem))
      case arg => parser.parse(arg).map(elem => Entry(None, elem))
    }
    val errors = parsed.collect {
      case Left(error) => error
    }.toList
    val entries = parsed.collect {
      case Right(entry) => entry
    }.toList
    ParsingResult(errors, PathBased(entries, projectRoot))
  }
| dotty-staging/dotty | scaladoc/src/dotty/tools/scaladoc/PathBased.scala | Scala | apache-2.0 | 1,364 |
import java.io.File
import scala.jdk.CollectionConverters._
import scala.tools.asm.tree.{ClassNode, InvokeDynamicInsnNode}
import scala.tools.asm.{Handle, Opcodes}
import scala.tools.partest.BytecodeTest.modifyClassFile
import scala.tools.partest._
object Test extends DirectTest {
  // Not used: compilation happens via compileCode below.
  def code = ???

  // Compiles `code` against the test output dir with aggressive inlining
  // enabled, so the inliner must deal with the rewritten invokedynamic.
  def compileCode(code: String) = {
    val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
    compileString(newCompiler("-cp", classpath, "-d", testOutput.path, "-opt:l:inline", "-opt-inline-from:**", "-Yopt-inline-heuristics:everything", "-opt-warnings:_"))(code)
  }

  def show(): Unit = {
    // A bootstrap handle pointing at a class/method that is NOT the JDK's
    // LambdaMetafactory, to simulate an unknown indy bootstrap.
    val unknownBootstrapMethod = new Handle(
      Opcodes.H_INVOKESTATIC,
      "not/java/lang/SomeLambdaMetafactory",
      "notAMetaFactoryMethod",
      "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;",
      /* itf = */ false)
    // Rewrite A_1.test's first invokedynamic to use that unknown bootstrap,
    // then compile code that would inline A_1.test.
    modifyClassFile(new File(testOutput.toFile, "A_1.class"))((cn: ClassNode) => {
      val testMethod = cn.methods.iterator.asScala.find(_.name == "test").get
      val indy = testMethod.instructions.iterator.asScala.collect({ case i: InvokeDynamicInsnNode => i }).next()
      indy.bsm = unknownBootstrapMethod
      cn
    })
    compileCode("class T { def foo = A_1.test }")
  }
}
| martijnhoekstra/scala | test/files/run/noInlineUnknownIndy/Test.scala | Scala | apache-2.0 | 1,353 |
package com.taig.tmpltr.engine.html
import com.taig.tmpltr._
import com.taig.tmpltr.engine.{ html => element }
import play.api.mvc.Content
/** Full HTML document: a DOCTYPE followed by an <html> element built from the
  * given head and body. */
class boilerplate /*private*/( val attributes: Attributes, val content: Content )
	extends Tag.Body[boilerplate, Content]
{
	def this( attributes: Attributes, head: markup.head, body: markup.body ) =
	{
		this( attributes, new html( attributes, head += body ) )
	}

	// No own tag: rendering is the doctype plus the wrapped <html> content.
	val tag = null

	override def toString = doctype.toString + "\\n" + content
}
object boilerplate
{
	/** Builds a document from explicit attributes, head and body. */
	def apply( attributes: Attributes )( head: markup.head )( body: markup.body ): boilerplate =
	{
		new boilerplate( attributes, head, body )
	}

	/** Builds a document with empty attributes. */
	def apply( head: markup.head )( body: markup.body ): boilerplate =
	{
		apply( Attributes.empty )( head )( body )
	}
}
package com.inocybe.pfm.template
import akka.actor.{ActorIdentity, Identify, RootActorPath, AddressFromURIString, Props, PoisonPill, ActorPath, ActorSystem}
import akka.pattern.ask
import akka.cluster.client.ClusterClient
import akka.cluster.client.ClusterClientSettings
import akka.cluster.singleton.ClusterSingletonManager
import akka.cluster.singleton.ClusterSingletonManagerSettings
import akka.event.{LogSource, Logging}
import akka.http.scaladsl.Http
import akka.japi.Util.immutableSeq
import akka.persistence.journal.leveldb.SharedLeveldbJournal
import akka.persistence.journal.leveldb.SharedLeveldbStore
import akka.stream.ActorMaterializer
import akka.util.Timeout
import com.inocybe.pfm.template.apis.InboundConnector
import com.inocybe.pfm.template.internal.{WorkerActor, MasterActor}
import com.inocybe.pfm.template.outbound.WorkExecutor
import com.typesafe.config.ConfigFactory
import scala.io.StdIn
import scala.concurrent.duration._
object Boot {

  /**
   * Boots a single-JVM demo cluster: a backend node hosting the master as a
   * cluster singleton, one worker node, and an HTTP endpoint. Blocks on stdin,
   * then unbinds the HTTP port and terminates its actor system.
   */
  def main(args: Array[String]) {
    // LogSource instance so Logging(system, this) works for a plain object.
    implicit val logSource: LogSource[AnyRef] = new LogSource[AnyRef] {
      def genString(o: AnyRef): String = o.getClass.getName
      override def getClazz(o: AnyRef): Class[_] = o.getClass
    }

    implicit val system = ActorSystem("ClusterSystem")
    implicit val materializer = ActorMaterializer()
    implicit val executionContext = system.dispatcher
    val log = Logging(system, this)

    startBackend(2551, "backend")
    startWorker(2550)

    val route = new InboundConnector(system).route
    val interface = "0.0.0.0"
    val port = 8080
    val bindingFuture = Http().bindAndHandle(route, interface, port)
    log.info(s"bound to $interface:$port ... \\nPRESS ENTER TO EXIT")

    StdIn.readLine() // let it run until user presses return
    bindingFuture
      .flatMap(_.unbind()) // trigger unbinding from the port
      .onComplete(_ => system.terminate()) // and shutdown when done
  }

  /** How long the master waits before considering a piece of work timed out. */
  def workTimeout = 10.seconds

  /** Starts a backend cluster node that hosts the MasterActor as a cluster singleton. */
  def startBackend(port: Int, role: String): Unit = {
    val conf = ConfigFactory.parseString(s"akka.cluster.roles=[$role]").
      withFallback(ConfigFactory.parseString("akka.remote.netty.tcp.port=" + port)).
      withFallback(ConfigFactory.load())
    val system = ActorSystem("ClusterSystem", conf)
    // The shared journal store runs only on the 2551 node (demo-only SPOF).
    startupSharedJournal(system, startStore = (port == 2551), path =
      ActorPath.fromString("akka.tcp://[email protected]:2551/user/store"))

    system.actorOf(
      ClusterSingletonManager.props(
        MasterActor.props(workTimeout),
        PoisonPill,
        ClusterSingletonManagerSettings(system).withRole(role)
      ),
      "master")
  }

  /** Starts a worker node that reaches the cluster through a ClusterClient. */
  def startWorker(port: Int): Unit = {
    // load worker.conf
    val conf = ConfigFactory.parseString("akka.remote.netty.tcp.port=" + port).
      withFallback(ConfigFactory.load("worker"))
    val system = ActorSystem("WorkerSystem", conf)
    val initialContacts = immutableSeq(conf.getStringList("contact-points")).map {
      case AddressFromURIString(addr) ⇒ RootActorPath(addr) / "system" / "receptionist"
    }.toSet

    val clusterClient = system.actorOf(
      ClusterClient.props(
        ClusterClientSettings(system)
          .withInitialContacts(initialContacts)),
      "clusterClient")
    system.actorOf(WorkerActor.props(clusterClient, Props[WorkExecutor]), "worker")
  }

  /**
   * Registers the shared LevelDB journal with this actor system, optionally
   * starting the journal store first (on exactly one node).
   */
  def startupSharedJournal(system: ActorSystem, startStore: Boolean, path: ActorPath): Unit = {
    // Start the shared journal one one node (don't crash this SPOF)
    // This will not be needed with a distributed journal
    if (startStore)
      system.actorOf(Props[SharedLeveldbStore], "store")
    // register the shared journal
    import system.dispatcher
    import scala.util.{ Failure, Success }
    implicit val timeout = Timeout(15.seconds)
    val f = system.actorSelection(path) ? Identify(None)
    // Fix: Future.onSuccess/onFailure are deprecated; the two callbacks are
    // folded into a single onComplete with identical behaviour.
    f.onComplete {
      case Success(ActorIdentity(_, Some(ref))) =>
        SharedLeveldbJournal.setStore(ref, system)
      case Success(_) =>
        system.log.error("Shared journal not started at {}", path)
        system.terminate()
      case Failure(_) =>
        system.log.error("Lookup of shared journal at {} timed out", path)
        system.terminate()
    }
  }
}
package apdl.parser
import apdl.ApdlParserException
import cats.implicits._
import scala.Function.tupled
import scala.util.matching.Regex
class MainParsers extends DefineParsers {
  override protected val whiteSpace: Regex = "[ \t\r\f\n]+".r
  override def skipWhitespace: Boolean = true
  val ws: Regex = whiteSpace

  /** Top-level parser: a `project_name` plus any number of `@device` and define blocks,
    * in any order. Throws [[ApdlParserException]] if no project name was parsed.
    */
  def program: Parser[ApdlProject] = {
    // Partition the heterogeneous parse results by type and assemble the project.
    def process(xs: List[Object]): ApdlProject = {
      val projectName: String = xs.collectFirst { case name: String => name } match {
        case Some(value) => value
        case None => throw new ApdlParserException("No project name specifying")
      }
      val devices: List[ApdlDevice] = xs.collect { case d: ApdlDevice => d }
      val defineInputs: List[ApdlDefineInput] = xs.collect { case d: ApdlDefineInput => d }
      val defineComponents: List[ApdlDefineComponent] = xs.collect { case d: ApdlDefineComponent => d }
      val defineTransforms: List[ApdlDefineTransform] = xs.collect { case d: ApdlDefineTransform => d }
      ApdlProject(projectName, devices, defineInputs, defineComponents, defineTransforms)
    }
    rep1(projectName | apdlDevice | apdlDefine) ^^ {
      xs =>
        process(xs)
    }
  }

  /** `project_name = "..."` — yields the quoted name. */
  def projectName: Parser[String] = "project_name" ~ "=" ~ "\"" ~> literalString <~ "\"" ^^ { str => str }

  /** Either `key = "quoted value"` or `key = identifier`. */
  def keyValue: Parser[KeyValue[String, String]] = {
    identifier ~ ("=" ~ "\"" ~> literalString <~ "\"") ^^ { case (k ~ v) => KeyValue(k, v) } |
      identifier ~ "=" ~ identifier ^^ { case (k ~ _ ~ v) => KeyValue(k, v) }
  }

  /** `@input name type param...` */
  def apdlInput: Parser[ApdlInput] = "@input" ~> identifier ~ identifier ~ apdlParameters ^^ {
    case (name ~ typ ~ params) => ApdlInput(name, typ, params)
  }

  def apdlParameters: Parser[List[String]] = rep(apdlParameter)

  // A parameter is any run of characters that cannot open a block or start a new @-section.
  def apdlParameter: Parser[String] = "[^ \t\f\n\r{}@]+".r ^^ { str => str }

  /** `@serial inputName sampling` */
  def apdlSerial: Parser[ApdlSerial] = "@serial" ~> identifier ~ apdlSampling ^^ {
    case (ident ~ sampling) => ApdlSerial(ident, sampling)
  }

  def apdlSampling: Parser[ApdlSampling] = apdlSamplingUpdate | apdlSamplingTimer

  def apdlSamplingUpdate: Parser[ApdlSamplingUpdate.type] = "update" ^^ { _ => ApdlSamplingUpdate }

  def apdlSamplingTimer: Parser[ApdlSamplingTimer] = "each" ~> "[0-9]+".r ~ timeUnit ^^ { case (value ~ tu) => ApdlSamplingTimer(value.toInt, tu) }

  // "ns" and "ms" must be tried before "s"/"m" so the longer literals win.
  def timeUnit: Parser[ApdlTimeUnit] = {
    "ns" ^^ { _ => ApdlTimeUnit.ns } |
      "ms" ^^ { _ => ApdlTimeUnit.ms } |
      "s" ^^ { _ => ApdlTimeUnit.s } |
      "m" ^^ { _ => ApdlTimeUnit.m } |
      "h" ^^ { _ => ApdlTimeUnit.h } |
      "d" ^^ { _ => ApdlTimeUnit.d }
  }

  /** `@device name { ... }` with mandatory `framework`, `id` and `port` key-values;
    * remaining key-values become `additionalParameters`.
    */
  def apdlDevice: Parser[ApdlDevice] = {
    def process(ident: String, xs: List[Object]): ApdlDevice = {
      val inputs = xs.collect { case i: ApdlInput => i }
      val serials = xs.collect { case s: ApdlSerial => s }
      // KeyValue's type arguments are erased at runtime, but `keyValue` only ever
      // produces KeyValue[String, String], so the unchecked match is safe here.
      val keyValues = xs.collect { case kv: KeyValue[String @unchecked, String @unchecked] => kv }
      val framework = keyValues.find(kv => kv.key === "framework").getOrElse(throw new ApdlParserException(s"No framework specify for $ident")).value
      val id = keyValues.find(kv => kv.key === "id").getOrElse(throw new ApdlParserException(s"No id specify for $ident")).value
      val port = keyValues.find(kv => kv.key === "port").getOrElse(throw new ApdlParserException(s"No port specify for $ident")).value
      val parameters = keyValues.filter(kv => kv.key =!= "id" && kv.key =!= "framework" && kv.key =!= "port").toKVMap
      ApdlDevice(ident, id, framework, port, inputs, serials, parameters)
    }
    "@device" ~> identifier ~ (lb ~> rep1(keyValue | apdlInput | apdlSerial) <~ rb) ^^ { case (ident ~ xs) => process(ident, xs) }
  }

  /** Simple key/value pair as produced by the `keyValue` parser. */
  case class KeyValue[K, V](key: K, value: V)

  implicit class KeyValues[K,V](keyValues: Seq[KeyValue[K,V]]) {
    /** Collapses the pairs into a Map (later duplicates of a key win). */
    def toKVMap : Map[K,V] = keyValues.map(kv => kv.key -> kv.value).toMap
  }
}
/** A parsed APDL project: its name plus all devices and define blocks found in the source. */
case class ApdlProject(name: String,
                       devices: List[ApdlDevice],
                       defineInputs: List[ApdlDefineInput],
                       defineComponents: List[ApdlDefineComponent],
                       defineTransforms: List[ApdlDefineTransform])
/** A single `@device` block; `additionalParameters` holds all key-values other than id/framework/port. */
case class ApdlDevice(name: String,
                      id: String,
                      framework: String,
                      port: String,
                      inputs: List[ApdlInput],
                      serials: List[ApdlSerial],
                      additionalParameters: Map[String, String])
/** An `@input` entry: local identifier, the referenced define-input, and its raw arguments. */
case class ApdlInput(identifier: String, defineInputIdentifier: String, args: List[String])
/** A `@serial` entry: which input to emit and how it is sampled. */
case class ApdlSerial(inputName: String, sampling: ApdlSampling)
/** How an input is sampled: push-on-update or on a fixed timer. */
sealed trait ApdlSampling
/** Sample whenever the input value changes. */
case object ApdlSamplingUpdate extends ApdlSampling
/** Timer-driven sampling: once every `value` `timeUnit`s. */
case class ApdlSamplingTimer(value: Int, timeUnit: ApdlTimeUnit) extends ApdlSampling {
  /** This period expressed in milliseconds (integer division truncates sub-ms remainders). */
  def ms: Int = timeUnit match {
    case ApdlTimeUnit.ns => value / 1000000 // 1 ms = 1e6 ns (was value / 1000, a unit error)
    case ApdlTimeUnit.ms => value
    case ApdlTimeUnit.s => value * 1000
    case ApdlTimeUnit.m => value * 1000 * 60
    case ApdlTimeUnit.h => value * 1000 * 60 * 60
    case ApdlTimeUnit.d => value * 1000 * 60 * 60 * 24
  }
  /** This period expressed in seconds (integer division truncates sub-second remainders). */
  def s: Int = timeUnit match {
    case ApdlTimeUnit.ns => value / 1000000000 // 1 s = 1e9 ns (was value / 1000000, a unit error)
    case ApdlTimeUnit.ms => value / 1000
    case ApdlTimeUnit.s => value
    case ApdlTimeUnit.m => value * 60
    case ApdlTimeUnit.h => value * 60 * 60
    case ApdlTimeUnit.d => value * 60 * 60 * 24
  }
}
/** Time units accepted by the `each <n> <unit>` sampling syntax. */
sealed trait ApdlTimeUnit
object ApdlTimeUnit {
  case object ns extends ApdlTimeUnit
  case object ms extends ApdlTimeUnit
  case object s extends ApdlTimeUnit
  case object m extends ApdlTimeUnit
  case object h extends ApdlTimeUnit
  case object d extends ApdlTimeUnit
  def values: Seq[ApdlTimeUnit] = Seq(ns, ms, s, m, h, d)
}
| SnipyJulmy/APDL | src/main/scala/apdl/parser/MainParsers.scala | Scala | lgpl-3.0 | 6,000 |
// ticket #3432
// Regression test for specialization (tickets #3432 / SI-7343): the anonymous
// B[T] instance created in A.foo must use the specialized subclass for T = Int,
// so reading `value` must not box. The test prints the runtime's Integer box
// count to pin the expected amount of boxing; the code below must stay exactly
// as written or the compiled specialization shape (and the count) changes.
object Test {
  trait B[@specialized(Int) T] {
    def value: T
  }

  class A[@specialized(Int) T](x: T) {
    def foo: B[T] = new B[T] {
      def value = x
    }
  }

  def main(args: Array[String]) {
    println((new A("abc")).foo.value)
    println((new A(10)).foo.value)

    // before fixing SI-7343, this was printing 3. Now it's printing 2,
    // since the anonymous class created by doing new B[T] { ... } when
    // T = Int is now rewired to B$mcI$sp instead of just B[Int]
    println(runtime.BoxesRunTime.integerBoxCount)
  }
}
| felixmulder/scala | test/files/specialized/spec-ame.scala | Scala | bsd-3-clause | 558 |
package com.google.protobuf
import scala.collection.SpecificIterableFactory
/** Mixed into `ByteString`'s companion so it can act as a `SpecificIterableFactory`
  * for bytes on Scala 2.13+ collection APIs.
  */
abstract class ByteStringCompanionParent
    extends SpecificIterableFactory[Byte, ByteString] { self: ByteString.type =>

  /** Drains `it` into a fresh builder and materialises the resulting [[ByteString]]. */
  override def fromSpecific(it: IterableOnce[Byte]): ByteString = {
    val acc = newBuilder
    acc ++= it
    acc.result
  }
}
| trueaccord/protobuf-scala-runtime | shared/src/main/scala-2.13+/com/google/protobuf/ByteStringCompanionParent.scala | Scala | apache-2.0 | 341 |
package rip.hansolo.wrapper
import rx.Ctx
/**
* Created by Giymo11 on 14.02.2016.
*/
/** OAuth2 implicit-grant configuration for the reddit client: client id, redirect URI,
  * requested scopes, and whether the mobile auth page should be requested. */
case class ImplicitOauth(mobile: Boolean, clientId: String, redirectUri: String, scope: Seq[String])

// Skeleton of the reddit API wrapper; takes the user agent and OAuth config,
// plus an implicit scala.rx ownership context for reactive members (body TBD).
class RedditBase(userAgent: String, oauth: ImplicitOauth)(implicit ctx: Ctx.Owner) {
}
| Giymo11/http4s-hansolo.rip | hansolo/js/src/main/scala/rip/hansolo/wrapper/RedditBase.scala | Scala | mit | 283 |
package org.scanamo
import scala.reflect.runtime.universe._
import software.amazon.awssdk.services.dynamodb.model._
import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType._
import org.scalacheck._
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
import org.scanamo.generic.auto._
/** Round-trip property tests for [[DynamoFormat]]: arbitrary values are written to a
  * local DynamoDB table and read back, and must deserialize to the original value.
  * Requires the LocalDynamoDB test harness to be running.
  */
class DynamoFormatTest extends AnyFunSpec with Matchers with ScalaCheckDrivenPropertyChecks {
  // Test that an arbitrary DynamoFormat can be written to dynamo, and then read, producing the same result
  def testReadWrite[A: DynamoFormat: TypeTag](gen: Gen[A]): Unit = {
    val typeLabel = typeTag[A].tpe.toString
    it(s"should write and then read a $typeLabel from dynamo") {
      val client = LocalDynamoDB.client()
      LocalDynamoDB.usingRandomTable(client)("name" -> S) { t =>
        // Wrap the value in a keyed record so it can be stored under hash key "name".
        final case class Person(name: String, item: A)
        val format = DynamoFormat[Person]
        forAll(gen) { a: A =>
          val person = Person("bob", a)
          client.putItem(PutItemRequest.builder.tableName(t).item(format.write(person).toAttributeValue.m).build).get
          val resp =
            client.getItem(GetItemRequest.builder.tableName(t).key(DynamoObject("name" -> "bob").toJavaMap).build).get
          // Reading back must yield exactly the value that was written.
          format.read(DynamoObject(resp.item).toDynamoValue) shouldBe Right(person)
        }
      }
    }
  }
  // Convenience overload: use the implicit Arbitrary generator for A.
  def testReadWrite[A: DynamoFormat: TypeTag]()(implicit arb: Arbitrary[A]): Unit =
    testReadWrite(arb.arbitrary)

  testReadWrite[Set[Int]]()
  testReadWrite[Set[Long]]()
  // Generate limited values for double and big decimal
  // see: https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html#HowItWorks.DataTypes.Number
  testReadWrite[Set[Double]](Gen.containerOf[Set, Double](Arbitrary.arbLong.arbitrary.map(_.toDouble)))
  testReadWrite[Set[BigDecimal]](
    Gen.containerOf[Set, BigDecimal](Arbitrary.arbLong.arbitrary.map(BigDecimal(_)))
  )
  // DynamoDB string sets reject empty strings, so only non-empty strings are generated.
  val nonEmptyStringGen: Gen[String] =
    Gen.nonEmptyContainerOf[Array, Char](Arbitrary.arbChar.arbitrary).map(arr => new String(arr))
  testReadWrite[Set[String]](Gen.containerOf[Set, String](nonEmptyStringGen))
  testReadWrite[Option[String]](Gen.option(nonEmptyStringGen))
  testReadWrite[Option[Int]]()
  testReadWrite[Map[String, Long]](Gen.mapOf[String, Long] {
    for {
      key <- nonEmptyStringGen
      value <- Arbitrary.arbitrary[Long]
    } yield key -> value
  })
  testReadWrite[List[String]](Gen.listOf(nonEmptyStringGen))
  testReadWrite[List[Int]](Gen.listOfN(0, Gen.posNum[Int]))
}
| scanamo/scanamo | scanamo/src/test/scala-2.x/org/scanamo/DynamoFormatTest.scala | Scala | apache-2.0 | 2,634 |
package dk.bayes.clustergraph.infer
import dk.bayes.clustergraph.Cluster
/**
* Specifies order of clusters in which messages are sent for a single iteration of Belief Propagation.
*
* @author Daniel Korzekwa
*/
trait MessageOrder {
  /**
   * Returns order of clusters in which messages are sent for a single iteration of Belief Propagation.
   *
   * @param clusters Clusters of the cluster graph, in their original order
   * @return The same clusters, reordered into the message-passing schedule
   */
  def ordered(clusters:Seq[Cluster]):Seq[Cluster]
} | danielkorzekwa/bayes-scala | src/main/scala/dk/bayes/clustergraph/infer/MessageOrder.scala | Scala | bsd-2-clause | 407 |
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.servicemanager.configuration
/** Source for run-time, dynamic cluster configuration. */
/** Source for run-time, dynamic cluster configuration. */
trait DynamicPropertiesFactory {

  /** Properties to configure services and components, keyed by well-known configuration keys. */
  type ConfigProperties = Map[ConfigurationKeys.Value, String]

  /** Generate dynamic configuration properties for a given cluster.
    * This is useful for properties that differ from cluster to cluster such as host names.
    *
    * @param masterName the cluster master node hostname
    * @param slaveNames the cluster slave nodes hostnames
    * @return the dynamically generated configuration properties
    * @see [[es.tid.cosmos.servicemanager.configuration.ConfigurationKeys]]
    */
  def forCluster(masterName: String, slaveNames: Seq[String]): ConfigProperties
}
| telefonicaid/fiware-cosmos-platform | service-manager/src/main/scala/es/tid/cosmos/servicemanager/configuration/DynamicPropertiesFactory.scala | Scala | apache-2.0 | 1,436 |
package opennlp.scalabha.ngram
import scala.annotation.tailrec
import scala.collection.generic.CanBuildFrom
import opennlp.scalabha.tag.support.CondCountsTransformer
import opennlp.scalabha.tag.support.CondFreqDist
import opennlp.scalabha.tag.support.MultinomialFreqDist
import opennlp.scalabha.util.CollectionUtils._
import opennlp.scalabha.util.CollectionUtil._
import opennlp.scalabha.util.LogNum
import opennlp.scalabha.util.Pattern.{ :+ }
import opennlp.scalabha.tag.support.PassthroughCondCountsTransformer
/**
 * An n-gram language model over items of type T, generating sequences of type S.
 *
 * Boundary symbols are represented as `None`: contexts shorter than n-1 are padded
 * with `None` at the start, and `None` marks the end of a sequence.
 *
 * @param n   the order of the model (size of the sliding window)
 * @param cfd maps a context of length n-1 to the distribution over the next item
 */
class Ngram[T, S](
  val n: Int,
  val cfd: Seq[Option[T]] => MultinomialFreqDist[Option[T]]) {

  /**
   * Calculate the probability of the given COMPLETE sequence. This method
   * appends start and end symbols to the given sequence before calculating
   * the probability.
   *
   * The given sequence can be any length.
   */
  def sentenceProb(sentence: Seq[T]): LogNum = {
    liftedSeqProb(Vector.fill(n - 1)(None) ++ sentence.map(Option(_)) :+ None)
  }

  /**
   * Calculate the probability of the given SUB-sequence. This method
   * DOES NOT append start or end symbols to the sequence before calculating
   * the probability.
   *
   * The given sequence must be at least N items in length.
   */
  def seqProb(seq: Seq[T]): LogNum = {
    liftedSeqProb(seq.map(Option(_)))
  }

  /**
   * Calculate the probability of the given SUB-sequence. This method
   * DOES NOT append start or end symbols to the sequence before calculating
   * the probability, but it DOES allow start or end symbols (None) to be
   * included in the given sequence.
   *
   * The given sequence must be at least N items in length.
   */
  def liftedSeqProb(seq: Seq[Option[T]]): LogNum = {
    require(seq.length >= n, "seq must have length at least N=%s".format(n))
    // Product over every window of the conditional probability of its last item.
    seq
      .sliding(n)
      .map { case context :+ word => cfd(context.toIndexedSeq)(word) }
      .product
  }

  /**
   * Generate a random complete sequence based on this n-gram model.
   */
  def generate(implicit bf: CanBuildFrom[S, T, S]): S = {
    val b = bf()
    // Sample items one at a time, sliding the context window, until the
    // end symbol (None) is drawn.
    @tailrec def inner(cur: Seq[Option[T]]) {
      cfd(cur).sample match {
        case None =>
        case next =>
          b += next.get
          inner(cur.drop(1) :+ next)
      }
    }
    inner(Vector.fill(n - 1)(None))
    b.result
  }
}
object Ngram {
  // Default factory fixes the sequence type S to Seq[T].
  def apply[T](n: Int, cfd: Seq[Option[T]] => MultinomialFreqDist[Option[T]]) = new Ngram[T, Seq[T]](n, cfd)
  def unapply[T, S](ngram: Ngram[T, S]) = Some(ngram.n, ngram.cfd)
}
/**
 * Trains an [[Ngram]] model of order `n` from a corpus of sentences.
 *
 * @param n                 the order of the model
 * @param countsTransformer optional smoothing/transformation of the raw conditional counts
 *                          (defaults to a pass-through, i.e. maximum likelihood estimation)
 */
case class NgramTrainer[T](
  n: Int,
  countsTransformer: CondCountsTransformer[Seq[Option[T]], Option[T]] = PassthroughCondCountsTransformer[Seq[Option[T]], Option[T]]()) {

  // Pads each sentence with n-1 start symbols and one end symbol (None), counts
  // (context -> next item) pairs over all n-sized windows, transforms the counts,
  // and builds the conditional frequency distribution for the model.
  def apply[S <% Seq[T]](sentences: TraversableOnce[S]): Ngram[T, S] = {
    new Ngram[T, S](n, CondFreqDist(
      countsTransformer(
        sentences
          .flatMap { sentence =>
            (Seq.fill(n - 1)(None) ++ sentence.map(Option(_)) :+ None)
              .sliding(n)
              .map { case context :+ word => context -> word }
          }
          .toIterator
          .groupByKey
          .mapVals(_.counts))))
  }
}
| eponvert/Scalabha | src/main/scala/opennlp/scalabha/ngram/Ngram.scala | Scala | apache-2.0 | 3,107 |
package provingground.interface
import provingground._
import provingground.{FiniteDistribution => FD, ProbabilityDistribution => PD}
import learning._
// import breeze.linalg.{Vector => _, _}
// import breeze.stats.distributions._
// import breeze.plot._
import scala.concurrent._
import scala.util.{Try, Random}
import scala.concurrent.ExecutionContext.Implicits.global
/**
 * Draws integer-count samples (value -> count maps) from composite
 * [[ProbabilityDistribution]] structures by recursively decomposing them.
 */
object Sampler {
  val rand = new Random

  // Total count across a vector of (value, count) pairs.
  def total[A](x: Vector[(A, Int)]) = (x map (_._2)).sum

  // Pointwise sum of two count maps over the union of their supports.
  def combine[A](x: Map[A, Int], y: Map[A, Int]) = {
    val supp = x.keySet union (y.keySet)
    (supp map ((a) => (a, x.getOrElse(a, 0) + y.getOrElse(a, 0)))).toMap
  }

  // Merge duplicate keys in a vector of pairs by summing their counts.
  def collapse[A](ps: Vector[(A, Int)]) =
    (ps.groupBy(_._1).view.mapValues ((x) => total(x))).toMap

  def combineAll[A](xs: Vector[Map[A, Int]]) = {
    collapse((xs map (_.toVector)).flatten)
  }

  // Multinomial sample of `size` draws from an explicit pmf; empty if all weights are 0.
  def fromPMF[A](pmf: Vector[Weighted[A]], size: Int): Map[A, Int] =
    if ((pmf map (_.weight)).sum > 0) {
      val vec = pmf map (_.elem)
      val ps = pmf map (_.weight)
      getMultinomial(vec, ps, size)
    } else Map()

  // Buckets uniform draws into the intervals defined by successive probabilities:
  // draws <= ps.head land in the head bucket; the rest are shifted by ps.head and
  // recursively bucketed over the tail. Throws if `xs` is empty.
  def getBucketSizes[A](xs: Vector[A],
                        ps: Vector[Double],
                        sample: Vector[Double]): Map[A, Int] =
    xs match {
      case Vector() => throw new Exception("Empty bucket")
      case Vector(x) => Map(x -> sample.size)
      case head +: tail =>
        val p = ps.head
        val tailSample = sample.filter(_ > p).map((a) => a - p)
        getBucketSizes(xs.tail, ps.tail, tailSample) + (head -> (sample.size - tailSample.size))
    }

  // Multinomial sampling via `size` uniform draws bucketed by `ps`.
  def getMultinomial[A](xs: Vector[A],
                        ps: Vector[Double],
                        size: Int): Map[A, Int] =
    if (size == 0) Map()
    else {
      // // val mult = Multinomial(DenseVector(ps.toArray))
      // val samp : Map[Int, Int] = mult.sample(size).groupBy(identity).view.mapValues(_.size)
      // samp map { case (j, m) => (xs(j), m) }
      getBucketSizes(xs, ps, (1 to size).toVector.map((_) => rand.nextDouble()))
    }

  // Normalize a count map into a finite distribution.
  def toFD[A](sample: Map[A, Int]) = {
    val tot = total(sample.toVector)
    FiniteDistribution(sample.toVector map {
      case (x, n) => Weighted(x, n.toDouble / tot)
    })
  }

  // Expand a count map into a flat vector with each value repeated `count` times.
  def linear[A](m: Map[A, Int]) =
    m.toVector flatMap {
      case (a, n) => Vector.fill(n)(a)
    }

  // Inverse of `linear`: collapse a flat vector back into a count map.
  def grouped[A](vec: Vector[A]) =
    vec.groupBy(identity).view.mapValues (_.size).toMap

  import ProbabilityDistribution._

  import monix.eval._

  // Task-based sampling backend used by the monix learning pipeline.
  implicit object MonixBreezeSamples
      extends MonixSamples
      with TangSamples[Task] {
    def sample[A](pd: PD[A], n: Int) = Task(Sampler.sample(pd, n))
  }

  // Rescale a count map to exactly `n` total draws by multinomial resampling
  // (no-op when it already totals n or is empty).
  def reSample[A](samp: Map[A, Int], n: Int): Map[A, Int] = {
    val tot = samp.values.sum
    if (tot == 0 || tot == n) samp
    else {
      val xs = samp.keys.toVector
      val ps = xs map ((a) => samp(a).toDouble / tot)
      getMultinomial(xs, ps, n)
    }
  }

  // NOTE(review): counts draws strictly greater than p, i.e. successes with
  // probability 1 - p, not p. Callers (the Mixin cases below) pass the mixin
  // weight `q` here — confirm whether this inversion is intended.
  def binomial(n: Int, p: Double): Int =
    (1 to n).map((_) => rand.nextDouble()).filter(_ > p).size

  // Recursively sample `n` draws from a composite distribution, dispatching on
  // its structure. Returns a (value -> count) map.
  def sample[A](pd: ProbabilityDistribution[A], n: Int): Map[A, Int] =
    if (n < 1) Map()
    else
      pd match {
        case FiniteDistribution(pmf) => fromPMF(pmf, n)
        // Two-way mixture: split n binomially between the components.
        case mx: Mixin[u] =>
          val m: Int = binomial(n, mx.q)
          combine(sample(mx.first, n - m), sample(mx.second, m))
        // Mixture with an optional second component: failed/None draws from the
        // second component are re-assigned to the first.
        case mx: MixinOpt[u] =>
          val m: Int = binomial(n, mx.q)
          val optSample: Map[Option[u], Int] =
            Try(sample(mx.second, m)).getOrElse(Map(None -> 1))
          val secondPreSample = for ((xo, n) <- optSample; x <- xo)
            yield (x, n)
          val secondSample = reSample(secondPreSample, m)
          combine(sample(mx.first, n - total(secondSample.toVector)),
                  secondSample)
        // General mixture: multinomial split of n across the component distributions.
        case mx: Mixture[u] =>
          val sampSizes = getMultinomial(mx.dists, mx.ps, n)
          val polySamp = (for ((d, m) <- sampSizes) yield sample(d, m))
          combineAll(polySamp.toVector)
        case Mapped(base, f) =>
          collapse((sample(base, n) map { case (x, n) => (f(x), n) }).toVector)
        // Monadic bind: sample the base, then sample each fiber with the base count.
        case FlatMapped(base, f) =>
          val baseSamp = sample(base, n)
          val sampsVec =
            (for ((a, m) <- baseSamp) yield sample(f(a), m)).toVector
          combineAll(sampsVec)
        // Pair the two samples after shuffling the first to randomize the matching.
        case Product(first, second) =>
          val firstSamp = sample(first, n)
          val secondSamp = sample(second, n)
          grouped(rand.shuffle(linear(firstSamp)).zip(linear(secondSamp)))
        // Fiber product: group base draws by quotient, then sample each fiber to
        // match its group size and pair them up.
        case fp: FiberProduct[u, q, v] =>
          import fp._
          val baseSamp = sample(base, n)
          val groups = baseSamp groupBy { case (x, n) => quotient(x) }
          val sampVec = groups.keys.toVector.flatMap { (a) =>
            val size = groups(a).values.sum
            val fiberSamp = sample(fibers(a), size)
            rand.shuffle(linear(groups(a))).zip(linear(fiberSamp))
          }
          grouped(sampVec)
        // Rejection sampling with one resampling pass to restore the target size.
        case Conditioned(base, p) =>
          val firstSamp = sample(base, n).view.filterKeys (p)
          val tot = firstSamp.values.sum
          if (tot == 0) Map()
          else if (tot == n) firstSamp.toMap
          else {
            val xs = firstSamp.keys.toVector
            val ps = xs map ((a) => firstSamp(a).toDouble / tot)
            getMultinomial(xs, ps, n)
          }
        // Drop None draws, then resample the survivors back up to n.
        case Flattened(base) =>
          val optSamp = sample(base, n)
          val firstSamp =
            for {
              (optx, p) <- optSamp
              x <- optx
            } yield (x -> p)
          val tot = firstSamp.values.sum
          if (tot == 0) Map()
          else if (tot == n) firstSamp
          else {
            val xs = firstSamp.keys.toVector
            val ps = xs map ((a) => firstSamp(a).toDouble / tot)
            getMultinomial(xs, ps, n)
          }
        // Map through a partial (Option-valued) function, then resample survivors.
        case CondMapped(base, f) =>
          val optSamp = sample(base, n) map { case (x, n) => (f(x), n) }
          val firstSamp =
            for {
              (optx, p) <- optSamp
              x <- optx
            } yield (x -> p)
          val tot = firstSamp.values.sum
          if (tot == 0) Map()
          else if (tot == n) firstSamp
          else {
            val xs = firstSamp.keys.toVector
            val ps = xs map ((a) => firstSamp(a).toDouble / tot)
            getMultinomial(xs, ps, n)
          }
        case Scaled(base, sc) => sample(base, (n * sc).toInt)
        case Sum(first, second) => combine(sample(first, n), sample(second, n))
      }
}
import HoTT._
/** Helpers for inspecting term samples; the breeze-based plotting code is kept
  * commented out so the module does not depend on breeze-viz.
  */
object TermSampler {
  import Sampler._

  // lazy val fig = Figure("Term Sample")
  //
  // lazy val entPlot = fig.subplot(0)
  //
  // import java.awt.Color
  //
  // def plotEntsThms(thms: ThmEntropies) = {
  //   val X = DenseVector((thms.entropyPairs map (_._2._1)).toArray)
  //   val Y = DenseVector((thms.entropyPairs map (_._2._2)).toArray)
  //   val names = (n: Int) => thms.entropyPairs(n)._1.toString
  //   val colours = (n: Int) => if (X(n) < Y(n)) Color.RED else Color.BLUE
  //   entPlot += scatter(X, Y, (_) => 0.1, colors = colours, tips = names)
  //   entPlot.xlabel = "statement entropy"
  //   entPlot.ylabel = "proof entropy"
  // }
  //
  // def plotEnts(sample: Map[Term, Int]) = plotEntsThms(thmEntropies(sample))
  //
  // def plotEnts(fd: FiniteDistribution[Term]) = plotEntsThms(ThmEntropies(fd))

  // Theorem entropies of a (normalized) term sample.
  def thmEntropies(sample: Map[Term, Int]) = ThmEntropies(toFD(sample))

  // Variant that also passes the deducer's variables and lambda weight.
  def thmEntropies(sample: Map[Term, Int], d: BasicDeducer) =
    ThmEntropies(toFD(sample), d.vars, d.lambdaWeight)
}
/**
 * Sampling-based evolution of a finite distribution on HoTT terms, driven by the
 * given deducer `d`. Each step samples the evolved distribution, computes
 * entropy-based feedback along tangent directions, and shifts the distribution.
 */
class TermSampler(d: BasicDeducer) {
  import Sampler._

  /** One evolution step as a function FD[Term] => FD[Term] (see [[NextSample]]). */
  def flow(sampleSize: Int,
           derSampleSize: Int,
           epsilon: Double,
           sc: Double,
           inertia: Double): FD[Term] => FD[Term] =
    (p: FD[Term]) =>
      NextSample(p, sampleSize, derSampleSize, sc, epsilon, inertia)
        .shiftedFD(derSampleSize, epsilon)

  /** Infinite iterator of evolved distributions starting from `init`. */
  def iterator(init: FD[Term],
               sampleSize: Int,
               derSampleSize: Int,
               epsilon: Double,
               sc: Double,
               inertia: Double) =
    Iterator.iterate(init)(
      flow(sampleSize, derSampleSize, epsilon, sc, inertia))

  /** Like [[iterator]], but yields the full [[NextSample]] state at each step. */
  def loggedIterator(init: FD[Term],
                     sampleSize: Int,
                     derSampleSize: Int,
                     epsilon: Double,
                     sc: Double,
                     inertia: Double) =
    Iterator.iterate(
      NextSample(init, sampleSize, derSampleSize, sc, epsilon, inertia))((ns) =>
      ns.succ)

  // Cooperative kill-switch for loggedBuffer's background iteration.
  var live: Boolean = true

  def stop() = { live = false }

  /** Runs [[loggedIterator]] asynchronously (on the implicit global execution
    * context) while `live` is true, appending each state to the returned mutable
    * buffer. The buffer is filled concurrently with the caller.
    */
  def loggedBuffer(init: FD[Term],
                   sampleSize: Int,
                   derSampleSize: Int,
                   epsilon: Double,
                   sc: Double,
                   inertia: Double) = {
    val it = loggedIterator(init,
                            sampleSize: scala.Int,
                            derSampleSize: scala.Int,
                            epsilon: scala.Double,
                            sc: scala.Double,
                            inertia: scala.Double).takeWhile((_) => live)
    val buf = scala.collection.mutable.ArrayBuffer[NextSample]()
    Future {
      it.foreach((ns) => buf.append(ns))
    }
    buf
  }

  /**
   * State of one evolution step: the current distribution `p` plus the
   * hyper-parameters, with the evolved/sampled quantities as lazy vals.
   */
  case class NextSample(p: FD[Term],
                        size: Int,
                        derTotalSize: Int,
                        sc: Double,
                        epsilon: Double,
                        inertia: Double) {
    // Distribution evolved by the deducer.
    lazy val init = d.hFunc(sc)(p)

    lazy val nextSamp = sample(init, size)

    // Empirical distribution of the sample, blended with `p` by `inertia`.
    lazy val nextFD = toFD(nextSamp) * (1.0 - inertia) ++ (p * inertia)

    // def plotEntropies = plotEnts(nextFD)

    lazy val thmEntropies = ThmEntropies(nextFD, d.vars, d.lambdaWeight)

    // Derivative of the evolution at nextFD along the tangent direction `tang`.
    def derivativePD(tang: PD[Term]): PD[Term] = d.hDerFunc(sc)(nextFD)(tang)

    /**
     * Sample sizes for tangents at atomic vectors
     */
    lazy val derSamplesSizes = sample(nextFD, derTotalSize)

    /**
     * Finite distributions as derivatives at nextFD of the atomic tangent vectors with chosen sample sizes.
     */
    lazy val derFDs =
      derSamplesSizes map {
        case (x, n) =>
          val tang = FD.unif(x) //tangent vector, atom at `x`
          val dPD =
            d.hDerFunc(sc)(nextFD)(tang) //recursive distribution based on derivative for sampling
          val samp = sample(dPD, n)
          x -> toFD(samp)
      }

    // Entropy feedback for each atomic tangent direction.
    lazy val feedBacks =
      derFDs map {
        case (x, tfd) =>
          x -> thmEntropies.feedbackTermDist(tfd)
      }

    def derivativeFD(p: PD[Term], n: Int) = toFD(sample(derivativePD(p), n))

    def vecFlow(vec: PD[Term], n: Int) =
      thmEntropies.feedbackTermDist(derivativeFD(p, n))

    def termFlow(x: Term, n: Int) = vecFlow(FD.unif(x), n)

    def totalFlow(totalSize: Int): Map[Term, Double] =
      (sample(nextFD, totalSize) map {
        case (x, n) =>
          val flow = termFlow(x, n)
          x -> flow
      }).toMap

    // Exponentially reweight nextFD by the feedback, then renormalize.
    def shiftedFD(totalSize: Int, epsilon: Double) = {
      val tf = feedBacks // totalFlow(totalSize)

      val shift = (x: Term) => tf.getOrElse(x, 0.0)

      val pmf =
        nextFD.pmf map {
          case Weighted(x, p) =>
            Weighted(x, p * math.exp(shift(x) * epsilon))
        }

      FD(pmf).flatten.normalized()
    }

    lazy val succFD = shiftedFD(derTotalSize, epsilon)

    // Next step of the iteration, carrying the shifted distribution.
    lazy val succ = this.copy(p = succFD)
  }
}
| siddhartha-gadgil/ProvingGround | mantle/src/main/scala/provingground/interface/Sampler.scala | Scala | mit | 11,455 |
/*
* PaintIcon.scala
* (Mellite)
*
* Copyright (c) 2012-2022 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Affero General Public License v3+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.mellite.impl.component
import java.awt.{Component, Graphics, Paint}
import javax.swing.Icon
import scala.swing.Graphics2D
/** A solid rectangular Swing icon of fixed size, filled with a mutable AWT `Paint`.
  * Reassign `paint` to change the fill used on subsequent repaints.
  */
class PaintIcon(var paint: Paint, width: Int, height: Int) extends Icon {
  def getIconWidth : Int = width
  def getIconHeight: Int = height

  /** Fills the icon's rectangle at `(x, y)` with the current `paint`. */
  def paintIcon(c: Component, g: Graphics, x: Int, y: Int): Unit = {
    val canvas = g.asInstanceOf[java.awt.Graphics2D]
    canvas.setPaint(paint)
    canvas.fillRect(x, y, width, height)
  }
}
| Sciss/Mellite | app/src/main/scala/de/sciss/mellite/impl/component/PaintIcon.scala | Scala | agpl-3.0 | 748 |
package com.abien.xray.roller
import java.util.HashMap
import org.junit._
import Assert._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.junit.JUnitSuite
import org.scalatest.junit.ShouldMatchersForJUnit
/** Unit tests for XRayModel: initialization from (possibly null/empty/configured)
  * parameter maps, URL extraction, and the model name. A fresh instance is
  * created before every test.
  */
class XRayModelTest extends JUnitSuite with MockitoSugar with ShouldMatchersForJUnit{

  // Class under test; re-created in setUp before each test method.
  var cut:XRayModel = _

  @Before
  def setUp: Unit = {
    cut = new XRayModel();
  }

  // A null config map must still yield an xray client with the default URL.
  @Test
  def initWithNullMap = {
    cut.init(null)
    def xray = cut.getXray
    xray should not be (null)
    val baseUrl = xray.getBaseUrl
    baseUrl should be (XRayModel.URL)
  }

  // An empty config map falls back to the default URL as well.
  @Test
  def initWithMap = {
    def map = new HashMap()
    cut.init(map)
    def xray = cut.getXray
    xray should not be (null)
    val baseUrl = xray.getBaseUrl
    baseUrl should be (XRayModel.URL)
  }

  // A map carrying the XRAYURL key overrides the default base URL.
  @Test
  def initWithConfiguredMap = {
    var map = new HashMap[String,String]()
    def customURL = "custom url"
    map.put(XRayModel.XRAYURL,customURL)
    cut.init(map)
    def xray = cut.getXray
    xray should not be (null)
    val baseUrl = xray.getBaseUrl
    baseUrl should be (customURL)
  }

  @Test
  def extractUrl = {
    var map = new HashMap[String,String]()
    def customURL = "custom url"
    map.put(XRayModel.XRAYURL,customURL)
    val url = this.cut.extractUrl(map);
    url should be (customURL)
  }

  @Test
  def modelName = {
    val modelName = cut.getModelName
    modelName should be (XRayModel.XRAYMODEL_NAME)
  }
}
| abhijitsarkar/legacy | adam-bien/x-ray/x-ray-roller-adapter/src/test/java/com/abien/xray/roller/XRayModelTest.scala | Scala | gpl-3.0 | 1,563 |
package net.fyrie.redis
import org.specs2._
/** Integration specs for the redis set commands (SADD, SREM, SPOP, SMOVE, SCARD,
  * SISMEMBER, SINTER(STORE), SUNION(STORE), SDIFF, SMEMBERS, SRANDMEMBER) against
  * a live client provided by TestClient. The wrong-type error cases are kept
  * commented out below.
  */
class SetSpec extends mutable.Specification with TestClient {

  "sadd" >> {
    "should add a non-existent value to the set" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
    }
    "should not add an existing value to the set" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "foo") === false
    }
    /*"should fail if the key points to a non-set" ! client { r ⇒
      r.sync.lpush("list-1", "foo") === (1)
      r.sync.sadd("list-1", "foo") must throwA[RedisErrorException]("ERR Operation against a key holding the wrong kind of value")
    }*/
  }

  "srem" >> {
    "should remove a value from the set" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.srem("set-1", "bar") === true
      r.sync.srem("set-1", "foo") === true
    }
    "should not do anything if the value does not exist" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.srem("set-1", "bar") === false
    }
    /*"should fail if the key points to a non-set" ! client { r ⇒
      r.sync.lpush("list-1", "foo") === 1
      r.sync.srem("list-1", "foo") must throwA[RedisErrorException]("ERR Operation against a key holding the wrong kind of value")
    }*/
  }

  "spop" >> {
    // SPOP removes and returns an arbitrary member, hence beOneOf.
    "should pop a random element" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.spop("set-1").parse[String] must beOneOf(Some("foo"), Some("bar"), Some("baz"))
    }
    "should return nil if the key does not exist" ! client { r ⇒
      r.sync.spop("set-1") === (None)
    }
  }

  "smove" >> {
    "should move from one set to another" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sadd("set-2", "1") === true
      r.sync.sadd("set-2", "2") === true
      r.sync.smove("set-1", "set-2", "baz") === true
      // sadd returning false/true proves where "baz" now lives.
      r.sync.sadd("set-2", "baz") === false
      r.sync.sadd("set-1", "baz") === true
    }
    "should return 0 if the element does not exist in source set" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.smove("set-1", "set-2", "bat") === false
      r.sync.smove("set-3", "set-2", "bat") === false
    }
    /*"should give error if the source or destination key is not a set" ! client { r ⇒
      r.sync.lpush("list-1", "foo") === (1)
      r.sync.lpush("list-1", "bar") === (2)
      r.sync.lpush("list-1", "baz") === (3)
      r.sync.sadd("set-1", "foo") === true
      r.sync.smove("list-1", "set-1", "bat") must throwA[RedisErrorException]("ERR Operation against a key holding the wrong kind of value")
    }*/
  }

  "scard" >> {
    "should return cardinality" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.scard("set-1") === (3)
    }
    "should return 0 if key does not exist" ! client { r ⇒
      r.sync.scard("set-1") === (0)
    }
  }

  "sismember" >> {
    "should return true for membership" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sismember("set-1", "foo") === true
    }
    "should return false for no membership" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sismember("set-1", "fo") === false
    }
    "should return false if key does not exist" ! client { r ⇒
      r.sync.sismember("set-1", "fo") === false
    }
  }

  "sinter" >> {
    "should return intersection" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sadd("set-2", "foo") === true
      r.sync.sadd("set-2", "bat") === true
      r.sync.sadd("set-2", "baz") === true
      r.sync.sadd("set-3", "for") === true
      r.sync.sadd("set-3", "bat") === true
      r.sync.sadd("set-3", "bay") === true
      r.sync.sinter(Set("set-1", "set-2")).parse[String] === (Set("foo", "baz"))
      r.sync.sinter(Set("set-1", "set-3")).parse[String] === (Set.empty)
    }
    "should return empty set for non-existing key" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sinter(Set("set-1", "set-4")).parse[String] === (Set.empty)
    }
  }

  "sinterstore" >> {
    "should store intersection" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sadd("set-2", "foo") === true
      r.sync.sadd("set-2", "bat") === true
      r.sync.sadd("set-2", "baz") === true
      r.sync.sadd("set-3", "for") === true
      r.sync.sadd("set-3", "bat") === true
      r.sync.sadd("set-3", "bay") === true
      r.sync.sinterstore("set-r", Set("set-1", "set-2")) === (2)
      r.sync.scard("set-r") === (2)
      r.sync.sinterstore("set-s", Set("set-1", "set-3")) === (0)
      r.sync.scard("set-s") === (0)
    }
    "should return empty set for non-existing key" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sinterstore("set-r", Seq("set-1", "set-4")) === (0)
      r.sync.scard("set-r") === (0)
    }
  }

  "sunion" >> {
    "should return union" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sadd("set-2", "foo") === true
      r.sync.sadd("set-2", "bat") === true
      r.sync.sadd("set-2", "baz") === true
      r.sync.sadd("set-3", "for") === true
      r.sync.sadd("set-3", "bat") === true
      r.sync.sadd("set-3", "bay") === true
      r.sync.sunion(Set("set-1", "set-2")).parse[String] === (Set("foo", "bar", "baz", "bat"))
      r.sync.sunion(Set("set-1", "set-3")).parse[String] === (Set("foo", "bar", "baz", "for", "bat", "bay"))
    }
    "should return empty set for non-existing key" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sunion(Seq("set-1", "set-2")).parse[String] === (Set("foo", "bar", "baz"))
    }
  }

  "sunionstore" >> {
    "should store union" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sadd("set-2", "foo") === true
      r.sync.sadd("set-2", "bat") === true
      r.sync.sadd("set-2", "baz") === true
      r.sync.sadd("set-3", "for") === true
      r.sync.sadd("set-3", "bat") === true
      r.sync.sadd("set-3", "bay") === true
      r.sync.sunionstore("set-r", Set("set-1", "set-2")) === (4)
      r.sync.scard("set-r") === (4)
      r.sync.sunionstore("set-s", Set("set-1", "set-3")) === (6)
      r.sync.scard("set-s") === (6)
    }
    "should treat non-existing keys as empty sets" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sunionstore("set-r", Set("set-1", "set-4")) === (3)
      r.sync.scard("set-r") === (3)
    }
  }

  "sdiff" >> {
    "should return diff" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sadd("set-2", "foo") === true
      r.sync.sadd("set-2", "bat") === true
      r.sync.sadd("set-2", "baz") === true
      r.sync.sadd("set-3", "for") === true
      r.sync.sadd("set-3", "bat") === true
      r.sync.sadd("set-3", "bay") === true
      r.sync.sdiff("set-1", Set("set-2", "set-3")).parse[String] === (Set("bar"))
    }
    "should treat non-existing keys as empty sets" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.sdiff("set-1", Set("set-2")).parse[String] === (Set("foo", "bar", "baz"))
    }
  }

  "smembers" >> {
    "should return members of a set" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.smembers("set-1").parse[String] === (Set("foo", "bar", "baz"))
    }
    "should return None for an empty set" ! client { r ⇒
      r.sync.smembers("set-1").parse[String] === (Set.empty)
    }
  }

  "srandmember" >> {
    // SRANDMEMBER returns (without removing) an arbitrary member.
    "should return a random member" ! client { r ⇒
      r.sync.sadd("set-1", "foo") === true
      r.sync.sadd("set-1", "bar") === true
      r.sync.sadd("set-1", "baz") === true
      r.sync.srandmember("set-1").parse[String] must beOneOf(Some("foo"), Some("bar"), Some("baz"))
    }
    "should return None for a non-existing key" ! client { r ⇒
      r.sync.srandmember("set-1").parse[String] === (None)
    }
  }
}
| Tjoene/thesis | Case_Programs/fyrie-redis-master/src/test/scala/net/fyrie/redis/SetSpec.scala | Scala | gpl-2.0 | 9,413 |
package example
/** Minimal trait-composition example: `BaseTrait` forwards to an abstract
  * `MiscTrait` member that implementors must supply.
  */
object Traits {
  /** Carries a single abstract string value. */
  trait MiscTrait {
    val anotherParam: String
  }
  /** Exposes the `anotherParam` of its abstract `param0` member. */
  trait BaseTrait {
    val param0: MiscTrait
    def showAnotherParam: String = param0.anotherParam
  }
}
} | Alex-At-Home/ammonite-failure-case | src/main/scala/example/Traits.scala | Scala | apache-2.0 | 214 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.forms.scalaforms {
import javax.inject.Inject
import java.net.URL
import play.api.{Configuration, Environment}
import play.api.i18n._
import scalaguide.forms.scalaforms.controllers.routes
import play.api.mvc._
import play.api.test.{WithApplication, _}
import play.api.test._
import org.specs2.mutable.Specification
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import scala.concurrent.{ExecutionContext, Future}
// #form-imports
import play.api.data._
import play.api.data.Forms._
// #form-imports
// #validation-imports
import play.api.data.validation.Constraints._
// #validation-imports
@RunWith(classOf[JUnitRunner])
  /**
   * Executable checks for the forms documentation snippets: each example
   * exercises one of the forms declared on [[controllers.Application]].
   */
  class ScalaFormsSpec extends Specification with ControllerHelpers {

    // Stand-alone MessagesApi/Messages pair so templates can be rendered
    // outside of an actual request cycle.
    val messagesApi = new DefaultMessagesApi()
    implicit val messages: Messages = messagesApi.preferred(Seq.empty)

    "A scala forms" should {

      "generate from map" in new WithApplication {
        val controller = app.injector.instanceOf[controllers.Application]
        val userForm = controller.userForm
        //#userForm-generate-map
        val anyData = Map("name" -> "bob", "age" -> "21")
        val userData = userForm.bind(anyData).get
        //#userForm-generate-map
        userData.name === "bob"
      }

      "generate from request" in new WithApplication {
        import play.api.libs.json.Json
        val controller = app.injector.instanceOf[controllers.Application]
        val userForm = controller.userForm
        val anyData = Json.parse( """{"name":"bob","age":"21"}""")
        implicit val request = FakeRequest().withBody(anyData)
        //#userForm-generate-request
        val userData = userForm.bindFromRequest.get
        //#userForm-generate-request
        userData.name === "bob"
      }

      // Each value below is computed inside Application by binding the
      // corresponding documented form.
      "get user info from form" in new WithApplication {
        val controller = app.injector.instanceOf[controllers.Application]
        controller.userFormName === "bob"
        controller.userFormVerifyName === "bob"
        controller.userFormConstraintsName === "bob"
        controller.userFormConstraints2Name === "bob"
        controller.userFormConstraintsAdhocName === "bob"
        controller.userFormNestedCity === "Shanghai"
        controller.userFormRepeatedEmails === List("[email protected]", "[email protected]")
        controller.userFormOptionalEmail === None
        controller.userFormStaticId === 23
        controller.userFormTupleName === "bob"
      }

      "handling form with errors" in new WithApplication {
        val controller = app.injector.instanceOf[controllers.Application]
        val userFormConstraints2 = controller.userFormConstraints2
        implicit val request = FakeRequest().withFormUrlEncodedBody("name" -> "", "age" -> "25")
        //#userForm-constraints-2-with-errors
        val boundForm = userFormConstraints2.bind(Map("bob" -> "", "age" -> "25"))
        boundForm.hasErrors must beTrue
        //#userForm-constraints-2-with-errors
      }

      "handling binding failure" in new WithApplication {
        val controller = app.injector.instanceOf[controllers.Application]
        val userForm = controller.userFormConstraints
        // Empty name violates the nonEmpty constraint.
        implicit val request = FakeRequest().withFormUrlEncodedBody("name" -> "", "age" -> "25")
        val boundForm = userForm.bindFromRequest
        boundForm.hasErrors must beTrue
      }

      "display global errors user template" in new WithApplication {
        val controller = app.injector.instanceOf[controllers.Application]
        val userForm = controller.userFormConstraintsAdHoc
        // "Johnny Utah" fails the ad-hoc whole-form constraint, producing a
        // global (form-level) error rather than a field error.
        implicit val request = FakeRequest().withFormUrlEncodedBody("name" -> "Johnny Utah", "age" -> "25")
        val boundForm = userForm.bindFromRequest
        boundForm.hasGlobalErrors must beTrue
        val html = views.html.user(boundForm)
        html.body must contain("Failed form constraints!")
      }

      "map single values" in new WithApplication {
        //#form-single-value
        val singleForm = Form(
          single(
            "email" -> email
          )
        )
        val emailValue = singleForm.bind(Map("email" -> "[email protected]")).get
        //#form-single-value
        emailValue must beEqualTo("[email protected]")
      }

      "fill selects with options and set their defaults" in new WithApplication {
        val controller = app.injector.instanceOf[controllers.Application]
        val boundForm = controller.filledAddressSelectForm
        val html = views.html.select(boundForm)
        // The pre-filled city must come back as the selected option.
        html.body must contain("option value=\"London\" selected")
      }
    }
  }
  package models {
    // Minimal domain model used by the form-handling snippets.
    case class User(name: String, age: Int)
    object User {
      // Stub persistence: the docs only need an id back, hence the constant.
      def create(user: User): Int = 42
    }
  }
// We are sneaky and want these classes available without exposing our test package structure.
  package views.html {
    //#userData-define
    case class UserData(name: String, age: Int)
    //#userData-define
    // #userData-nested
    case class AddressData(street: String, city: String)
    case class UserAddressData(name: String, address: AddressData)
    // #userData-nested
    // #userListData
    case class UserListData(name: String, emails: List[String])
    // #userListData
    // #userData-optional
    case class UserOptionalData(name: String, email: Option[String])
    // #userData-optional
    // #userData-custom-datatype
    case class UserCustomData(name:String, website: java.net.URL)
    // #userData-custom-datatype
    //#messages-request
    // A Request wrapper that carries a pre-resolved Messages instance and can
    // therefore act as a MessagesProvider for templates.
    class MessagesRequest[A](request: Request[A], val messages: Messages)
      extends WrappedRequest(request) with play.api.i18n.MessagesProvider {
      def lang: Lang = messages.lang
    }
    //#messages-request
  }
  package views.html.contact {
    // #contact-define
    // A Contact owns a repeated sub-structure: several ContactInformation
    // entries, each with its own repeated phone list.
    case class Contact(firstname: String,
                       lastname: String,
                       company: Option[String],
                       informations: Seq[ContactInformation])
    object Contact {
      // Stub persistence used by the save snippet; returns a fake id.
      def save(contact: Contact): Int = 99
    }
    case class ContactInformation(label: String,
                                  email: Option[String],
                                  phones: List[String])
    // #contact-define
  }
package controllers {
import views.html._
import views.html.contact._
  /**
   * Controller backing the Scala forms documentation pages. Each pair of
   * `//#marker` comments delimits a snippet that is extracted verbatim into
   * the published docs, so the code between a pair is kept exactly as shown
   * there.
   */
  class Application @Inject()(components: ControllerComponents) extends AbstractController(components) with I18nSupport {

    //#userForm-define
    val userForm = Form(
      mapping(
        "name" -> text,
        "age" -> number
      )(UserData.apply)(UserData.unapply)
    )
    //#userForm-define

    // Redirect target used by the submission snippets below.
    def home(id: Int = 0) = Action {
      Ok("Welcome!")
    }

    // #form-render
    def index = Action { implicit request =>
      Ok(views.html.user(userForm))
    }
    // #form-render

    def userPostHandlingFailure() = Action { implicit request =>
      val userForm = userFormConstraints

      //#userForm-handling-failure
      userForm.bindFromRequest.fold(
        formWithErrors => {
          // binding failure, you retrieve the form containing errors:
          BadRequest(views.html.user(formWithErrors))
        },
        userData => {
          /* binding success, you get the actual value. */
          val newUser = models.User(userData.name, userData.age)
          val id = models.User.create(newUser)
          Redirect(routes.Application.home(id))
        }
      )
      //#userForm-handling-failure
    }

    // #form-bodyparser
    val userPost = Action(parse.form(userForm)) { implicit request =>
      val userData = request.body
      val newUser = models.User(userData.name, userData.age)
      val id = models.User.create(newUser)
      Redirect(routes.Application.home(id))
    }
    // #form-bodyparser

    // #form-bodyparser-errors
    val userPostWithErrors = Action(parse.form(userForm, onErrors = (formWithErrors: Form[UserData]) => {
      implicit val messages = messagesApi.preferred(Seq(Lang.defaultLang))
      BadRequest(views.html.user(formWithErrors))
    })) { implicit request =>
      val userData = request.body
      val newUser = models.User(userData.name, userData.age)
      val id = models.User.create(newUser)
      Redirect(routes.Application.home(id))
    }
    // #form-bodyparser-errors

    def submit = Action { implicit request =>
      BadRequest("Not used")
    }

    // The vals below are read by ScalaFormsSpec to assert that each documented
    // form binds as expected.
    val userFormName = {
      //#userForm-get
      val anyData = Map("name" -> "bob", "age" -> "18")
      val user: UserData = userForm.bind(anyData).get
      //#userForm-get

      //#userForm-filled
      val filledForm = userForm.fill(UserData("Bob", 18))
      //#userForm-filled

      user.name
    }

    //#addressSelectForm-constraint
    val addressSelectForm: Form[AddressData] = Form(
      mapping(
        "street" -> text,
        "city" -> text
      )(AddressData.apply)(AddressData.unapply)
    )
    //#addressSelectForm-constraint

    val filledAddressSelectForm = {
      //#addressSelectForm-filled
      val selectedFormValues = AddressData(street = "Main St", city = "London")
      val filledForm = addressSelectForm.fill(selectedFormValues)
      //#addressSelectForm-filled
      filledForm
    }

    //#userForm-verify
    val userFormVerify = Form(
      mapping(
        "name" -> text,
        "age" -> number,
        "accept" -> checked("Please accept the terms and conditions")
      )((name, age, _) => UserData(name, age))
       ((user: UserData) => Some(user.name, user.age, false))
    )
    //#userForm-verify

    val userFormVerifyName = {
      val anyData = Map("name" -> "bob", "age" -> "18", "accept" -> "true")
      val user: UserData = userFormVerify.bind(anyData).get
      user.name
    }

    //#userForm-constraints
    val userFormConstraints = Form(
      mapping(
        "name" -> text.verifying(nonEmpty),
        "age" -> number.verifying(min(0), max(100))
      )(UserData.apply)(UserData.unapply)
    )
    //#userForm-constraints

    val userFormConstraintsName = {
      val anyData = Map("name" -> "bob", "age" -> "18", "accept" -> "true")
      val user: UserData = userFormConstraints.bind(anyData).get
      user.name
    }

    //#userForm-constraints-2
    val userFormConstraints2 = Form(
      mapping(
        "name" -> nonEmptyText,
        "age" -> number(min = 0, max = 100)
      )(UserData.apply)(UserData.unapply)
    )
    //#userForm-constraints-2

    val userFormConstraints2Name = {
      val anyData = Map("name" -> "bob", "age" -> "18", "accept" -> "true")
      val user: UserData = userFormConstraints2.bind(anyData).get
      user.name
    }

    //#userForm-constraints-ad-hoc
    def validate(name: String, age: Int) = {
      name match {
        case "bob" if age >= 18 =>
          Some(UserData(name, age))
        case "admin" =>
          Some(UserData(name, age))
        case _ =>
          None
      }
    }

    val userFormConstraintsAdHoc = Form(
      mapping(
        "name" -> text,
        "age" -> number
      )(UserData.apply)(UserData.unapply) verifying("Failed form constraints!", fields => fields match {
        case userData => validate(userData.name, userData.age).isDefined
      })
    )
    //#userForm-constraints-ad-hoc

    val userFormConstraintsAdhocName = {
      val anyData = Map("name" -> "bob", "age" -> "18")
      val formData = userFormConstraintsAdHoc.bind(anyData).get
      formData.name
    }

    //#userForm-nested
    val userFormNested: Form[UserAddressData] = Form(
      mapping(
        "name" -> text,
        "address" -> mapping(
          "street" -> text,
          "city" -> text
        )(AddressData.apply)(AddressData.unapply)
      )(UserAddressData.apply)(UserAddressData.unapply)
    )
    //#userForm-nested

    val userFormNestedCity = {
      val anyData = Map("name" -> "[email protected]", "address.street" -> "Century Road.", "address.city" -> "Shanghai")
      val user = userFormNested.bind(anyData).get
      user.address.city
    }

    //#userForm-repeated
    val userFormRepeated = Form(
      mapping(
        "name" -> text,
        "emails" -> list(email)
      )(UserListData.apply)(UserListData.unapply)
    )
    //#userForm-repeated

    val userFormRepeatedEmails = {
      val anyData = Map("name" -> "bob", "emails[0]" -> "[email protected]", "emails[1]" -> "[email protected]")
      val user = userFormRepeated.bind(anyData).get
      user.emails
    }

    //#userForm-optional
    val userFormOptional = Form(
      mapping(
        "name" -> text,
        "email" -> optional(email)
      )(UserOptionalData.apply)(UserOptionalData.unapply)
    )
    //#userForm-optional

    val userFormOptionalEmail = {
      val anyData = Map("name" -> "bob")
      val user = userFormOptional.bind(anyData).get
      user.email
    }

    // This Form exists only so its snippet can be extracted; the value is
    // deliberately discarded.
    //#userForm-default
    Form(
      mapping(
        "name" -> default(text, "Bob"),
        "age" -> default(number, 18)
      )(UserData.apply)(UserData.unapply)
    )
    //#userForm-default

    case class UserStaticData(id: Long, name: String, email: Option[String])

    //#userForm-static-value
    val userFormStatic = Form(
      mapping(
        "id" -> ignored(23L),
        "name" -> text,
        "email" -> optional(email)
      )(UserStaticData.apply)(UserStaticData.unapply)
    )
    //#userForm-static-value

    //#userForm-custom-datatype
    val userFormCustom = Form(
      mapping(
        "name" -> text,
        "website" -> of[URL]
      )(UserCustomData.apply)(UserCustomData.unapply)
    )
    //#userForm-custom-datatype

    //#userForm-custom-formatter
    import play.api.data.format.Formatter
    import play.api.data.format.Formats._

    implicit object UrlFormatter extends Formatter[URL] {
      override val format = Some(("format.url", Nil))
      override def bind(key: String, data: Map[String, String]) = parsing(new URL(_), "error.url", Nil)(key, data)
      override def unbind(key: String, value: URL) = Map(key -> value.toString)
    }
    //#userForm-custom-formatter

    // The "id" in the bound data is ignored: `ignored(23L)` always wins.
    val userFormStaticId = {
      val anyData = Map("id" -> "1", "name" -> "bob")
      val user = userFormStatic.bind(anyData).get
      user.id
    }

    // #userForm-tuple
    val userFormTuple = Form(
      tuple(
        "name" -> text,
        "age" -> number
      ) // tuples come with built-in apply/unapply
    )
    // #userForm-tuple

    val userFormTupleName = {
      // #userForm-tuple-example
      val anyData = Map("name" -> "bob", "age" -> "25")
      val (name, age) = userFormTuple.bind(anyData).get
      // #userForm-tuple-example
      name
    }

    // #contact-form
    val contactForm: Form[Contact] = Form(

      // Defines a mapping that will handle Contact values
      mapping(
        "firstname" -> nonEmptyText,
        "lastname" -> nonEmptyText,
        "company" -> optional(text),

        // Defines a repeated mapping
        "informations" -> seq(
          mapping(
            "label" -> nonEmptyText,
            "email" -> optional(email),
            "phones" -> list(
              text verifying pattern("""[0-9.+]+""".r, error="A valid phone number is required")
            )
          )(ContactInformation.apply)(ContactInformation.unapply)
        )
      )(Contact.apply)(Contact.unapply)
    )
    // #contact-form

    // #contact-edit
    def editContact = Action { implicit request =>
      val existingContact = Contact(
        "Fake", "Contact", Some("Fake company"), informations = List(
          ContactInformation(
            "Personal", Some("[email protected]"), List("01.23.45.67.89", "98.76.54.32.10")
          ),
          ContactInformation(
            "Professional", Some("[email protected]"), List("01.23.45.67.89")
          ),
          ContactInformation(
            "Previous", Some("[email protected]"), List()
          )
        )
      )
      Ok(views.html.contact.form(contactForm.fill(existingContact)))
    }
    // #contact-edit

    // #contact-save
    def saveContact = Action { implicit request =>
      contactForm.bindFromRequest.fold(
        formWithErrors => {
          BadRequest(views.html.contact.form(formWithErrors))
        },
        contact => {
          val contactId = Contact.save(contact)
          Redirect(routes.Application.showContact(contactId)).flashing("success" -> "Contact saved!")
        }
      )
    }
    // #contact-save

    def showContact(id: Int) = Action {
      Ok("Contact id: " + id)
    }
  }
//#messages-controller
  // I18nSupport is mixed in so that the implicit request can supply the
  // Messages instance the `user` view requires.
  class MessagesController @Inject()(cc: ControllerComponents)
    extends AbstractController(cc) with play.api.i18n.I18nSupport {

    import play.api.data.Form
    import play.api.data.Forms._

    val userForm = Form(
      mapping(
        "name" -> text,
        "age" -> number
      )(views.html.UserData.apply)(views.html.UserData.unapply)
    )

    def index = Action { implicit request =>
      Ok(views.html.user(userForm))
    }
  }
//#messages-controller
//#messages-action-transformer
// Exposes a "MessagesAction" to the user while hiding the underpinnings
  abstract class AbstractMessagesController(cc: ControllerComponents)
    extends AbstractController(cc) {

    // Wraps every incoming Request in a MessagesRequest carrying the Messages
    // preferred for that request.
    private val messagesRequestTransformer = {
      new ActionTransformer[Request, MessagesRequest] {
        def transform[A](request: Request[A]) = Future.successful {
          val messages = cc.messagesApi.preferred(request)
          new MessagesRequest(request, messages)
        }
        override protected def executionContext = cc.executionContext
      }
    }

    // ActionBuilder whose block receives a MessagesRequest instead of a bare Request.
    def MessagesAction: ActionBuilder[MessagesRequest, AnyContent] = {
      cc.actionBuilder.andThen(messagesRequestTransformer)
    }
  }
//#messages-action-transformer
//#messages-request-controller
// Example form that uses a MessagesRequest, which is also a MessagesProvider
  class MessagesRequestController @Inject()(components: ControllerComponents)
    extends AbstractMessagesController(components) {

    import play.api.data.Form
    import play.api.data.Forms._

    val userForm = Form(
      mapping(
        "name" -> text,
        "age" -> number
      )(views.html.UserData.apply)(views.html.UserData.unapply)
    )

    // MessagesAction (inherited) hands the block a MessagesRequest, which the
    // `messages` template accepts as a MessagesProvider.
    def index = MessagesAction { implicit request: MessagesRequest[_] =>
      Ok(views.html.messages(userForm))
    }

    def post() = TODO
  }
//#messages-request-controller
}
}
| wsargent/playframework | documentation/manual/working/scalaGuide/main/forms/code/ScalaForms.scala | Scala | apache-2.0 | 17,412 |
package endpoints
package documented
package algebra
import scala.language.higherKinds
/**
 * Algebra interface for describing endpoints including documentation
 * (such as human readable descriptions of things).
 *
 * This interface is modeled after [[endpoints.algebra.Endpoints]] but some methods
 * take additional parameters carrying the documentation part.
 */
trait Endpoints
  extends Requests
    with Responses {

  /**
    * Information carried by an HTTP endpoint.
    *
    * Left abstract so that each interpreter can pick a concrete representation.
    *
    * @tparam A Information carried by the request
    * @tparam B Information carried by the response
    */
  type Endpoint[A, B]

  /**
    * HTTP endpoint pairing a request description with a response description.
    *
    * @param request  Request
    * @param response Response
    */
  def endpoint[A, B](request: Request[A], response: Response[B]): Endpoint[A, B]

}
| Krever/endpoints | openapi/openapi/src/main/scala/endpoints/documented/algebra/Endpoints.scala | Scala | mit | 801 |
package controllers
import java.io._
import javax.inject.Inject
import play.api._
import play.api.mvc._
import scala.concurrent.ExecutionContext
/**
* Controller that serves static resources from an external folder.
* It useful in development mode if you want to serve static assets that shouldn't be part of the build process.
*
* Note that this controller IS NOT intended to be used in production mode and can lead to security issues.
* Therefore it is automatically disabled in production mode.
*
* All assets are served with max-age=3600 cache directive.
*
* You can use this controller in any application, just by declaring the appropriate route. For example:
* {{{
* GET /assets/\\uFEFF*file controllers.ExternalAssets.at(path="/home/peter/myplayapp/external", file)
* GET /assets/\\uFEFF*file controllers.ExternalAssets.at(path="C:\\external", file)
* GET /assets/\\uFEFF*file controllers.ExternalAssets.at(path="relativeToYourApp", file)
* }}}
*
*/
class ProxyExternalAssets @Inject()(environment: Environment)(implicit ec: ExecutionContext) extends Controller {

  /** Matches absolute paths: Unix-style (`/...`) or Windows drive-letter style. */
  val AbsolutePath = """^(/|[a-zA-Z]:\\\\).*""".r

  /**
   * Builds an `Action` that serves one static file from an external folder.
   *
   * NOTE(review): despite what the class scaladoc above says, the original
   * implementation matched `environment.mode` with a single wildcard case,
   * i.e. assets are served in every mode, including production — confirm that
   * this is intended.
   *
   * @param rootPath root folder containing the assets (absolute, or relative to the application)
   * @param file     file path extracted from the URL
   */
  def at(rootPath: String, file: String): Action[AnyContent] = Action { request =>
    val baseDir =
      if (AbsolutePath.pattern.matcher(rootPath).matches()) new File(rootPath)
      else environment.getFile(rootPath)
    val resolved = new File(baseDir, file)
    if (resolved.exists) {
      // All assets are served with a one-hour cache directive (see class scaladoc).
      Ok.sendFile(resolved, inline = true).withHeaders(CACHE_CONTROL -> "max-age=3600")
    } else {
      NotFound
    }
  }
}
// Regression check for Java generic signatures: a class with one upper-bounded
// and one lower-bounded type parameter, printed as "<name> <: <bounds>".
class Foo[T <: java.io.Serializable, U >: java.lang.Cloneable]

object Test {
  def main(args: Array[String]): Unit = {
    for (tp <- classOf[Foo[_, _]].getTypeParameters) {
      val bounds = tp.getBounds.map(_.getTypeName).mkString(", ")
      println(s"${tp.getName} <: $bounds")
    }
  }
}
package io.flow.lint
import io.apibuilder.spec.v0.models.Service
import io.flow.build.{BuildType, DownloadCache}
/**
 * Build controller that runs the linter across every provided service
 * specification, printing a per-service summary and recording each error via
 * the inherited `addError`.
 */
case class Controller() extends io.flow.build.Controller {

  override val name = "Linter"
  override val command = "lint"

  def run(
    buildType: BuildType,
    downloadCache: DownloadCache,
    services: Seq[Service]
  ) (
    implicit ec: scala.concurrent.ExecutionContext
  ): Unit = {
    services.foreach { svc =>
      print(s"${svc.name}...")
      val errors = Lint(buildType).validate(svc)
      if (errors.isEmpty) {
        println(" Valid!")
      } else {
        // Singular/plural header ("1 error:" vs "N errors:"), then one line per error.
        val header = if (errors.size == 1) "1 error" else s"${errors.size} errors"
        println(s" $header:")
        errors.sorted.foreach { err =>
          addError(svc.name, err)
          println(s"  - $err")
        }
      }
    }
  }
}
| flowcommerce/api-lint | src/main/scala/io/flow/lint/Controller.scala | Scala | mit | 885 |
package dk.gp.hgpc.util
import dk.gp.hgpc.HgpcModel
import breeze.linalg.DenseMatrix
import breeze.linalg.DenseVector
import dk.bayes.dsl.factor.DoubleFactor
import dk.bayes.math.gaussian.canonical.CanonicalGaussian
import dk.bayes.math.gaussian.canonical.DenseCanonicalGaussian
import dk.bayes.math.gaussian.canonical.SparseCanonicalGaussian
import breeze.numerics._
import dk.bayes.math.accuracy.loglik
import dk.bayes.math.accuracy.binaryAcc
object calcHGPCAcc {

  /**
   * Builds the factor graph for `model`, calibrates it, then scores it.
   *
   * NOTE(review): the `(calib, iters)` result of the calibration is unused —
   * presumably `calibrateHgpcFactorGraph` mutates `hgpcFactorGraph` in place;
   * confirm, otherwise the graph scored below is uncalibrated.
   *
   * @return (acc,tpr,tnr)
   */
  def apply(model: HgpcModel): Tuple3[Double, Double, Double] = {
    val hgpcFactorGraph = HgpcFactorGraph(model)
    val (calib, iters) = calibrateHgpcFactorGraph(hgpcFactorGraph, maxIter = 10)
    apply(hgpcFactorGraph)
  }

  /**
   * Scores a calibrated factor graph against the observed outcomes.
   *
   * @return (acc,tpr,tnr) — overall binary accuracy, plus accuracy restricted
   *         to actual==1 (true positive rate) and actual==0 (true negative rate)
   */
  def apply(calibratedHgpcFactorGraph: HgpcFactorGraph): Tuple3[Double, Double, Double] = {
    // One [predicted probability of outcome 1, actual label] vector per Y factor.
    val predictedVSActual = calibratedHgpcFactorGraph.taskIds.flatMap { taskId =>
      calibratedHgpcFactorGraph.taskYFactorsMap(taskId).map { taskYFactor =>
        val outcome1Prob = taskYFactor.calcNewMsgV2()
        DenseVector(outcome1Prob, taskYFactor.v2.k)
      }
    }
    // Rows = observations; column 0 = predicted, column 1 = actual.
    val predictedVSActualMatrix = DenseVector.horzcat(predictedVSActual: _*).t
    val predicted = predictedVSActualMatrix(::, 0)
    val actual = predictedVSActualMatrix(::, 1)
    val acc = binaryAcc(predicted, actual)
    // Accuracy restricted to the positive (actual == 1) and negative (actual == 0) subsets.
    val tpr = binaryAcc(predicted(actual :== 1d).toDenseVector, actual(actual :== 1d).toDenseVector)
    val tnr = binaryAcc(predicted(actual :== 0d).toDenseVector, actual(actual :== 0d).toDenseVector)
    (acc, tpr, tnr)
  }
}
} | danielkorzekwa/bayes-scala-gp | src/main/scala/dk/gp/hgpc/util/calcHGPCAcc.scala | Scala | bsd-2-clause | 1,604 |
/*******************************************************************************
* Copyright (C) 2012 Łukasz Szpakowski.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package pl.luckboy.liftchess.engine
/** A castling-rights value, encoded as a 2-bit mask: bit 0 is the kingside
  * right, bit 1 the queenside right (see the constants in the companion
  * object: K = 1, Q = 2, KQ = 3).
  *
  * @author Łukasz Szpakowski
  */
final class Castling private(val id: Int, val name: String) extends EnumValue
{
  /** Complement within the 2-bit mask: the rights NOT contained in this value. */
  @inline
  def unary_~ : Castling =
    Castling(id ^ 3)

  /** Intersection of two sets of castling rights. */
  @inline
  def &(castling: Castling): Castling =
    Castling(id & castling.id)

  /** Union of two sets of castling rights. */
  @inline
  def |(castling: Castling): Castling =
    Castling(id | castling.id)
}
/** Companion for [[Castling]]: the four possible values and lookup by id.
  *
  * @author Łukasz Szpakowski
  */
object Castling
{
  val NoneCastling = new Castling(0, "-")
  val KingsideCastling = new Castling(1, "K")
  val QueensideCastling = new Castling(2, "Q")
  val AllCastling = new Castling(3, "KQ")

  // Indexed by id, so Values(id) is the value whose `id` field equals `id`.
  private val Values = makeArray(NoneCastling, KingsideCastling, QueensideCastling, AllCastling)

  def apply(id: Int): Castling =
    Values(id)

  /** Builds a 4-element array ordered like the castling ids (none, K, Q, KQ).
    *
    * NOTE(review): `ClassManifest` is deprecated since Scala 2.10 — consider
    * `ClassTag` if the project moves to a newer Scala version.
    */
  def makeArray[T](none: T, k: T, q: T, kq: T)(implicit m: ClassManifest[T]) =
    Array(none, k, q, kq)

  def values: Set[Castling] =
    Values.toSet
}
| luckboy/LiftChess | src/pl/luckboy/liftchess/engine/Castling.scala | Scala | lgpl-3.0 | 1,871 |
package cn.gridx.scala.spray.request.get
import spray.http.{ContentTypes, HttpEntity, HttpResponse, StatusCodes}
import spray.routing.{HttpService, Route}
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created by tao on 11/29/16.
*/
trait RouteService extends HttpService {
this: Processor =>
def route: Route = routeReceiveGetRequest
def routeReceiveGetRequest = (path( "get" / "request") & get) { ctx =>
// log.info("收到GET请求,等待3秒")
Future { utils.blockingOp() }
val result = "已完成, 返回!"
// log.info("即将返回响应")
ctx.complete(HttpResponse(StatusCodes.OK,
HttpEntity(ContentTypes.`application/json`, result.getBytes())))
}
}
| TaoXiao/Scala | spray/src/main/scala/cn/gridx/scala/spray/request/get/RouteService.scala | Scala | apache-2.0 | 754 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.util.Locale
import scala.collection.JavaConverters._
import scala.language.implicitConversions
import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.sql.catalyst.analysis.{Star, UnresolvedAlias, UnresolvedAttribute, UnresolvedFunction}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.util.usePrettyExpression
import org.apache.spark.sql.execution.aggregate.TypedAggregateExpression
import org.apache.spark.sql.execution.python.{PythonUDF, PythonUdfType}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{NumericType, StructType}
/**
* A set of methods for aggregations on a `DataFrame`, created by [[Dataset#groupBy groupBy]],
* [[Dataset#cube cube]] or [[Dataset#rollup rollup]] (and also `pivot`).
*
* The main method is the `agg` function, which has multiple variants. This class also contains
* some first-order statistics such as `mean`, `sum` for convenience.
*
* @note This class was named `GroupedData` in Spark 1.x.
*
* @since 2.0.0
*/
@InterfaceStability.Stable
class RelationalGroupedDataset protected[sql](
df: DataFrame,
groupingExprs: Seq[Expression],
groupType: RelationalGroupedDataset.GroupType) {
private[this] def toDF(aggExprs: Seq[Expression]): DataFrame = {
val aggregates = if (df.sparkSession.sessionState.conf.dataFrameRetainGroupColumns) {
groupingExprs ++ aggExprs
} else {
aggExprs
}
val aliasedAgg = aggregates.map(alias)
groupType match {
case RelationalGroupedDataset.GroupByType =>
Dataset.ofRows(
df.sparkSession, Aggregate(groupingExprs, aliasedAgg, df.logicalPlan))
case RelationalGroupedDataset.RollupType =>
Dataset.ofRows(
df.sparkSession, Aggregate(Seq(Rollup(groupingExprs)), aliasedAgg, df.logicalPlan))
case RelationalGroupedDataset.CubeType =>
Dataset.ofRows(
df.sparkSession, Aggregate(Seq(Cube(groupingExprs)), aliasedAgg, df.logicalPlan))
case RelationalGroupedDataset.PivotType(pivotCol, values) =>
val aliasedGrps = groupingExprs.map(alias)
Dataset.ofRows(
df.sparkSession, Pivot(aliasedGrps, pivotCol, values, aggExprs, df.logicalPlan))
}
}
// Wrap UnresolvedAttribute with UnresolvedAlias, as when we resolve UnresolvedAttribute, we
// will remove intermediate Alias for ExtractValue chain, and we need to alias it again to
// make it a NamedExpression.
private[this] def alias(expr: Expression): NamedExpression = expr match {
case u: UnresolvedAttribute => UnresolvedAlias(u)
case expr: NamedExpression => expr
case a: AggregateExpression if a.aggregateFunction.isInstanceOf[TypedAggregateExpression] =>
UnresolvedAlias(a, Some(Column.generateAlias))
case expr: Expression => Alias(expr, usePrettyExpression(expr).sql)()
}
private[this] def aggregateNumericColumns(colNames: String*)(f: Expression => AggregateFunction)
: DataFrame = {
val columnExprs = if (colNames.isEmpty) {
// No columns specified. Use all numeric columns.
df.numericColumns
} else {
// Make sure all specified columns are numeric.
colNames.map { colName =>
val namedExpr = df.resolve(colName)
if (!namedExpr.dataType.isInstanceOf[NumericType]) {
throw new AnalysisException(
s""""$colName" is not a numeric column. """ +
"Aggregation function can only be applied on a numeric column.")
}
namedExpr
}
}
toDF(columnExprs.map(expr => f(expr).toAggregateExpression()))
}
private[this] def strToExpr(expr: String): (Expression => Expression) = {
val exprToFunc: (Expression => Expression) = {
(inputExpr: Expression) => expr.toLowerCase(Locale.ROOT) match {
// We special handle a few cases that have alias that are not in function registry.
case "avg" | "average" | "mean" =>
UnresolvedFunction("avg", inputExpr :: Nil, isDistinct = false)
case "stddev" | "std" =>
UnresolvedFunction("stddev", inputExpr :: Nil, isDistinct = false)
// Also special handle count because we need to take care count(*).
case "count" | "size" =>
// Turn count(*) into count(1)
inputExpr match {
case s: Star => Count(Literal(1)).toAggregateExpression()
case _ => Count(inputExpr).toAggregateExpression()
}
case name => UnresolvedFunction(name, inputExpr :: Nil, isDistinct = false)
}
}
(inputExpr: Expression) => exprToFunc(inputExpr)
}
/**
* (Scala-specific) Compute aggregates by specifying the column names and
* aggregate methods. The resulting `DataFrame` will also contain the grouping columns.
*
* The available aggregate methods are `avg`, `max`, `min`, `sum`, `count`.
* {{{
* // Selects the age of the oldest employee and the aggregate expense for each department
* df.groupBy("department").agg(
* "age" -> "max",
* "expense" -> "sum"
* )
* }}}
*
* @since 1.3.0
*/
def agg(aggExpr: (String, String), aggExprs: (String, String)*): DataFrame = {
toDF((aggExpr +: aggExprs).map { case (colName, expr) =>
strToExpr(expr)(df(colName).expr)
})
}
/**
* (Scala-specific) Compute aggregates by specifying a map from column name to
* aggregate methods. The resulting `DataFrame` will also contain the grouping columns.
*
* The available aggregate methods are `avg`, `max`, `min`, `sum`, `count`.
* {{{
* // Selects the age of the oldest employee and the aggregate expense for each department
* df.groupBy("department").agg(Map(
* "age" -> "max",
* "expense" -> "sum"
* ))
* }}}
*
* @since 1.3.0
*/
def agg(exprs: Map[String, String]): DataFrame = {
toDF(exprs.map { case (colName, expr) =>
strToExpr(expr)(df(colName).expr)
}.toSeq)
}
/**
* (Java-specific) Compute aggregates by specifying a map from column name to
* aggregate methods. The resulting `DataFrame` will also contain the grouping columns.
*
* The available aggregate methods are `avg`, `max`, `min`, `sum`, `count`.
* {{{
* // Selects the age of the oldest employee and the aggregate expense for each department
* import com.google.common.collect.ImmutableMap;
* df.groupBy("department").agg(ImmutableMap.of("age", "max", "expense", "sum"));
* }}}
*
* @since 1.3.0
*/
def agg(exprs: java.util.Map[String, String]): DataFrame = {
agg(exprs.asScala.toMap)
}
/**
* Compute aggregates by specifying a series of aggregate columns. Note that this function by
* default retains the grouping columns in its output. To not retain grouping columns, set
* `spark.sql.retainGroupColumns` to false.
*
* The available aggregate methods are defined in [[org.apache.spark.sql.functions]].
*
* {{{
* // Selects the age of the oldest employee and the aggregate expense for each department
*
* // Scala:
* import org.apache.spark.sql.functions._
* df.groupBy("department").agg(max("age"), sum("expense"))
*
* // Java:
* import static org.apache.spark.sql.functions.*;
* df.groupBy("department").agg(max("age"), sum("expense"));
* }}}
*
* Note that before Spark 1.4, the default behavior is to NOT retain grouping columns. To change
* to that behavior, set config variable `spark.sql.retainGroupColumns` to `false`.
* {{{
* // Scala, 1.3.x:
* df.groupBy("department").agg($"department", max("age"), sum("expense"))
*
* // Java, 1.3.x:
* df.groupBy("department").agg(col("department"), max("age"), sum("expense"));
* }}}
*
* @since 1.3.0
*/
@scala.annotation.varargs
def agg(expr: Column, exprs: Column*): DataFrame = {
toDF((expr +: exprs).map {
case typed: TypedColumn[_, _] =>
typed.withInputType(df.exprEnc, df.logicalPlan.output).expr
case c => c.expr
})
}
/**
* Count the number of rows for each group.
* The resulting `DataFrame` will also contain the grouping columns.
*
* @since 1.3.0
*/
def count(): DataFrame = toDF(Seq(Alias(Count(Literal(1)).toAggregateExpression(), "count")()))
/**
* Compute the average value for each numeric columns for each group. This is an alias for `avg`.
* The resulting `DataFrame` will also contain the grouping columns.
* When specified columns are given, only compute the average values for them.
*
* @since 1.3.0
*/
  @scala.annotation.varargs
  def mean(colNames: String*): DataFrame = {
    // Delegates to the shared numeric-aggregation helper with the Average function;
    // with no column names given, all numeric columns are averaged.
    aggregateNumericColumns(colNames : _*)(Average)
  }
/**
* Compute the max value for each numeric columns for each group.
* The resulting `DataFrame` will also contain the grouping columns.
* When specified columns are given, only compute the max values for them.
*
* @since 1.3.0
*/
  @scala.annotation.varargs
  def max(colNames: String*): DataFrame = {
    // Delegates to the shared numeric-aggregation helper with the Max function;
    // with no column names given, all numeric columns are aggregated.
    aggregateNumericColumns(colNames : _*)(Max)
  }
/**
* Compute the mean value for each numeric columns for each group.
* The resulting `DataFrame` will also contain the grouping columns.
* When specified columns are given, only compute the mean values for them.
*
* @since 1.3.0
*/
  @scala.annotation.varargs
  def avg(colNames: String*): DataFrame = {
    // Same implementation as `mean` (the two are documented as aliases of each other).
    aggregateNumericColumns(colNames : _*)(Average)
  }
/**
* Compute the min value for each numeric column for each group.
* The resulting `DataFrame` will also contain the grouping columns.
* When specified columns are given, only compute the min values for them.
*
* @since 1.3.0
*/
  @scala.annotation.varargs
  def min(colNames: String*): DataFrame = {
    // Delegates to the shared numeric-aggregation helper with the Min function;
    // with no column names given, all numeric columns are aggregated.
    aggregateNumericColumns(colNames : _*)(Min)
  }
/**
* Compute the sum for each numeric columns for each group.
* The resulting `DataFrame` will also contain the grouping columns.
* When specified columns are given, only compute the sum for them.
*
* @since 1.3.0
*/
  @scala.annotation.varargs
  def sum(colNames: String*): DataFrame = {
    // Delegates to the shared numeric-aggregation helper with the Sum function;
    // with no column names given, all numeric columns are summed.
    aggregateNumericColumns(colNames : _*)(Sum)
  }
/**
* Pivots a column of the current `DataFrame` and performs the specified aggregation.
*
* There are two versions of `pivot` function: one that requires the caller to specify the list
* of distinct values to pivot on, and one that does not. The latter is more concise but less
* efficient, because Spark needs to first compute the list of distinct values internally.
*
* {{{
* // Compute the sum of earnings for each year by course with each course as a separate column
* df.groupBy("year").pivot("course", Seq("dotNET", "Java")).sum("earnings")
*
* // Or without specifying column values (less efficient)
* df.groupBy("year").pivot("course").sum("earnings")
* }}}
*
* @param pivotColumn Name of the column to pivot.
* @since 1.6.0
*/
def pivot(pivotColumn: String): RelationalGroupedDataset = {
// This is to prevent unintended OOM errors when the number of distinct values is large
val maxValues = df.sparkSession.sessionState.conf.dataFramePivotMaxValues
// Get the distinct values of the column and sort them so its consistent
val values = df.select(pivotColumn)
.distinct()
.sort(pivotColumn) // ensure that the output columns are in a consistent logical order
.rdd
.map(_.get(0))
.take(maxValues + 1)
.toSeq
if (values.length > maxValues) {
throw new AnalysisException(
s"The pivot column $pivotColumn has more than $maxValues distinct values, " +
"this could indicate an error. " +
s"If this was intended, set ${SQLConf.DATAFRAME_PIVOT_MAX_VALUES.key} " +
"to at least the number of distinct values of the pivot column.")
}
pivot(pivotColumn, values)
}
/**
* Pivots a column of the current `DataFrame` and performs the specified aggregation.
* There are two versions of pivot function: one that requires the caller to specify the list
* of distinct values to pivot on, and one that does not. The latter is more concise but less
* efficient, because Spark needs to first compute the list of distinct values internally.
*
* {{{
* // Compute the sum of earnings for each year by course with each course as a separate column
* df.groupBy("year").pivot("course", Seq("dotNET", "Java")).sum("earnings")
*
* // Or without specifying column values (less efficient)
* df.groupBy("year").pivot("course").sum("earnings")
* }}}
*
* @param pivotColumn Name of the column to pivot.
* @param values List of values that will be translated to columns in the output DataFrame.
* @since 1.6.0
*/
  def pivot(pivotColumn: String, values: Seq[Any]): RelationalGroupedDataset = {
    // Only a plain groupBy may be pivoted, and only once; the case order matters
    // because the final wildcard rejects every other group type.
    groupType match {
      case RelationalGroupedDataset.GroupByType =>
        // Re-wrap the grouping with a PivotType carrying the resolved pivot column
        // and the literal values that become the output columns.
        new RelationalGroupedDataset(
          df,
          groupingExprs,
          RelationalGroupedDataset.PivotType(df.resolve(pivotColumn), values.map(Literal.apply)))
      case _: RelationalGroupedDataset.PivotType =>
        throw new UnsupportedOperationException("repeated pivots are not supported")
      case _ =>
        throw new UnsupportedOperationException("pivot is only supported after a groupBy")
    }
  }
/**
* (Java-specific) Pivots a column of the current `DataFrame` and performs the specified
* aggregation.
*
* There are two versions of pivot function: one that requires the caller to specify the list
* of distinct values to pivot on, and one that does not. The latter is more concise but less
* efficient, because Spark needs to first compute the list of distinct values internally.
*
* {{{
* // Compute the sum of earnings for each year by course with each course as a separate column
* df.groupBy("year").pivot("course", Arrays.<Object>asList("dotNET", "Java")).sum("earnings");
*
* // Or without specifying column values (less efficient)
* df.groupBy("year").pivot("course").sum("earnings");
* }}}
*
* @param pivotColumn Name of the column to pivot.
* @param values List of values that will be translated to columns in the output DataFrame.
* @since 1.6.0
*/
def pivot(pivotColumn: String, values: java.util.List[Any]): RelationalGroupedDataset = {
pivot(pivotColumn, values.asScala)
}
/**
* Applies the given serialized R function `func` to each group of data. For each unique group,
* the function will be passed the group key and an iterator that contains all of the elements in
* the group. The function can return an iterator containing elements of an arbitrary type which
* will be returned as a new `DataFrame`.
*
* This function does not support partial aggregation, and as a result requires shuffling all
* the data in the [[Dataset]]. If an application intends to perform an aggregation over each
* key, it is best to use the reduce function or an
* `org.apache.spark.sql.expressions#Aggregator`.
*
* Internally, the implementation will spill to disk if any given group is too large to fit into
* memory. However, users must take care to avoid materializing the whole iterator for a group
* (for example, by calling `toList`) unless they are sure that this is possible given the memory
* constraints of their cluster.
*
* @since 2.0.0
*/
  private[sql] def flatMapGroupsInR(
      f: Array[Byte],
      packageNames: Array[Byte],
      broadcastVars: Array[Broadcast[Object]],
      outputSchema: StructType): DataFrame = {
    // Give every grouping expression a name so it can be referenced as an attribute.
    val groupingNamedExpressions = groupingExprs.map(alias)
    val groupingCols = groupingNamedExpressions.map(Column(_))
    // A DataFrame over just the grouping columns supplies the key deserializer below.
    val groupingDataFrame = df.select(groupingCols : _*)
    val groupingAttributes = groupingNamedExpressions.map(_.toAttribute)
    // Assemble the FlatMapGroupsInR logical node; the argument order here must match
    // the node's constructor exactly.
    Dataset.ofRows(
      df.sparkSession,
      FlatMapGroupsInR(
        f,
        packageNames,
        broadcastVars,
        outputSchema,
        groupingDataFrame.exprEnc.deserializer,
        df.exprEnc.deserializer,
        df.exprEnc.schema,
        groupingAttributes,
        df.logicalPlan.output,
        df.logicalPlan))
  }
/**
* Applies a grouped vectorized python user-defined function to each group of data.
* The user-defined function defines a transformation: `pandas.DataFrame` -> `pandas.DataFrame`.
* For each group, all elements in the group are passed as a `pandas.DataFrame` and the results
* for all groups are combined into a new [[DataFrame]].
*
* This function does not support partial aggregation, and requires shuffling all the data in
* the [[DataFrame]].
*
* This function uses Apache Arrow as serialization format between Java executors and Python
* workers.
*/
  private[sql] def flatMapGroupsInPandas(expr: PythonUDF): DataFrame = {
    // The UDF must be a grouped (pandas) vectorized UDF returning a struct.
    require(expr.pythonUdfType == PythonUdfType.PANDAS_GROUPED_UDF,
      "Must pass a grouped vectorized python udf")
    require(expr.dataType.isInstanceOf[StructType],
      "The returnType of the vectorized python udf must be a StructType")
    // Name any anonymous grouping expressions so they become attributes.
    val groupingNamedExpressions = groupingExprs.map {
      case ne: NamedExpression => ne
      case other => Alias(other, other.toString)()
    }
    val groupingAttributes = groupingNamedExpressions.map(_.toAttribute)
    val child = df.logicalPlan
    // Prepend the grouping columns to the child's output so the node can see them.
    val project = Project(groupingNamedExpressions ++ child.output, child)
    // The node's output schema is exactly the UDF's declared struct return type.
    val output = expr.dataType.asInstanceOf[StructType].toAttributes
    val plan = FlatMapGroupsInPandas(groupingAttributes, expr, output, project)
    Dataset.ofRows(df.sparkSession, plan)
  }
override def toString: String = {
val builder = new StringBuilder
builder.append("RelationalGroupedDataset: [grouping expressions: [")
val kFields = groupingExprs.map(_.asInstanceOf[NamedExpression]).map {
case f => s"${f.name}: ${f.dataType.simpleString(2)}"
}
builder.append(kFields.take(2).mkString(", "))
if (kFields.length > 2) {
builder.append(" ... " + (kFields.length - 2) + " more field(s)")
}
builder.append(s"], value: ${df.toString}, type: $groupType]").toString()
}
}
private[sql] object RelationalGroupedDataset {

  /** Factory mirroring the class constructor. */
  def apply(
      df: DataFrame,
      groupingExprs: Seq[Expression],
      groupType: GroupType): RelationalGroupedDataset = {
    // The redundant `groupType: GroupType` ascription on the argument was removed;
    // the parameter already has that exact type.
    new RelationalGroupedDataset(df, groupingExprs, groupType)
  }

  /**
   * The kind of grouping this dataset represents. `toString` strips the Scala object
   * suffix and the "Type" suffix, e.g. GroupByType renders as "GroupBy".
   */
  private[sql] trait GroupType {
    override def toString: String = getClass.getSimpleName.stripSuffix("$").stripSuffix("Type")
  }

  /** Plain GROUP BY. */
  private[sql] object GroupByType extends GroupType

  /** GROUP BY with CUBE. */
  private[sql] object CubeType extends GroupType

  /** GROUP BY with ROLLUP. */
  private[sql] object RollupType extends GroupType

  /** Pivot on `pivotCol`, producing one output column per literal in `values`. */
  private[sql] case class PivotType(pivotCol: Expression, values: Seq[Literal]) extends GroupType
}
| 1haodian/spark | sql/core/src/main/scala/org/apache/spark/sql/RelationalGroupedDataset.scala | Scala | apache-2.0 | 20,131 |
import scala.tools.partest._
object Test extends DirectTest {
  // Unused: the sources under test are supplied via `macros_1` and `test_2` below.
  def code = ???

  // Source of the macro provider, compiled first into testOutput.
  def macros_1 = """
    import scala.reflect.macros.blackbox.Context
    object Impls {
      def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
    }
    object Macros {
      //import Impls._
      def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
      def foo: Unit = macro impl
    }
  """

  private def pathSeparator = sys.props("path.separator")

  def compileMacros() = {
    // Macro implementations need scala-library and scala-reflect on the classpath.
    val cp = List(sys.props("partest.lib"), sys.props("partest.reflect")).mkString(pathSeparator)
    compileString(newCompiler("-language:experimental.macros", "-cp", cp, "-d", testOutput.path))(macros_1)
  }

  // Source of the macro client, compiled against the classes emitted above.
  def test_2 = """
    object Test extends App {
      println(Macros.foo)
    }
  """

  def compileTest() = {
    val cp = List(sys.props("partest.lib"), testOutput.path).mkString(pathSeparator)
    compileString(newCompiler("-cp", cp, "-d", testOutput.path))(test_2)
  }

  def show(): Unit = {
    log("Compiling Macros_1...")
    if (compileMacros()) {
      log("Compiling Test_2...")
      log(if (compileTest()) "Success!" else "Failed...")
    }
  }
}
| martijnhoekstra/scala | test/files/run/t5940.scala | Scala | apache-2.0 | 1,170 |
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.ElasticDsl._
import com.sksamuel.elastic4s.analyzers.KeywordAnalyzer
import com.sksamuel.elastic4s.mappings.FieldType.StringType
import com.sksamuel.elastic4s.testkit.ElasticSugar
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram
import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms
import org.elasticsearch.search.aggregations.metrics.avg.InternalAvg
import org.elasticsearch.search.aggregations.metrics.cardinality.InternalCardinality
import org.elasticsearch.search.aggregations.metrics.max.InternalMax
import org.elasticsearch.search.aggregations.metrics.min.InternalMin
import org.elasticsearch.search.aggregations.metrics.scripted.InternalScriptedMetric
import org.elasticsearch.search.aggregations.metrics.sum.InternalSum
import org.elasticsearch.search.aggregations.metrics.valuecount.InternalValueCount
import org.scalatest.{FreeSpec, Matchers}
import scala.collection.JavaConverters._
/**
 * Exercises the elastic4s aggregation DSL against an in-process Elasticsearch node
 * (provided by ElasticSugar). Ten character documents are indexed once up front and
 * each test asserts on a different aggregation type over the same data set.
 */
class AggregationsTest extends FreeSpec with Matchers with ElasticSugar {

  // "job" uses the keyword analyzer so terms aggregations bucket on the whole job
  // title (e.g. "meth kingpin") rather than on individual tokens.
  client.execute {
    create index "aggregations" mappings {
      mapping("breakingbad") fields (
        "job" typed StringType analyzer KeywordAnalyzer
      )
    }
  }.await

  // Seed data; some documents deliberately omit "actor" so the missing aggregation
  // has something to count.
  client.execute(
    bulk(
      index into "aggregations/breakingbad" fields("name" -> "walter white", "job" -> "meth kingpin", "age" -> 50, "actor" -> "bryan"),
      index into "aggregations/breakingbad" fields("name" -> "hank schrader", "job" -> "dea agent", "age" -> 55, "actor" -> "dean"),
      index into "aggregations/breakingbad" fields("name" -> "jesse pinkman", "job" -> "meth sidekick", "age" -> 30),
      index into "aggregations/breakingbad" fields("name" -> "gus fring", "job" -> "meth kingpin", "age" -> 60),
      index into "aggregations/breakingbad" fields("name" -> "steven gomez", "job" -> "dea agent", "age" -> 50),
      index into "aggregations/breakingbad" fields("name" -> "saul goodman", "job" -> "lawyer", "age" -> 55),
      index into "aggregations/breakingbad" fields("name" -> "Huell Babineaux", "job" -> "heavy", "age" -> 43, "actor" -> "lavell"),
      index into "aggregations/breakingbad" fields("name" -> "mike ehrmantraut", "job" -> "heavy", "age" -> 45),
      index into "aggregations/breakingbad" fields("name" -> "lydia rodarte quayle", "job" -> "meth sidekick", "age" -> 40),
      index into "aggregations/breakingbad" fields("name" -> "todd alquist", "job" -> "meth sidekick", "age" -> 26)
    )
  ).await

  // Ensure all ten documents are searchable before any test runs.
  refresh("aggregations")
  blockUntilCount(10, "aggregations")

  "terms aggregation" - {
    "should group by field" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation terms "agg1" field "job"
        }
      }.await
      resp.totalHits shouldBe 10
      val agg = resp.aggregations.getAsMap.get("agg1").asInstanceOf[StringTerms]
      agg.getBuckets.size shouldBe 5
      agg.getBucketByKey("meth kingpin").getDocCount shouldBe 2
      agg.getBucketByKey("meth sidekick").getDocCount shouldBe 3
      agg.getBucketByKey("dea agent").getDocCount shouldBe 2
      agg.getBucketByKey("lawyer").getDocCount shouldBe 1
      agg.getBucketByKey("heavy").getDocCount shouldBe 2
    }
    "should only include matching documents in the query" in {
      val resp = client.execute {
        // should match 3 documents
        search in "aggregations/breakingbad" query prefixQuery("name" -> "s") aggregations {
          aggregation terms "agg1" field "job"
        }
      }.await
      resp.totalHits shouldBe 3
      val aggs = resp.aggregations.getAsMap.get("agg1").asInstanceOf[StringTerms]
      aggs.getBuckets.size shouldBe 2
      aggs.getBucketByKey("dea agent").getDocCount shouldBe 2
      aggs.getBucketByKey("lawyer").getDocCount shouldBe 1
    }
  }

  "avg aggregation" - {
    "should average by field" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation avg "agg1" field "age"
        }
      }.await
      resp.totalHits shouldBe 10
      val agg = resp.aggregations.getAsMap.get("agg1").asInstanceOf[InternalAvg]
      agg.getValue shouldBe 45.4
    }
    "should only include matching documents in the query" in {
      val resp = client.execute {
        // should match 3 documents
        search in "aggregations/breakingbad" query prefixQuery("name" -> "g") aggregations {
          aggregation avg "agg1" field "age"
        }
      }.await
      resp.totalHits shouldBe 3
      val agg = resp.aggregations.getAsMap.get("agg1").asInstanceOf[InternalAvg]
      agg.getValue shouldBe 55
    }
  }

  "cardinality aggregation" - {
    "should count distinct values" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation cardinality "agg1" field "job"
        }
      }.await
      resp.totalHits shouldBe 10
      val aggs = resp.aggregations.getAsMap.get("agg1").asInstanceOf[InternalCardinality]
      aggs.getValue shouldBe 5
    }
  }

  "missing aggregation" - {
    "should return documents missing a value" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation missing "agg1" field "actor"
        }
      }.await
      resp.totalHits shouldBe 10
      val aggs = resp.aggregations.getAsMap.get("agg1").asInstanceOf[InternalMissing]
      aggs.getDocCount shouldBe 7
    }
  }

  "max aggregation" - {
    "should count max value for field" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation max "agg1" field "age"
        }
      }.await
      resp.totalHits shouldBe 10
      val aggs = resp.aggregations.getAsMap.get("agg1").asInstanceOf[InternalMax]
      aggs.getValue shouldBe 60
    }
  }

  "min aggregation" - {
    "should count min value for field" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation min "agg1" field "age"
        }
      }.await
      resp.totalHits shouldBe 10
      val aggs = resp.aggregations.getAsMap.get("agg1").asInstanceOf[InternalMin]
      aggs.getValue shouldBe 26
    }
  }

  "sum aggregation" - {
    "should sum values for field" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation sum "agg1" field "age"
        }
      }.await
      resp.totalHits shouldBe 10
      val aggs = resp.aggregations.getAsMap.get("agg1").asInstanceOf[InternalSum]
      aggs.getValue shouldBe 454.0
    }
  }

  "value count aggregation" - {
    "should sum values for field" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation count "agg1" field "age"
        }
      }.await
      resp.totalHits shouldBe 10
      val aggs = resp.aggregations.getAsMap.get("agg1").asInstanceOf[InternalValueCount]
      aggs.getValue shouldBe 10
    }
  }

  "histogram aggregation" - {
    "should create histogram by field" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation histogram "h" field "age" interval 10
        }
      }.await
      resp.totalHits shouldBe 10
      val buckets = resp.aggregations.get[Histogram]("h").getBuckets.asScala
      buckets.size shouldBe 5
      buckets.find(_.getKey == 20).get.getDocCount shouldBe 1
      buckets.find(_.getKey == 30).get.getDocCount shouldBe 1
      buckets.find(_.getKey == 40).get.getDocCount shouldBe 3
      buckets.find(_.getKey == 50).get.getDocCount shouldBe 4
      buckets.find(_.getKey == 60).get.getDocCount shouldBe 1
    }
    "should use offset" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation histogram "h" field "age" interval 10 offset 5
        }
      }.await
      resp.totalHits shouldBe 10
      val buckets = resp.aggregations.get[Histogram]("h").getBuckets.asScala
      buckets.size shouldBe 4
      buckets.find(_.getKey == 25).get.getDocCount shouldBe 2
      buckets.find(_.getKey == 35).get.getDocCount shouldBe 2
      buckets.find(_.getKey == 45).get.getDocCount shouldBe 3
      buckets.find(_.getKey == 55).get.getDocCount shouldBe 3
    }
    "should respect min_doc_count" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation histogram "agg1" field "age" interval 10 minDocCount 2
        }
      }.await
      resp.totalHits shouldBe 10
      val buckets = resp.aggregations.get[Histogram]("agg1").getBuckets.asScala
      buckets.size shouldBe 2
      buckets.find(_.getKey == 40).get.getDocCount shouldBe 3
      buckets.find(_.getKey == 50).get.getDocCount shouldBe 4
    }
    "should respect ordering" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation histogram "agg1" field "age" interval 10 order Histogram.Order.COUNT_DESC
        }
      }.await
      resp.totalHits shouldBe 10
      val buckets = resp.aggregations.get[Histogram]("agg1").getBuckets.asScala
      buckets.size shouldBe 5
      buckets.head.getKeyAsString shouldBe "50"
      buckets.tail.head.getKeyAsString shouldBe "40"
    }
  }

  "scripted aggregation" - {
    // Map/combine/reduce scripts count the total number of name tokens across docs.
    "should compute a word count on field name" in {
      val resp = client.execute {
        search in "aggregations/breakingbad" aggregations {
          aggregation.scriptedMetric("agg1")
            .initScript("_agg['wordCount'] = []")
            .mapScript("_agg.wordCount.add(doc['name'].values.size())")
            .combineScript("wc = 0; for(c in _agg.wordCount) { wc += c }; return wc")
            .reduceScript("wc = 0; for(a in _aggs) { wc += a }; return wc")
        }
      }.await
      val agg = resp.aggregations.get[InternalScriptedMetric]("agg1")
      agg.aggregation().asInstanceOf[Integer] shouldBe 21
    }
  }
}
| muuki88/elastic4s | elastic4s-core-tests/src/test/scala/com/sksamuel/elastic4s/AggregationsTest.scala | Scala | apache-2.0 | 10,136 |
object Testo {
  // NOTE(review): the "<caret>" token below makes this file invalid Scala on its own;
  // it appears to be an IDE test-harness caret marker — confirm before reformatting
  // or "fixing" it.
  trait A {
    type Z <: U
    class U
    class O {
      def foo(x: Z) = 1
    }
  }
  class H extends A {
    class Z extends U
    class I extends O {
      // I.foo overrides O.foo; H narrows Z to a concrete subclass of U.
      override def <caret>foo(x: Z) = 2
    }
  }
} | ilinum/intellij-scala | testdata/supers/class/ClassAliasDependent.scala | Scala | apache-2.0 | 225 |
package com.nulabinc.backlog.r2b.mapping.collector.actor
import java.util.concurrent.CountDownLatch
import akka.actor.SupervisorStrategy.Restart
import akka.actor.{Actor, ActorRef, OneForOneStrategy, Props}
import akka.routing.SmallestMailboxPool
import com.nulabinc.backlog.migration.common.conf.BacklogConfiguration
import com.nulabinc.backlog.migration.common.dsl.ConsoleDSL
import com.nulabinc.backlog.migration.common.utils.{ConsoleOut, Logging, ProgressBar}
import com.nulabinc.backlog.r2b.mapping.collector.core.{MappingContext, MappingData}
import com.nulabinc.backlog4j.BacklogAPIException
import com.osinka.i18n.Messages
import com.taskadapter.redmineapi.bean.User
import monix.eval.Task
import monix.execution.Scheduler
import scala.concurrent.duration._
/**
* @author
* uchida
*/
/**
 * Coordinator actor: pages through every Redmine issue id in the project and fans the
 * ids out to a pool of [[IssueActor]]s, blocking until every issue has been analyzed.
 */
private[collector] class IssuesActor(mappingContext: MappingContext)(implicit
    s: Scheduler,
    consoleDSL: ConsoleDSL[Task]
) extends Actor
    with BacklogConfiguration
    with Logging {

  // Child supervision: restart on rate limiting (HTTP 429) or dropped streams;
  // anything else is fatal and terminates the whole process.
  private[this] val strategy =
    OneForOneStrategy(maxNrOfRetries = 5, withinTimeRange = 10 seconds) {
      case e: BacklogAPIException if e.getMessage.contains("429") =>
        Restart
      case e: BacklogAPIException if e.getMessage.contains("Stream closed") =>
        Restart
      case e =>
        ConsoleOut.error("Fatal error: " + e.getMessage)
        // NOTE(review): "\\n" joins the stack trace with the two characters
        // backslash+n, not a newline — confirm a plain "\n" wasn't intended.
        logger.error(e.getStackTrace.mkString("\\n"))
        sys.exit(2)
    }

  // Page size for issue listing requests.
  private[this] val limit: Int = exportLimitAtOnce
  // Total issues to process; also sizes the completion latch counted down per issue.
  private[this] val allCount = mappingContext.issueService.countIssues()
  private[this] val completion = new CountDownLatch(allCount)

  // Progress reporters: one for per-issue analysis, one for the id-collection pages.
  private[this] val console =
    (ProgressBar.progress _)(
      Messages("common.issues"),
      Messages("message.analyzing"),
      Messages("message.analyzed")
    )
  private[this] val issuesInfoProgress =
    (ProgressBar.progress _)(
      Messages("common.issues_info"),
      Messages("message.collecting"),
      Messages("message.collected")
    )

  def receive: Receive = {
    case IssuesActor.Do(mappingData: MappingData, allUsers: Seq[User]) =>
      val router =
        SmallestMailboxPool(akkaMailBoxPool, supervisorStrategy = strategy)
      val issueActor = context.actorOf(
        router.props(
          Props(
            new IssueActor(mappingContext.issueService, mappingData, allUsers)
          )
        )
      )
      // `0 until (allCount, limit)` is a stepped range: one element per page offset.
      (0 until (allCount, limit))
        .foldLeft(Seq.empty[Int]) { (acc, offset) =>
          acc concat issueIds(offset)
        }
        .map(issues)
        .foreach(_(issueActor))
      // Block until every dispatched issue has counted the latch down, then reply.
      completion.await
      sender() ! IssuesActor.Done
  }

  // Fetches one page of issue ids (all statuses, excluding subprojects) and advances
  // the collection progress bar.
  private[this] def issueIds(offset: Int): Seq[Int] = {
    val params =
      Map(
        "offset" -> offset.toString,
        "limit" -> limit.toString,
        "project_id" -> mappingContext.projectId.value.toString,
        "status_id" -> "*",
        "subproject_id" -> "!*"
      )
    val ids =
      mappingContext.issueService.allIssues(params).map(_.getId.intValue())
    issuesInfoProgress(((offset / limit) + 1), ((allCount / limit) + 1))
    ids
  }

  // Curried dispatch: sends one issue id to the worker pool.
  private[this] def issues(issueId: Int)(issueActor: ActorRef) = {
    issueActor ! IssueActor.Do(issueId, completion, allCount, console)
  }
}
private[collector] object IssuesActor {

  // Protocol: `Do` starts collection; `Done` is sent back once all issues are analyzed.
  case class Do(mappingData: MappingData, allUsers: Seq[User])

  case object Done
}
| nulab/BacklogMigration-Redmine | src/main/scala/com/nulabinc/backlog/r2b/mapping/collector/actor/IssuesActor.scala | Scala | mit | 3,379 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ml.dmlc.mxnetexamples.neuralstyle.end2end
import ml.dmlc.mxnet.Shape
import ml.dmlc.mxnet.Context
import ml.dmlc.mxnet.NDArray
import ml.dmlc.mxnet.Symbol
import ml.dmlc.mxnet.Initializer
import org.slf4j.LoggerFactory
/**
* @author Depeng Liang
*/
/**
 * Builders for the end-to-end neural-style pipeline: pretrained-weight
 * initialization plus symbol/module constructors for the style, content and loss
 * networks, all based on the VGG-19 symbol.
 */
object Basic {

  /**
   * Initializer that copies pretrained weights into a network whose parameter names
   * are prefixed with `prefix` + "_". `params` is keyed "arg:<name>" / "aux:<name>";
   * parameters found in neither map are zero-initialized with a log message.
   */
  class PretrainedInit(prefix: String, params: Map[String, NDArray],
      verbose: Boolean = false) extends Initializer {

    private val logger = LoggerFactory.getLogger(classOf[PretrainedInit])

    // +1 skips the underscore separating the prefix from the real parameter name.
    private val prefixLen = prefix.length() + 1
    private val argParams = params.filter(_._1.startsWith("arg:"))
    private val auxParams = params.filter(_._1.startsWith("aux:"))
    private val argNames = argParams.keySet.map(_.substring(4))
    private val auxNames = auxParams.keySet.map(_.substring(4))

    override def initWeight(name: String, arr: NDArray): Unit = {
      // Strip the module prefix to recover the pretrained parameter name.
      val key = name.substring(prefixLen)
      if (this.argNames.contains(key)) {
        if (verbose) logger.info(s"Init $name")
        arr.set(this.argParams(s"arg:$key"))
      } else if (this.auxNames.contains(key)) {
        if (verbose) logger.info(s"Init $name")
        arr.set(this.auxParams(s"aux:$key"))
      } else {
        logger.info(s"Unknown params: $name, init with 0")
        arr.set(0f)
      }
    }

    // No default initialization: everything is handled by initWeight above.
    override def initDefault(name: String, arr: NDArray): Unit = {
    }
  }

  // Inference-only module producing the Gram matrices of the VGG style layers.
  def getStyleModule(prefix: String, dShape: Shape,
      ctx: Context, params: Map[String, NDArray]): Module = {
    val inputShape = Map(s"${prefix}_data" -> dShape)
    val (style, content) = ModelVgg19.getVggSymbol(prefix)
    val (gram, gScale) = styleGramSymbol(inputShape, style)
    val init = new PretrainedInit(prefix, params, true)
    new Module(symbol = gram, context = ctx,
      dataShapes = Map(s"${prefix}_data" -> dShape),
      initializer = init, forTraining = false)
  }

  // Builds Gram-matrix symbols for each style layer plus per-layer gradient scales.
  def styleGramSymbol(inputShape: Map[String, Shape], style: Symbol): (Symbol, List[Int]) = {
    val (_, outputShape, _) = style.inferShape(inputShape)
    var gramList = List[Symbol]()
    var gradScale = List[Int]()
    for (i <- 0 until style.listOutputs().length) {
      val shape = outputShape(i)
      // Flatten each feature map to (channels, H*W) before the Gram product.
      val x = Symbol.Reshape()()(Map("data" -> style.get(i),
        "shape" -> Shape(shape(1), shape(2) * shape(3))))
      // use fully connected to quickly do dot(x, x^T)
      val gram = Symbol.FullyConnected()()(Map("data" -> x, "weight" -> x,
        "no_bias" -> true, "num_hidden" -> shape(1)))
      gramList = gramList :+ gram
      gradScale = gradScale :+ (shape(1) * shape(2) * shape(3) * shape(1))
    }
    (Symbol.Group(gramList: _*), gradScale)
  }

  // Squared-error losses against target Gram matrices and the target content layer.
  def getLoss(gram: Symbol, content: Symbol): (Symbol, Symbol) = {
    var gramLoss = List[Symbol]()
    for (i <- 0 until gram.listOutputs().length) {
      val gvar = Symbol.Variable(s"target_gram_$i")
      gramLoss = gramLoss :+ Symbol.sum()(Symbol.square()(gvar - gram.get(i))())()
    }
    val cvar = Symbol.Variable("target_content")
    val contentLoss = Symbol.sum()(Symbol.square()(cvar - content)())()
    (Symbol.Group(gramLoss: _*), contentLoss)
  }

  // Inference-only module exposing just the VGG content layer.
  def getContentModule(prefix: String, dShape: Shape,
      ctx: Context, params: Map[String, NDArray]): Module = {
    val (_, sym) = ModelVgg19.getVggSymbol(prefix, true)
    val init = new PretrainedInit(prefix, params)
    new Module(symbol = sym, context = ctx,
      dataShapes = Map(s"${prefix}_data" -> dShape),
      initializer = init, forTraining = false)
  }

  // Trainable loss module (with input gradients) plus the style gradient scales.
  def getLossModule(prefix: String, dShape: Shape,
      ctx: Context, params: Map[String, NDArray]): (Module, List[Int]) = {
    val inputShape = Map(s"${prefix}_data" -> dShape)
    val (style, content) = ModelVgg19.getVggSymbol(prefix)
    val (gram, gScale) = styleGramSymbol(inputShape, style)
    val (styleLoss, contentLoss) = getLoss(gram, content)
    val sym = Symbol.Group(styleLoss, contentLoss)
    val init = new PretrainedInit(prefix, params, true)
    val mod = new Module(symbol = sym, context = ctx,
      dataShapes = Map(s"${prefix}_data" -> dShape),
      initializer = init, forTraining = true,
      inputsNeedGrad = true)
    (mod, gScale)
  }
}
| Mega-DatA-Lab/mxnet | scala-package/examples/src/main/scala/ml/dmlc/mxnetexamples/neuralstyle/end2end/Basic.scala | Scala | apache-2.0 | 5,055 |
/* scala-stm - (c) 2009-2012, Stanford University, PPL */
package scala.concurrent.stm
import org.scalatest.FunSuite
import java.util.concurrent.CountDownLatch
/**
 * Tests for `atomic.unrecorded`: transactions whose writes are discarded and whose
 * reads are not recorded in the enclosing transaction's read set.
 */
class UnrecordedTxnSuite extends FunSuite {

  test("fixed unrecorded txn") {
    val z = atomic.unrecorded { implicit txn => "foo" }
    assert(z === "foo")
  }

  test("nested fixed unrecorded txn") {
    val x = Ref(0)
    val z = atomic { implicit txn =>
      x() = 1
      atomic.unrecorded { implicit txn => "foo" }
    }
    assert(z === "foo")
  }

  test("writing unrecorded txn") {
    // The write to x inside the unrecorded txn must be discarded.
    val x = Ref(0)
    val z = atomic.unrecorded { implicit txn =>
      x() = 1
      "foo"
    }
    assert(z === "foo")
    assert(x.single() === 0)
  }

  test("nested unrecorded txn") {
    // Each nesting level increments, but every level's write is rolled back; only
    // the innermost read (of the accumulated value) escapes as the result.
    val x = Ref(0)
    val z = atomic.unrecorded { implicit txn =>
      x += 1
      atomic.unrecorded { implicit txn =>
        x += 1
        atomic.unrecorded { implicit txn =>
          x += 1
          atomic.unrecorded { implicit txn =>
            x += 1
            atomic.unrecorded { implicit txn =>
              x += 1
              atomic.unrecorded { implicit txn =>
                x += 1
                atomic.unrecorded { implicit txn =>
                  x()
                }
              }
            }
          }
        }
      }
    }
    assert(z === 6)
    assert(x.single() === 0)
  }

  test("nested new write unrecorded txn") {
    val x = Ref(0)
    val z = atomic { implicit txn =>
      atomic.unrecorded { implicit txn =>
        x() = 1
        "foo"
      }
    }
    assert(x.single() === 0)
    assert(z === "foo")
  }

  test("nested update unrecorded txn") {
    // The outer txn's write survives; the unrecorded inner write does not.
    val x = Ref(0)
    val z = atomic { implicit txn =>
      x() = 1
      atomic.unrecorded { implicit txn =>
        x() = 2
        "foo"
      }
    }
    assert(x.single() === 1)
    assert(z === "foo")
  }

  test("nested preceding unrecorded txn") {
    val x = Ref(0)
    val z = atomic { implicit txn =>
      val z = atomic.unrecorded { implicit txn =>
        x() = 2
        "foo"
      }
      x() = 1
      z
    }
    assert(x.single() === 1)
    assert(z === "foo")
  }

  test("read set emptied") {
    // A concurrent write to x after the unrecorded read must NOT force a retry of
    // the outer transaction (tries stays 1), because the read was not recorded.
    val b = new CountDownLatch(1)
    val e = new CountDownLatch(1)

    val x = Ref(0)

    new Thread {
      override def run(): Unit = {
        b.await()
        x.single() = 1
        e.countDown()
      }
    }.start()

    var tries = 0
    val (z1, z2) = atomic { implicit txn =>
      tries += 1
      val z1 = atomic.unrecorded { implicit txn => x() }
      b.countDown()
      e.await()
      (z1, x())
    }

    assert(z1 === 0)
    assert(z2 === 1)
    assert(tries === 1)
  }

  class TestException extends Exception

  test("outerFailure handler") {
    // When the root txn is rolled back from another thread, the unrecorded block's
    // outerFailure handler receives the rollback cause instead of a result.
    val x = Ref(0)
    var z: Any = null
    intercept[TestException] {
      atomic { implicit txn =>
        val level = NestingLevel.root
        val done = new CountDownLatch(1)
        new Thread {
          override def run(): Unit = {
            level.requestRollback(Txn.UncaughtExceptionCause(new TestException))
            done.countDown()
          }
        }.start()
        done.await()

        z = atomic.unrecorded({ implicit txn => x() }, { cause => cause })
      }
    }
    assert(z.isInstanceOf[Txn.UncaughtExceptionCause])
  }
} | nbronson/scala-stm | src/test/scala/scala/concurrent/stm/UnrecordedTxnSuite.scala | Scala | bsd-3-clause | 3,290 |
package scala.virtualization.lms
package epfl
package test2
import common._
import test1._
import reflect.SourceContext
import java.io.PrintWriter
import org.scalatest._
/**
 * Staged radix-2 Cooley-Tukey FFT over lists of staged complex numbers.
 * Requires the staged arithmetic and trigonometry DSLs via the self type.
 */
trait FFT { this: Arith with Trig =>

  /** k-th of the N complex roots of unity (twiddle factor): e^(-2*pi*i*k/N). */
  def omega(k: Int, N: Int): Complex = {
    val kth = -2.0 * k * math.Pi / N
    Complex(cos(kth), sin(kth))
  }

  /** Staged complex number; both components are staged doubles. */
  case class Complex(re: Rep[Double], im: Rep[Double]) {
    def +(that: Complex) = Complex(this.re + that.re, this.im + that.im)
    def -(that: Complex) = Complex(this.re - that.re, this.im - that.im)
    def *(that: Complex) = Complex(this.re * that.re - this.im * that.im, this.re * that.im + this.im * that.re)
  }

  /**
   * Splits a list into (elements at even indices, elements at odd indices).
   * Now total: the previous version only matched even-length lists (under
   * `@unchecked`) and threw a MatchError on odd-length input.
   */
  def splitEvenOdd[T](xs: List[T]): (List[T], List[T]) = xs match {
    case Nil => (Nil, Nil)
    case e :: Nil => (List(e), Nil) // odd length: the last element sits at an even index
    case e :: o :: xt =>
      val (es, os) = splitEvenOdd(xt)
      ((e :: es), (o :: os))
  }

  /**
   * Interleaves two lists (inverse of splitEvenOdd). Now total: leftover elements of
   * the longer list are appended; previously unequal lengths threw a MatchError.
   */
  def mergeEvenOdd[T](even: List[T], odd: List[T]): List[T] = (even, odd) match {
    case (Nil, os) => os
    case (es, Nil) => es
    case ((e :: es), (o :: os)) =>
      e :: (o :: mergeEvenOdd(es, os))
  }

  /**
   * Decimation-in-time FFT. Assumes xs.length is a power of two (as in the classic
   * radix-2 algorithm); the empty list now returns Nil instead of diverging.
   */
  def fft(xs: List[Complex]): List[Complex] = xs match {
    case Nil => Nil // previously recursed forever on empty input
    case (x :: Nil) => xs
    case _ =>
      val N = xs.length // assume it's a power of two
      val (even0, odd0) = splitEvenOdd(xs)
      val (even1, odd1) = (fft(even0), fft(odd0))
      val (even2, odd2) = (even1 zip odd1 zipWithIndex) map {
        case ((x, y), k) =>
          val z = omega(k, N) * y // butterfly: combine with the twiddle factor
          (x + z, x - z)
      } unzip;
      even2 ::: odd2
  }

}
/**
 * Extra algebraic rewrites applied while staging the FFT:
 * x + (0 - y) => x - y,  x - (0 - y) => x + y,  (-1) * x => 0 - x.
 * These clean up the terms produced by the butterfly/twiddle expressions.
 */
trait ArithExpOptFFT extends ArithExpOpt {
  override def infix_+(x: Exp[Double], y: Exp[Double])(implicit pos: SourceContext) = (x, y) match {
    case (x, Def(Minus(Const(0.0) | Const(-0.0), y))) => infix_-(x, y)
    case _ => super.infix_+(x, y)
  }
  override def infix_-(x: Exp[Double], y: Exp[Double])(implicit pos: SourceContext) = (x, y) match {
    case (x, Def(Minus(Const(0.0) | Const(-0.0), y))) => infix_+(x, y)
    case _ => super.infix_-(x, y)
  }
  override def infix_*(x: Exp[Double], y: Exp[Double])(implicit pos: SourceContext) = (x, y) match {
    case (x, Const(-1.0)) => infix_-(0.0, x)
    case (Const(-1.0), y) => infix_-(0.0, y)
    case _ => super.infix_*(x, y)
  }
}
/**
 * Constant-folds cos(x) to 0 when x is an odd multiple of pi/2, i.e. when
 * z = 2x/pi is an odd integer (cos(pi/2 + k*pi) == 0).
 *
 * Fix: the previous guard accepted ANY non-zero integer z, which also folded
 * cos(k*pi) (whose value is +/-1, not 0) to the constant 0. Requiring z to be
 * an odd integer restricts the rewrite to the actual zeros of cosine. The
 * oddness test via `% 2 != 0` also covers negative z (-1 % 2 == -1).
 */
trait TrigExpOptFFT extends TrigExpOpt {
  override def cos(x: Exp[Double]) = x match {
    case Const(c) if { val z = c / math.Pi / 0.5; z == z.toInt && z.toInt % 2 != 0 } => Const(0.0)
    case _ => super.cos(x)
  }
}
trait FlatResult extends BaseExp { // just to make dot output nicer
  // Wraps an arbitrary value as a single sink node in the IR graph, giving
  // the dependency-graph export one well-defined root.
  case class Result(x: Any) extends Def[Any]
  def result(x: Any): Exp[Any] = toAtom(Result(x))
}
// Code generator in which a Block is just the expression itself (no effect
// reification): reify is the identity and scheduling walks the plain graph.
trait ScalaGenFlat extends ScalaGenBase {
  import IR._
  type Block[+T] = Exp[T]
  def getBlockResultFull[T](x: Block[T]): Exp[T] = x
  def reifyBlock[T: Manifest](x: => Exp[T]): Block[T] = x
  def traverseBlock[A](block: Block[A]): Unit = {
    buildScheduleForResult(block) foreach traverseStm
  }
}
/**
 * Golden-file tests for the staged FFT: each test stages a size-4 FFT over
 * fresh symbolic inputs, dumps the IR (and a dot graph) to a file, and
 * compares against checked-in expected output.
 */
class TestFFT extends FileDiffSuite {

  val prefix = home + "test-out/epfl/test2-"

  // FFT staged without the FFT-specific algebraic/trig rewrites.
  def testFFT1 = {
    withOutFile(prefix+"fft1") {
      val o = new FFT with ArithExp with TrigExpOpt with FlatResult with DisableCSE //with DisableDCE
      import o._
      val r = fft(List.tabulate(4)(_ => Complex(fresh, fresh)))
      println(globalDefs.mkString("\n"))
      println(r)
      val p = new ExportGraph with DisableDCE { val IR: o.type = o }
      p.emitDepGraph(result(r), prefix+"fft1-dot", true)
    }
    assertFileEqualsCheck(prefix+"fft1")
    assertFileEqualsCheck(prefix+"fft1-dot")
  }

  // Same FFT with the ArithExpOptFFT/TrigExpOptFFT rewrites enabled; the
  // resulting graph should be visibly smaller than in testFFT1.
  def testFFT2 = {
    withOutFile(prefix+"fft2") {
      val o = new FFT with ArithExpOptFFT with TrigExpOptFFT with FlatResult
      import o._
      // NOTE(review): this local Result is never used and shadows
      // FlatResult.Result — candidate for removal.
      case class Result(x: Any) extends Exp[Any]
      val r = fft(List.tabulate(4)(_ => Complex(fresh, fresh)))
      println(globalDefs.mkString("\n"))
      println(r)
      val p = new ExportGraph { val IR: o.type = o }
      p.emitDepGraph(result(r), prefix+"fft2-dot", true)
    }
    assertFileEqualsCheck(prefix+"fft2")
    assertFileEqualsCheck(prefix+"fft2-dot")
  }

  // End-to-end: generate Scala source for a size-4 FFT over a flat
  // Array[Double] of interleaved (re, im) pairs, compile it, and run it.
  def testFFT3 = {
    withOutFile(prefix+"fft3") {
      class FooBar extends FFT
          with ArithExpOptFFT with TrigExpOptFFT with ArraysExp
          with CompileScala {
        def ffts(input: Rep[Array[Double]], size: Int) = {
          val list = List.tabulate(size)(i => Complex(input(2*i), input(2*i+1)))
          val r = fft(list)
          // make a new array for now - doing in-place update would be better
          makeArray(r.flatMap { case Complex(re,im) => List(re,im) })
        }
        val codegen = new ScalaGenFlat with ScalaGenArith with ScalaGenArrays { val IR: FooBar.this.type = FooBar.this } // TODO: find a better way...
      }
      val o = new FooBar
      import o._
      val fft4 = (input: Rep[Array[Double]]) => ffts(input, 4)
      codegen.emitSource(fft4, "FFT4", new PrintWriter(System.out))
      val fft4c = compile(fft4)
      println(fft4c(Array(1.0,0.0, 1.0,0.0, 2.0,0.0, 2.0,0.0, 1.0,0.0, 1.0,0.0, 0.0,0.0, 0.0,0.0)).mkString(","))
    }
    assertFileEqualsCheck(prefix+"fft3")
  }
}
| afernandez90/virtualization-lms-core | test-src/epfl/test2-fft/TestFFT.scala | Scala | bsd-3-clause | 5,125 |
package filodb.prom.downsample
import scala.concurrent.Await
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import akka.actor.{ActorSystem, CoordinatedShutdown}
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, Uri}
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import com.typesafe.config.ConfigFactory
import com.typesafe.scalalogging.StrictLogging
import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport
import filodb.http.PromCirceSupport
import filodb.prometheus.query.PrometheusModel.SuccessResponse
/**
* Use this tool to validate raw data against downsampled data for gauges.
*
*
* Run as main class with following system properties:
*
* -Dquery-endpoint=https://myFiloDbEndpoint.com
* -Draw-data-promql=jvm_threads{_ns=\\"myApplication\\",measure=\\"daemon\\",__col__=\\"value\\"}[@@@@s]
* -Dflush-interval=12h
* -Dquery-range=6h
*
* raw-data-promql property value should end with ',__col__="value"}[@@@@s]'.
* The lookback window is replaced by validation tool when running the query.
*
*/
/**
 * Validates that gauge downsampling produced correct data: for each downsample
 * resolution and each aggregate (min/max/sum/count), runs the equivalent
 * *_over_time query against both the raw and the downsampled dataset over the
 * same aligned time window and compares the results. Exits with code 10 on
 * any mismatch.
 */
object GaugeDownsampleValidator extends App with StrictLogging {

  import FailFastCirceSupport._
  // DO NOT REMOVE PromCirceSupport import below assuming it is unused - Intellij removes it in auto-imports :( .
  // Needed to override Sampl case class Encoder.
  import PromCirceSupport._
  import io.circe.generic.auto._

  // One validation = a downsampler name plus the raw/downsampled queries to compare.
  case class DownsampleValidation(name: String, rawQuery: String, dsQuery: String)

  // One downsample resolution and the HTTP endpoint serving that dataset.
  case class DownsampleLevel(step: Duration, endpoint: String)

  val config = ConfigFactory.load()
  val rawPromql = config.getString("raw-data-promql")
  val filodbHttpEndpoint = config.getString("query-endpoint")
  val flushIntervalHours = config.getDuration("flush-interval")
  val queryRange = config.getDuration("query-range")

  // The @@@@ placeholder in the lookback window is substituted with the step
  // size of each downsample level before the query is issued.
  require((rawPromql.endsWith(""",__col__="value"}[@@@@s]""")),
    """Raw Data PromQL should end with ,__col__="value"}[@@@@s]""")

  // List of validations to perform. The downsampled query reads the matching
  // aggregate column; note "count" is re-aggregated with sum_over_time.
  val validations = Seq (
    DownsampleValidation("min", s"""min_over_time($rawPromql)""",
      s"""min_over_time(${rawPromql.replace("\\"value\\"", "\\"min\\"")})"""),
    DownsampleValidation("max", s"""max_over_time($rawPromql)""",
      s"""max_over_time(${rawPromql.replace("\\"value\\"", "\\"max\\"")})"""),
    DownsampleValidation("sum", s"""sum_over_time($rawPromql)""",
      s"""sum_over_time(${rawPromql.replace("\\"value\\"", "\\"sum\\"")})"""),
    DownsampleValidation("count", s"""count_over_time($rawPromql)""",
      s"""sum_over_time(${rawPromql.replace("\\"value\\"", "\\"count\\"")})""")
  )

  // Query a window ending one flush interval in the past, aligned down to a
  // whole hour — presumably so both datasets are fully flushed; confirm.
  val now = System.currentTimeMillis()
  val endTime = (now - flushIntervalHours.toMillis) / 1000
  val lastHourEnd = (endTime / 1.hour.toSeconds) * 1.hour.toSeconds
  val startTime = (lastHourEnd - queryRange.toMillis / 1000)

  val urlPrefixRaw = s"$filodbHttpEndpoint/promql/prometheus/api"

  implicit val as = ActorSystem()
  implicit val materializer = ActorMaterializer()

  // TODO configure dataset name etc.
  val downsampleLevels = Seq (
    DownsampleLevel(1.minute, s"$filodbHttpEndpoint/promql/prometheus_ds_1m/api"),
    DownsampleLevel(15.minutes, s"$filodbHttpEndpoint/promql/prometheus_ds_15m/api"),
    DownsampleLevel(60.minutes, s"$filodbHttpEndpoint/promql/prometheus_ds_1hr/api"))

  val params = Map( "start" -> startTime.toString, "end" -> endTime.toString)

  // validation loop: one boolean result per (level, validation) pair
  val results = for {
    level <- downsampleLevels // for each downsample dataset
    validation <- validations // for each validation
  } yield {
    val step = level.step.toSeconds

    // invoke query on downsample dataset
    val dsPromQLFull = validation.dsQuery.replace("@@@@", step.toString)
    val dsParams = params ++ Map("step" -> step.toString, "query" -> dsPromQLFull)
    val dsUrl = Uri(s"${level.endpoint}/v1/query_range").withQuery(Query(dsParams))
    val dsRespFut = Http().singleRequest(HttpRequest(uri = dsUrl)).flatMap(Unmarshal(_).to[SuccessResponse])
    val dsResp = try {
      Some(Await.result(dsRespFut, 10.seconds))
    } catch {
      case e: Throwable =>
        e.printStackTrace()
        None
    }

    // invoke query on raw dataset
    val rawPromQLFull = validation.rawQuery.replace("@@@@", step.toString)
    val rawParams = params ++ Map("step" -> step.toString, "query" -> rawPromQLFull)
    val rawUrl = Uri(s"$urlPrefixRaw/v1/query_range").withQuery(Query(rawParams))
    val rawRespFut = Http().singleRequest(HttpRequest(uri = rawUrl)).flatMap(Unmarshal(_).to[SuccessResponse])
    val rawResp = try {
      Some(Await.result(rawRespFut, 10.seconds))
    } catch {
      case e: Throwable =>
        e.printStackTrace()
        None
    }

    // normalize the results by sorting the range vectors so we can do comparison
    // NOTE(review): the .get calls below throw NoSuchElementException when a
    // request failed above (None), masking the real error — consider failing fast.
    val dsNorm = dsResp.get.data.copy(result =
      dsResp.get.data.result.sortWith((a, b) => a.metric("instance").compareTo(b.metric("instance")) > 0))
    val rawNorm = rawResp.get.data.copy(result =
      rawResp.get.data.result.sortWith((a, b) => a.metric("instance").compareTo(b.metric("instance")) > 0))

    logger.info(s"Downsampler=${validation.name} step=${step}s validationResult=${dsNorm == rawNorm} " +
      s"rawUrl=$rawUrl dsUrl=$dsUrl")
    if (dsNorm != rawNorm) {
      logger.error(s"Raw results: $rawNorm")
      logger.error(s"DS results: $dsNorm")
    }
    dsNorm == rawNorm
  }

  CoordinatedShutdown(as).run(CoordinatedShutdown.UnknownReason)

  // Any single mismatch fails the whole run.
  if (results.exists(b => !b)) {
    logger.info("Validation had a failure. See logs for details.")
    System.exit(10)
  }
  else
    logger.info("Validation was a success")
}
| velvia/FiloDB | http/src/test/scala/filodb/prom/downsample/GaugeDownsampleValidator.scala | Scala | apache-2.0 | 5,727 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database.test
import akka.http.scaladsl.model.Uri
import akka.stream.scaladsl.Source
import akka.stream.{ActorMaterializer, Materializer}
import akka.util.CompactByteString
import common.WskActorSystem
import org.junit.runner.RunWith
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers}
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.database.{AttachmentSupport, InliningConfig}
import org.apache.openwhisk.core.entity.WhiskEntity
import org.apache.openwhisk.core.entity.size._
@RunWith(classOf[JUnitRunner])
class AttachmentSupportTests extends FlatSpec with Matchers with ScalaFutures with WskActorSystem {

  behavior of "Attachment inlining"

  implicit val materializer: Materializer = ActorMaterializer()

  it should "not inline if maxInlineSize set to zero" in {
    // With a 0 KB inline limit the payload cannot be inlined, so uriOf must
    // produce a scheme-prefixed attachment reference ("test:foo").
    val inliner = new AttachmentSupportTestMock(InliningConfig(maxInlineSize = 0.KB))
    val bs = CompactByteString("hello world")

    val bytesOrSource = inliner.inlineOrAttach(Source.single(bs)).futureValue
    val uri = inliner.uriOf(bytesOrSource, "foo")
    uri shouldBe Uri("test:foo")
  }

  // Minimal concrete AttachmentSupport: fixed "test" scheme, no persistence
  // (put is unimplemented since these tests never store an entity).
  class AttachmentSupportTestMock(val inliningConfig: InliningConfig) extends AttachmentSupport[WhiskEntity] {
    override protected[core] implicit val materializer: Materializer = ActorMaterializer()
    override protected def attachmentScheme: String = "test"
    override protected def executionContext = actorSystem.dispatcher
    override protected[database] def put(d: WhiskEntity)(implicit transid: TransactionId) = ???
  }
}
| starpit/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/database/test/AttachmentSupportTests.scala | Scala | apache-2.0 | 2,452 |
package org.jetbrains.plugins.scala
package highlighter
import _root_.org.jetbrains.plugins.scala.lang.psi.api.toplevel.{ScEarlyDefinitions, ScModifierListOwner}
import com.intellij.internal.statistic.UsageTrigger
import com.intellij.lang.annotation.AnnotationHolder
import com.intellij.openapi.editor.colors.TextAttributesKey
import com.intellij.psi._
import com.intellij.psi.stubs.StubElement
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns._
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScConstructor, ScReferenceElement, ScStableCodeReferenceElement}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.{ScParameter, ScTypeParam}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportExpr
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.templates.ScTemplateBody
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.{ScClass, ScObject, ScTrait}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager.ClassCategory
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import org.jetbrains.plugins.scala.lang.psi.types.{ScFunctionType, ScType}
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiUtil, ScalaStubBasedElementImpl}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.settings.ScalaProjectSettings
/**
* User: Alexander Podkhalyuzin
* Date: 17.07.2008
*/
object AnnotatorHighlighter {
// Fully-qualified name prefixes / special names used to classify collection
// types (mutable vs immutable vs Java) and to skip factory-method calls.
private val JAVA_COLLECTIONS_BASES = List("java.util.Map", "java.util.Collection")
private val SCALA_FACTORY_METHODS_NAMES = Set("make", "apply")
private val SCALA_COLLECTION_MUTABLE_BASE = "_root_.scala.collection.mutable."
private val SCALA_COLLECTION_IMMUTABLE_BASE = "_root_.scala.collection.immutable."
private val SCALA_COLLECTION_GENERIC_BASE = "_root_.scala.collection.generic."
private val SCALA_PREDEFINED_OBJECTS = Set("scala", "scala.Predef")
private val SCALA_PREDEF_IMMUTABLE_BASES = Set("_root_.scala.PredefMap", "_root_.scala.PredefSet", "scalaList",
  "scalaNil", "scalaStream", "scalaVector", "scalaSeq")
/** Parent of a stub-based element, taken from the stub tree when one is present. */
private def getParentStub(el: StubBasedPsiElement[_ <: StubElement[_ <: PsiElement]]): PsiElement = {
  val stub = el.getStub
  if (stub == null) el.getParent
  else stub.getParentStub.getPsi
}
/** Parent lookup that prefers the stub tree for stub-based Scala elements. */
private def getParentByStub(x: PsiElement): PsiElement = x match {
  case el: ScalaStubBasedElementImpl[_] => getParentStub(el)
  case other => other.getContext
}
/**
 * Annotates a reference with the text attributes matching the kind of element
 * it resolves to (class / abstract class / object / trait / type alias /
 * val / var / parameter / method, local vs member, lazy vs strict), plus
 * optional mutable/immutable/Java collection-type coloring.
 */
def highlightReferenceElement(refElement: ScReferenceElement, holder: AnnotationHolder) {
  // Applies collection coloring to the reference, honoring the user's
  // collection-type-highlighting setting (none / unqualified-only / all).
  def annotateCollectionByType(resolvedType: ScType) {
    // Skip operator-named types.
    if (ScalaNamesUtil.isOperatorName(
      resolvedType.presentableText.substring(0, resolvedType.presentableText.prefixLength(_ != '.')))) return
    val scalaProjectSettings: ScalaProjectSettings = ScalaProjectSettings.getInstance(refElement.getProject)
    scalaProjectSettings.getCollectionTypeHighlightingLevel match {
      case ScalaProjectSettings.COLLECTION_TYPE_HIGHLIGHTING_NONE => return
      case ScalaProjectSettings.COLLECTION_TYPE_HIGHLIGHTING_NOT_QUALIFIED =>
        // Only highlight unqualified references in this mode.
        refElement.qualifier match {
          case None =>
          case _ => return
        }
      case ScalaProjectSettings.COLLECTION_TYPE_HIGHLIGHTING_ALL =>
    }
    UsageTrigger.trigger("scala.collection.pack.highlighting")
    // True when tp conforms to any of the classes named in qn.
    def conformsByNames(tp: ScType, qn: List[String]): Boolean = {
      qn.exists(textName => {
        val cachedClass = ScalaPsiManager.instance(refElement.getProject).getCachedClass(textName, refElement.getResolveScope, ClassCategory.TYPE)
        if (cachedClass == null) false
        else tp.conforms(ScType.designator(cachedClass))
      })
    }
    // Creates the info annotation unless the reference is a factory method name.
    def simpleAnnotate(annotationText: String, annotationAttributes: TextAttributesKey) {
      if (SCALA_FACTORY_METHODS_NAMES.contains(refElement.nameId.getText)) {
        return
      }
      val annotation = holder.createInfoAnnotation(refElement.nameId, annotationText)
      annotation.setTextAttributes(annotationAttributes)
    }
    val text = resolvedType.canonicalText
    if (text == null) return
    // Classify by canonical-name prefix, then by conformance to Java bases,
    // and finally (for generic factories) by the result type of the function.
    if (text.startsWith(SCALA_COLLECTION_IMMUTABLE_BASE) || SCALA_PREDEF_IMMUTABLE_BASES.contains(text)) {
      simpleAnnotate(ScalaBundle.message("scala.immutable.collection"), DefaultHighlighter.IMMUTABLE_COLLECTION)
    } else if (text.startsWith(SCALA_COLLECTION_MUTABLE_BASE)) {
      simpleAnnotate(ScalaBundle.message("scala.mutable.collection"), DefaultHighlighter.MUTABLE_COLLECTION)
    } else if (conformsByNames(resolvedType, JAVA_COLLECTIONS_BASES)) {
      simpleAnnotate(ScalaBundle.message("java.collection"), DefaultHighlighter.JAVA_COLLECTION)
    } else if (resolvedType.canonicalText.startsWith(SCALA_COLLECTION_GENERIC_BASE) && refElement.isInstanceOf[ScReferenceExpression]) {
      refElement.asInstanceOf[ScReferenceExpression].getType(TypingContext.empty).foreach(_ match {
        case f@ScFunctionType(returnType, params) => Option(returnType).foreach(a =>
          if (a.canonicalText.startsWith(SCALA_COLLECTION_MUTABLE_BASE)) {
            simpleAnnotate(ScalaBundle.message("scala.mutable.collection"), DefaultHighlighter.MUTABLE_COLLECTION)
          } else if (a.canonicalText.startsWith(SCALA_COLLECTION_IMMUTABLE_BASE)) {
            simpleAnnotate(ScalaBundle.message("scala.immutable.collection"), DefaultHighlighter.IMMUTABLE_COLLECTION)
          })
        case _ =>
      })
    }
  }

  def annotateCollection(resolvedClazz: PsiClass) {
    annotateCollectionByType(ScType.designator(resolvedClazz))
  }

  // References inside annotation constructors are not highlighted here.
  val c = ScalaPsiUtil.getParentOfType(refElement, classOf[ScConstructor])
  c match {
    case null =>
    case c => if (c.getParent.isInstanceOf[ScAnnotationExpr]) return
  }
  val resolvedElement = refElement.resolve()
  if (PsiTreeUtil.getParentOfType(refElement, classOf[ScImportExpr]) == null && resolvedElement.isInstanceOf[PsiClass]) {
    annotateCollection(resolvedElement.asInstanceOf[PsiClass])
  }
  // Dispatch on the kind of the resolved element.
  val annotation = holder.createInfoAnnotation(refElement.nameId, null)
  resolvedElement match {
    case c: PsiClass if ScType.baseTypesQualMap.contains(c.qualifiedName) => //this is td, it's important!
      annotation.setTextAttributes(DefaultHighlighter.PREDEF)
    case x: ScClass if x.getModifierList.has(ScalaTokenTypes.kABSTRACT) =>
      annotation.setTextAttributes(DefaultHighlighter.ABSTRACT_CLASS)
    case _: ScTypeParam =>
      annotation.setTextAttributes(DefaultHighlighter.TYPEPARAM)
    case x: ScTypeAlias =>
      x.getOriginalElement match {
        case originalElement: ScTypeAliasDefinition =>
          originalElement.aliasedType.foreach(annotateCollectionByType(_))
        case _ =>
      }
      annotation.setTextAttributes(DefaultHighlighter.TYPE_ALIAS)
    case c: ScClass if referenceIsToCompanionObjectOfClass(refElement) =>
      annotation.setTextAttributes(DefaultHighlighter.OBJECT)
    case _: ScClass =>
      annotation.setTextAttributes(DefaultHighlighter.CLASS)
    case _: ScObject =>
      annotation.setTextAttributes(DefaultHighlighter.OBJECT)
    case _: ScTrait =>
      annotation.setTextAttributes(DefaultHighlighter.TRAIT)
    case x: PsiClass if x.isInterface =>
      annotation.setTextAttributes(DefaultHighlighter.TRAIT)
    case x: PsiClass if x.getModifierList != null && x.getModifierList.hasModifierProperty("abstract") =>
      annotation.setTextAttributes(DefaultHighlighter.ABSTRACT_CLASS)
    case _: PsiClass if refElement.isInstanceOf[ScStableCodeReferenceElement] =>
      annotation.setTextAttributes(DefaultHighlighter.CLASS)
    case _: PsiClass if refElement.isInstanceOf[ScReferenceExpression] =>
      annotation.setTextAttributes(DefaultHighlighter.OBJECT)
    case x: ScBindingPattern =>
      // val/var/case-clause/generator bindings: pick attributes based on the
      // binding's context (member vs local, lazy vs strict).
      val parent = x.nameContext
      parent match {
        case r@(_: ScValue | _: ScVariable) =>
          Option(x.containingClass).foreach(a => if (SCALA_PREDEFINED_OBJECTS.contains(a.qualifiedName)) {
            x.getType(TypingContext.empty).foreach(annotateCollectionByType(_))
          })
          getParentByStub(parent) match {
            case _: ScTemplateBody | _: ScEarlyDefinitions =>
              r match {
                case mod: ScModifierListOwner if mod.hasModifierProperty("lazy") =>
                  annotation.setTextAttributes(DefaultHighlighter.LAZY)
                case _: ScValue => annotation.setTextAttributes(DefaultHighlighter.VALUES)
                case _: ScVariable => annotation.setTextAttributes(DefaultHighlighter.VARIABLES)
                case _ =>
              }
            case _ =>
              r match {
                case mod: ScModifierListOwner if mod.hasModifierProperty("lazy") =>
                  annotation.setTextAttributes(DefaultHighlighter.LOCAL_LAZY)
                case _: ScValue => annotation.setTextAttributes(DefaultHighlighter.LOCAL_VALUES)
                case _: ScVariable => annotation.setTextAttributes(DefaultHighlighter.LOCAL_VARIABLES)
                case _ =>
              }
          }
        case _: ScCaseClause =>
          annotation.setTextAttributes(DefaultHighlighter.PATTERN)
        case _: ScGenerator | _: ScEnumerator =>
          annotation.setTextAttributes(DefaultHighlighter.GENERATOR)
        case _ =>
      }
    case x: PsiField =>
      if (!x.hasModifierProperty("final")) annotation.setTextAttributes(DefaultHighlighter.VARIABLES)
      else annotation.setTextAttributes(DefaultHighlighter.VALUES)
    case x: ScParameter if x.isAnonymousParameter => annotation.setTextAttributes(DefaultHighlighter.ANONYMOUS_PARAMETER)
    case x: ScParameter => annotation.setTextAttributes(DefaultHighlighter.PARAMETER)
    case x@(_: ScFunctionDefinition | _: ScFunctionDeclaration | _: ScMacroDefinition) =>
      // Factory methods / constructors also get collection coloring on the class.
      if (SCALA_FACTORY_METHODS_NAMES.contains(x.asInstanceOf[PsiMethod].getName) || x.asInstanceOf[PsiMethod].isConstructor) {
        val clazz = PsiTreeUtil.getParentOfType(x, classOf[PsiClass])
        if (clazz != null) {
          annotateCollection(clazz)
        }
      }
      if (x != null) {
        val fun = x.asInstanceOf[ScFunction]
        val clazz = fun.containingClass
        clazz match {
          case o: ScObject if o.allSynthetics.contains(fun) =>
            annotation.setTextAttributes(DefaultHighlighter.OBJECT_METHOD_CALL)
            return
          case _ =>
        }
        // Member methods: class/trait vs object coloring; otherwise local.
        getParentByStub(x) match {
          case _: ScTemplateBody | _: ScEarlyDefinitions =>
            getParentByStub(getParentByStub(getParentByStub(x))) match {
              case _: ScClass | _: ScTrait =>
                annotation.setTextAttributes(DefaultHighlighter.METHOD_CALL)
              case _: ScObject =>
                annotation.setTextAttributes(DefaultHighlighter.OBJECT_METHOD_CALL)
              case _ =>
            }
          case _ =>
            annotation.setTextAttributes(DefaultHighlighter.LOCAL_METHOD_CALL)
        }
      }
    case x: PsiMethod =>
      // Java methods: static -> object-method coloring, else instance-method.
      if (x.isConstructor) {
        val clazz: PsiClass = PsiTreeUtil.getParentOfType(x, classOf[PsiClass])
        if (clazz != null) annotateCollection(clazz)
      }
      if (x.getModifierList != null && x.getModifierList.hasModifierProperty("static")) {
        annotation.setTextAttributes(DefaultHighlighter.OBJECT_METHOD_CALL)
      } else {
        annotation.setTextAttributes(DefaultHighlighter.METHOD_CALL)
      }
    case x => //println("" + x + " " + x.getText)
  }
}
/**
 * Annotates a declaration site (as opposed to a reference): delegates to the
 * visit* helpers for known PSI kinds, and classifies bare identifiers by
 * their parent element.
 */
def highlightElement(element: PsiElement, holder: AnnotationHolder) {
  element match {
    case x: ScAnnotation => visitAnnotation(x, holder)
    case x: ScParameter => visitParameter(x, holder)
    case x: ScCaseClause => visitCaseClause(x, holder)
    case x: ScGenerator => visitGenerator(x, holder)
    case x: ScEnumerator => visitEnumerator(x, holder)
    case x: ScTypeAlias => visitTypeAlias(x, holder)
    case _ if element.getNode.getElementType == ScalaTokenTypes.tIDENTIFIER =>
      // Identifier token: coloring depends on what kind of definition owns it.
      getParentByStub(element) match {
        case _: ScNameValuePair =>
          val annotation = holder.createInfoAnnotation(element, null)
          annotation.setTextAttributes(DefaultHighlighter.ANNOTATION_ATTRIBUTE)
        case _: ScTypeParam =>
          val annotation = holder.createInfoAnnotation(element, null)
          annotation.setTextAttributes(DefaultHighlighter.TYPEPARAM)
        case clazz: ScClass =>
          if (clazz.getModifierList.has(ScalaTokenTypes.kABSTRACT)) {
            val annotation = holder.createInfoAnnotation(clazz.nameId, null)
            annotation.setTextAttributes(DefaultHighlighter.ABSTRACT_CLASS)
          } else {
            val annotation = holder.createInfoAnnotation(clazz.nameId, null)
            annotation.setTextAttributes(DefaultHighlighter.CLASS)
          }
        case _: ScObject =>
          val annotation = holder.createInfoAnnotation(element, null)
          annotation.setTextAttributes(DefaultHighlighter.OBJECT)
        case _: ScTrait =>
          val annotation = holder.createInfoAnnotation(element, null)
          annotation.setTextAttributes(DefaultHighlighter.TRAIT)
        case x: ScBindingPattern =>
          // Same member/local and lazy/strict classification as for references.
          x.nameContext match {
            case r@(_: ScValue | _: ScVariable) =>
              getParentByStub(r) match {
                case _: ScTemplateBody | _: ScEarlyDefinitions =>
                  val annotation = holder.createInfoAnnotation(element, null)
                  r match {
                    case mod: ScModifierListOwner if mod.hasModifierProperty("lazy") =>
                      annotation.setTextAttributes(DefaultHighlighter.LAZY)
                    case _: ScValue => annotation.setTextAttributes(DefaultHighlighter.VALUES)
                    case _: ScVariable => annotation.setTextAttributes(DefaultHighlighter.VARIABLES)
                    case _ =>
                  }
                case _ =>
                  val annotation = holder.createInfoAnnotation(element, null)
                  r match {
                    case mod: ScModifierListOwner if mod.hasModifierProperty("lazy") =>
                      annotation.setTextAttributes(DefaultHighlighter.LOCAL_LAZY)
                    case _: ScValue => annotation.setTextAttributes(DefaultHighlighter.LOCAL_VALUES)
                    case _: ScVariable => annotation.setTextAttributes(DefaultHighlighter.LOCAL_VARIABLES)
                    case _ =>
                  }
              }
            case _: ScCaseClause =>
              val annotation = holder.createInfoAnnotation(element, null)
              annotation.setTextAttributes(DefaultHighlighter.PATTERN)
            case _: ScGenerator | _: ScEnumerator =>
              val annotation = holder.createInfoAnnotation(element, null)
              annotation.setTextAttributes(DefaultHighlighter.GENERATOR)
            case _ =>
          }
        case _: ScFunctionDefinition | _: ScFunctionDeclaration =>
          val annotation = holder.createInfoAnnotation(element, null)
          annotation.setTextAttributes(DefaultHighlighter.METHOD_DECLARATION)
        case _ =>
      }
    case _ =>
  }
}
/** Colors an annotation: its first child (the leading token) and the annotated type reference. */
private def visitAnnotation(annotation: ScAnnotation, holder: AnnotationHolder): Unit = {
  holder.createInfoAnnotation(annotation.getFirstChild, null).setTextAttributes(DefaultHighlighter.ANNOTATION)
  val typeElement = annotation.annotationExpr.constr.typeElement
  holder.createInfoAnnotation(typeElement, null).setTextAttributes(DefaultHighlighter.ANNOTATION)
}
/** Colors the name of a type alias definition/declaration. */
private def visitTypeAlias(typeAlias: ScTypeAlias, holder: AnnotationHolder): Unit =
  holder.createInfoAnnotation(typeAlias.nameId, null).setTextAttributes(DefaultHighlighter.TYPE_ALIAS)
/** Colors a class name, distinguishing abstract classes from concrete ones. */
private def visitClass(clazz: ScClass, holder: AnnotationHolder): Unit = {
  val attributes =
    if (clazz.getModifierList.has(ScalaTokenTypes.kABSTRACT)) DefaultHighlighter.ABSTRACT_CLASS
    else DefaultHighlighter.CLASS
  holder.createInfoAnnotation(clazz.nameId, null).setTextAttributes(attributes)
}
/** Colors a parameter name, using a distinct key for anonymous (underscore) parameters. */
private def visitParameter(param: ScParameter, holder: AnnotationHolder): Unit = {
  val key =
    if (param.isAnonymousParameter) DefaultHighlighter.ANONYMOUS_PARAMETER
    else DefaultHighlighter.PARAMETER
  holder.createInfoAnnotation(param.nameId, null).setTextAttributes(key)
}
/** Applies the given attributes to every non-wildcard binding introduced by the pattern. */
private def visitPattern(pattern: ScPattern, holder: AnnotationHolder, attribute: TextAttributesKey): Unit = {
  pattern.bindings.withFilter(!_.isWildcard).foreach { binding =>
    holder.createInfoAnnotation(binding.nameId, null).setTextAttributes(attribute)
  }
}
/** Colors all bindings of a case clause's pattern, if it has one. */
private def visitCaseClause(clause: ScCaseClause, holder: AnnotationHolder): Unit =
  clause.pattern.foreach(visitPattern(_, holder, DefaultHighlighter.PATTERN))
// Colors all bindings introduced by a for-comprehension generator.
private def visitGenerator(generator: ScGenerator, holder: AnnotationHolder): Unit = {
  visitPattern(generator.pattern, holder, DefaultHighlighter.GENERATOR)
}
// Colors all bindings introduced by a for-comprehension enumerator (val binding).
private def visitEnumerator(enumerator: ScEnumerator, holder: AnnotationHolder): Unit = {
  visitPattern(enumerator.pattern, holder, DefaultHighlighter.GENERATOR)
}
/** True when a class reference actually denotes the companion object (e.g. `Foo(1)` / `Foo.apply(1)`). */
private def referenceIsToCompanionObjectOfClass(r: ScReferenceElement): Boolean =
  Option(r.getContext) match {
    case Some(_: ScMethodCall) | Some(_: ScReferenceExpression) => true
    case _ => false
  }
}
| LPTK/intellij-scala | src/org/jetbrains/plugins/scala/highlighter/AnnotatorHighlighter.scala | Scala | apache-2.0 | 18,571 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import org.scalatest.{ Matchers, WordSpec }
import cascading.pipe.Pipe
import cascading.tuple.Fields
import com.twitter.scalding.source._
class SourceSpec extends WordSpec with Matchers {
  import Dsl._

  "A case class Source" should {
    "inherit equality properly from TimePathedSource" in {
      // Equality must take into account the path prefix, the date range AND
      // the concrete class, so only a == e (same class, path and range) below.
      implicit val tz = DateOps.UTC
      implicit val parser = DateParser.default

      val d1 = RichDate("2012-02-01")
      val d2 = RichDate("2012-02-02")
      val d3 = RichDate("2012-02-03")
      val dr1 = DateRange(d1, d2)
      val dr2 = DateRange(d2, d3)

      val a = DailySuffixTsv("/test")(dr1)
      val b = DailySuffixTsv("/test")(dr2)
      val c = DailySuffixTsv("/testNew")(dr1)
      val d = new DailySuffixTsvSecond("/testNew")(dr1)
      val e = DailySuffixTsv("/test")(dr1)

      a should not be b // same path, different range
      b should not be c // different path
      a should not be d // different class
      a shouldBe e      // identical in every respect
    }
  }

  // A structurally similar but distinct source class, used to check that
  // equality distinguishes between different concrete classes.
  class DailySuffixTsvSecond(prefix: String, fs: Fields = Fields.ALL)(override implicit val dateRange: DateRange)
    extends DailySuffixSource(prefix, dateRange) with DelimitedScheme {
    override val fields = fs
  }

  "A Source with overriden transformForRead and transformForWrite" should {
    "respect these overrides even for tests" in {
      // AddOneTsv appends a third column on read; the job rewrites it; and
      // RemoveOneTsv drops it on write, so the sink sees the original pairs.
      JobTest(new AddRemoveOneJob(_))
        .source(AddOneTsv("input"), List((0, "0"), (1, "1")))
        .sink[(String, String)](RemoveOneTsv("output")) { buf =>
          buf.toSet shouldBe Set(("0", "0"), ("1", "1"))
        }
        .run
        .finish
    }
  }
}
// Test source whose read-side transform appends a constant third column ("1").
// transformInTest = true forces the transform to run even inside JobTest.
case class AddOneTsv(p: String) extends FixedPathSource(p)
  with DelimitedScheme with Mappable[(Int, String, String)] {
  import Dsl._
  import TDsl._
  override val transformInTest = true
  override val sourceFields = new Fields("one", "two", "three")
  override def converter[U >: (Int, String, String)] =
    TupleConverter.asSuperConverter[(Int, String, String), U](implicitly[TupleConverter[(Int, String, String)]])
  // Maps the two input columns to three named fields, appending "1".
  override def transformForRead(p: Pipe) = {
    p.mapTo((0, 1) -> ('one, 'two, 'three)) {
      t: (Int, String) => t :+ "1"
    }
  }
}
// Test sink whose write-side transform drops the third column before writing.
// transformInTest = true forces the transform to run even inside JobTest.
case class RemoveOneTsv(p: String) extends FixedPathSource(p)
  with DelimitedScheme with Mappable[(Int, String, String)] {
  override val transformInTest = true
  import Dsl._
  override val sourceFields = new Fields("one", "two", "three")
  override def converter[U >: (Int, String, String)] =
    TupleConverter.asSuperConverter[(Int, String, String), U](implicitly[TupleConverter[(Int, String, String)]])
  // Keeps only the first two fields on write.
  override def transformForWrite(p: Pipe) = {
    p.mapTo(('one, 'two, 'three) -> (0, 1)) {
      t: (Int, String, String) => (t._1, t._2)
    }
  }
}
// Pipeline under test: reads via AddOneTsv (read transform appends "1"),
// rewrites column 'three, then writes via RemoveOneTsv (write transform
// drops 'three again), so the override round-trip is exercised end to end.
class AddRemoveOneJob(args: Args) extends Job(args) {
  AddOneTsv("input")
    .read
    //just for fun lets just switch all 1s with 2s
    .map('three -> 'three) { s: String => "2" }
    .write(RemoveOneTsv("output"))
}
| sriramkrishnan/scalding | scalding-core/src/test/scala/com/twitter/scalding/SourceSpec.scala | Scala | apache-2.0 | 3,450 |
package au.id.cxd.math.probability.discrete
import scala.math._
import au.id.cxd.math.count.Choose
/**
* ##import MathJax
*
* Created by cd on 7/09/2014.
*
 * The Negative Binomial Distribution (class name NegativeBinomial) provides the probability of the $n$-th success (or, equivalently, the $n$-th failure) of a Bernoulli trial. The parameters are $r$ (the preceding $r - 1$ successes occurred in the initial trials) and $y$ (the total number of trials at which success $r$ occurs). The distribution is calculated as follows:
* $$
* P(y;r) = {y - 1 \choose r - 1}p^r q^{y-r}
* $$
 * where $y = r, r + 1, \ldots$
 *
 * The simple properties of the distribution are:
 *
 * Mean: $\mu = \frac{r}{p}$
 *
 * Variance: $\sigma^2 = \frac{r(1-p)}{p^2}$
*
* r - the first success
* p the probability
* y - the number of trials when the rth success occurs
*
* y >= r
*
*/
/**
 * Negative binomial distribution: the probability that the r-th success of a
 * Bernoulli(p) process occurs on trial y (y = r, r + 1, ...).
 */
class NegativeBinomial(r: Double, p: Double) extends DiscreteDistribution {

  /**
   * P(y; r) = C(y - 1, r - 1) * p^r * (1 - p)^(y - r)
   *
   * @param y trial number on which the r-th success occurs (y >= r)
   */
  def pdf(y: Double): Double = {
    val combinations = Choose(y - 1.0)(r - 1.0)
    val failures = y - r
    combinations * pow(p, r) * pow(1.0 - p, failures)
  }

  /** Mean: r / p */
  def mean(): Double = r / p

  /** Variance: r(1 - p) / p^2 */
  def variance(): Double = r * (1.0 - p) / pow(p, 2.0)
}
/** Factory for [[NegativeBinomial]]; curried to match the library's other distribution builders. */
object NegativeBinomial {
  /** Builds a negative binomial distribution with success index `r` and success probability `p`. */
  def apply(r: Double)(p: Double): NegativeBinomial = new NegativeBinomial(r, p)
}
| cxd/scala-au.id.cxd.math | math/src/main/scala/au/id/cxd/math/probability/discrete/NegativeBinomial.scala | Scala | mit | 1,345 |
/*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.frontend.v2_3.ast.functions
import org.neo4j.cypher.internal.frontend.v2_3.ast.{Function, SimpleTypedFunction}
import org.neo4j.cypher.internal.frontend.v2_3.symbols._
/**
 * Cypher `str()` function: converts a single argument of any type to its
 * string representation.
 */
case object Str extends Function with SimpleTypedFunction {
  def name = "str"

  // Exactly one signature: str(ANY) -> STRING.
  val signatures = Vector(
    Signature(argumentTypes = Vector(CTAny), outputType = CTString)
  )
}
| HuangLS/neo4j | community/cypher/frontend-2.3/src/main/scala/org/neo4j/cypher/internal/frontend/v2_3/ast/functions/Str.scala | Scala | apache-2.0 | 1,181 |
package eventstore
package cluster
import java.net.InetSocketAddress
import com.typesafe.config.Config
import eventstore.util.ToCoarsest
import scala.collection.JavaConverters._
import scala.concurrent.duration._
/**
* Contains settings relating to a connection to a cluster.
*
* @param gossipSeedsOrDns Gossip seeds or DNS settings
* @param dnsLookupTimeout The time given to resolve dns
* @param maxDiscoverAttempts Maximum number of attempts for discovering endpoints
* @param discoverAttemptInterval The interval between cluster discovery attempts
* @param discoveryInterval The interval at which to keep discovering cluster
* @param gossipTimeout Timeout for cluster gossip.
*/
case class ClusterSettings(
    // "host" :: port appears to build an InetSocketAddress via an enrichment in
    // the eventstore package — TODO confirm against package object.
    gossipSeedsOrDns: GossipSeedsOrDns = GossipSeedsOrDns.GossipSeeds("127.0.0.1" :: 2113),
    dnsLookupTimeout: FiniteDuration = 2.seconds,
    maxDiscoverAttempts: Int = 10,
    discoverAttemptInterval: FiniteDuration = 500.millis,
    discoveryInterval: FiniteDuration = 1.second,
    gossipTimeout: FiniteDuration = 1.second) {
  // At least one attempt is needed for endpoint discovery to run at all.
  require(maxDiscoverAttempts >= 1, s"maxDiscoverAttempts must be >= 1, but is $maxDiscoverAttempts")
}
object ClusterSettings {

  /**
   * Builds [[ClusterSettings]] from the `eventstore.cluster` section of the
   * given configuration. Returns `None` unless either a cluster DNS name or a
   * non-empty list of gossip seeds is configured.
   */
  def opt(conf: Config): Option[ClusterSettings] = {
    // Parses the `eventstore.cluster` sub-config. (Named `parse` rather than
    // shadowing the outer `opt`/`conf` names.)
    def parse(cluster: Config): Option[ClusterSettings] = {
      // Reads an optional value at `path`, or None when the path is absent.
      def option[T](path: String, f: String => T): Option[T] =
        if (cluster hasPath path) Option(f(path)) else None

      // DNS-based discovery: requires `dns` plus `external-gossip-port`.
      def clusterDns = option("dns", cluster.getString) map { dns =>
        GossipSeedsOrDns(
          clusterDns = dns,
          externalGossipPort = cluster getInt "external-gossip-port")
      }

      // Seed-based discovery: parses each `host:port` entry of `gossip-seeds`.
      def gossipSeeds = option("gossip-seeds", cluster.getStringList) flatMap { ss =>
        if (ss.isEmpty) None
        else {
          val seeds = ss.asScala map { s =>
            s.split(":") match {
              case Array(host, port) => new InetSocketAddress(host, port.toInt)
              case _                 => sys.error(s"Cannot parse address from $s, expected format is host:port")
            }
          }
          Some(GossipSeedsOrDns.GossipSeeds(seeds.toList))
        }
      }

      // Reads a duration in milliseconds and simplifies it to the coarsest unit.
      def duration(path: String) =
        ToCoarsest(FiniteDuration(cluster.getDuration(path, MILLISECONDS), MILLISECONDS))

      // DNS configuration takes precedence over explicit gossip seeds.
      (clusterDns orElse gossipSeeds) map { gossipSeedsOrDns =>
        ClusterSettings(
          gossipSeedsOrDns = gossipSeedsOrDns,
          dnsLookupTimeout = duration("dns-lookup-timeout"),
          maxDiscoverAttempts = cluster getInt "max-discover-attempts",
          discoverAttemptInterval = duration("discover-attempt-interval"),
          discoveryInterval = duration("discovery-interval"),
          gossipTimeout = duration("gossip-timeout"))
      }
    }
    parse(conf getConfig "eventstore.cluster")
  }
}
sealed trait GossipSeedsOrDns
object GossipSeedsOrDns {
  /** Discovers endpoints via DNS, using the default external gossip port. */
  def apply(clusterDns: String): GossipSeedsOrDns = ClusterDns(clusterDns)

  /** Discovers endpoints via DNS on the given external gossip port. */
  def apply(clusterDns: String, externalGossipPort: Int): GossipSeedsOrDns = ClusterDns(clusterDns, externalGossipPort)

  /** Discovers endpoints from an explicit, non-empty list of gossip seeds. */
  def apply(gossipSeeds: InetSocketAddress*): GossipSeedsOrDns = GossipSeeds(gossipSeeds.toList)

  /**
   * Used if we're discovering via DNS
   *
   * @param clusterDns The DNS name to use for discovering endpoints.
   * @param externalGossipPort The well-known endpoint on which cluster managers are running.
   */
  case class ClusterDns(
      clusterDns: String = "localhost",
      externalGossipPort: Int = 30778) extends GossipSeedsOrDns {
    // Port must fit in an unsigned 16-bit range (0 is excluded).
    require(0 < externalGossipPort && externalGossipPort < 65536, s"externalGossipPort is not valid :$externalGossipPort")
    require(clusterDns != null, "clusterDns must be not null")
    require(clusterDns.nonEmpty, "clusterDns must be not empty")
  }

  /**
   * Used if we're connecting with gossip seeds
   *
   * @param gossipSeeds Endpoints for seeding gossip.
   */
  case class GossipSeeds(gossipSeeds: List[InetSocketAddress]) extends GossipSeedsOrDns {
    require(gossipSeeds.nonEmpty, s"gossipSeeds must be non empty")
  }

  object GossipSeeds {
    /** Varargs convenience constructor. */
    def apply(gossipSeeds: InetSocketAddress*): GossipSeeds = GossipSeeds(gossipSeeds.toList)
  }
}
/*
*
* * Copyright 2020 Lenses.io.
* *
* * Licensed under the Apache License, Version 2.0 (the "License");
* * you may not use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License.
*
*/
package com.datamountaineer.streamreactor.connect.jms.source
import com.datamountaineer.streamreactor.common.utils.JarManifest
import com.datamountaineer.streamreactor.connect.jms.config.{JMSConfig, JMSConfigConstants}
import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.common.config.ConfigDef
import org.apache.kafka.connect.connector.Task
import org.apache.kafka.connect.source.SourceConnector
import org.apache.kafka.connect.util.ConnectorUtils
import java.util
import scala.collection.JavaConverters._
/**
* Created by [email protected] on 10/03/2017.
* stream-reactor
*/
/**
 * Kafka Connect source connector for JMS. Captures the connector configuration
 * in [[start]] and fans it out to per-task configurations in [[taskConfigs]],
 * either by splitting KCQL statements across tasks or by replicating the full
 * configuration to every task.
 */
class JMSSourceConnector extends SourceConnector with StrictLogging {
  // Raw connector properties captured in start(); read by taskConfigs().
  private var configProps: util.Map[String, String] = _
  private val configDef = JMSConfig.config
  private val manifest = JarManifest(getClass.getProtectionDomain.getCodeSource.getLocation)

  override def taskClass(): Class[_ <: Task] = classOf[JMSSourceTask]

  /**
   * KCQL-based scaling: splits the ';'-separated KCQL statements into at most
   * `maxTasks` groups; each task receives a copy of the connector config with
   * its KCQL subset substituted in. Empty groups are dropped, so fewer than
   * `maxTasks` configs may be returned.
   */
  def kcqlTaskScaling(maxTasks: Int): util.List[util.Map[String, String]] = {
    val raw = configProps.get(JMSConfigConstants.KCQL)
    require(raw != null && raw.nonEmpty, s"No ${JMSConfigConstants.KCQL} provided!")

    //sql1, sql2
    val kcqls = raw.split(";")
    val groups = ConnectorUtils.groupPartitions(kcqls.toList.asJava, maxTasks).asScala

    //split up the kcql statement based on the number of tasks.
    groups
      .filterNot(_.isEmpty)
      .map { g =>
        val taskConfigs = new java.util.HashMap[String, String]
        taskConfigs.putAll(configProps)
        taskConfigs.put(JMSConfigConstants.KCQL, g.asScala.mkString(";")) //overwrite
        taskConfigs.asScala.toMap.asJava
      }
  }.asJava

  /** Default scaling: every one of the `maxTasks` tasks gets the full, unmodified config. */
  def defaultTaskScaling(maxTasks: Int): util.List[util.Map[String, String]] = {
    val raw = configProps.get(JMSConfigConstants.KCQL)
    require(raw != null && raw.nonEmpty, s"No ${JMSConfigConstants.KCQL} provided!")
    (1 to maxTasks).map { _ =>
      val taskConfigs: util.Map[String, String] = new java.util.HashMap[String, String]
      taskConfigs.putAll(configProps)
      taskConfigs
    }.toList.asJava
  }

  override def taskConfigs(maxTasks: Int): util.List[util.Map[String, String]] = {
    val config = new JMSConfig(configProps)
    // NOTE(review): toLowerCase() is locale-sensitive; assumes the constant is
    // lower-case ASCII — confirm against JMSConfigConstants.
    val scaleType = config.getString(JMSConfigConstants.TASK_PARALLELIZATION_TYPE).toLowerCase()
    // NOTE(review): presumably TASK_PARALLELIZATION_TYPE_DEFAULT names the
    // KCQL-based strategy, so matching it selects kcqlTaskScaling — verify.
    if (scaleType == JMSConfigConstants.TASK_PARALLELIZATION_TYPE_DEFAULT) {
      kcqlTaskScaling(maxTasks)
    } else defaultTaskScaling(maxTasks)
  }

  override def config(): ConfigDef = configDef

  /** Validates and stores the connector configuration for later task fan-out. */
  override def start(props: util.Map[String, String]): Unit = {
    val config = new JMSConfig(props)
    configProps = config.props
  }

  override def stop(): Unit = {}

  override def version(): String = manifest.version()
}
| datamountaineer/stream-reactor | kafka-connect-jms/src/main/scala/com/datamountaineer/streamreactor/connect/jms/source/JMSSourceConnector.scala | Scala | apache-2.0 | 3,383 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
/**
* To keep interaction with Lucene really simple, we make the
* following assumptions about the entities that we index:
*
* 1. all entities are flat (no nested objects).
*
* 2. all values have a `String` representation.
*
* 3. field names are universal: e.g. a "name" field in one type of
* entity should be analyzed the same way as in another.
*
* 4. entities have a unique id that is derived from their content.
*
* which allows us to use case classes to define entities, getting us
* serialisation and deserialisation with minimal boilerplate.
* Field-based `Analyzer`s and `Query`s, on the other hand, can be
* arbitrarily complex.
*
* In addition, Option[T]s are indexed but not stored (not
* fully persistent).
*/
package org.ensime.indexer
import org.apache.lucene.document._
package object lucene {
  /** Adds `toDocument` to any Entity with a DocumentProvider in implicit scope. */
  implicit class RichEntity[T <: Entity](e: T) {
    def toDocument(implicit p: DocumentProvider[T]) = p.toDocument(e)
  }

  /** Adds entity recovery and field boosting to Lucene Documents. */
  implicit class RichDocument(d: Document) {
    def toEntity[T](implicit p: DocumentRecovery[T]) = p.toEntity(d)
    // Unchecked cast: assumes field `f` was indexed as a TextField; a
    // differently-typed field would throw ClassCastException — TODO confirm
    // all boosted fields are TextFields.
    def boostText(f: String, boost: Float) = {
      d.getField(f).asInstanceOf[TextField].setBoost(boost)
    }
  }
}
| d6y/ensime-server | core/src/main/scala/org/ensime/indexer/lucene/package.scala | Scala | gpl-3.0 | 1,322 |
import leon.lang._
/**
 * Merge sort over a hand-rolled cons list, written for Leon verification:
 * `merge` and `mergeSort` carry `ensuring` postconditions stated over the
 * element multiset (`contents`) and the ordering predicate (`is_sorted`).
 */
object MergeSort {

  /** Minimal cons-list ADT used instead of the standard library. */
  sealed abstract class List
  case class Cons(head: Int, tail: List) extends List
  case class Nil() extends List

  /** Result of splitting a list into two parts. */
  case class Pair(fst: List, snd: List)

  /** Set abstraction of a list's elements; used in the postconditions below. */
  def contents(l: List): Set[Int] = l match {
    case Nil() => Set.empty
    case Cons(x, xs) => contents(xs) ++ Set(x)
  }

  /** True iff `l` is in non-decreasing order. */
  def is_sorted(l: List): Boolean = l match {
    case Nil() => true
    case Cons(x, xs) => xs match {
      case Nil() => true
      // Fix: recurse on the already-matched tail `xs` instead of rebuilding it
      // as Cons(y, ys), which allocated a fresh cell on every step.
      case Cons(y, _) => x <= y && is_sorted(xs)
    }
  }

  /** Number of elements in `list`. */
  def length(list: List): Int = list match {
    case Nil() => 0
    case Cons(x, xs) => 1 + length(xs)
  }

  /**
   * Moves up to `n` elements from the front of `bList` onto `aList`.
   * The moved prefix ends up reversed, which is harmless for sorting.
   */
  def splithelper(aList: List, bList: List, n: Int): Pair =
    if (n <= 0) Pair(aList, bList)
    else
      bList match {
        case Nil() => Pair(aList, bList)
        case Cons(x, xs) => splithelper(Cons(x, aList), xs, n - 1)
      }

  /** Splits `list` after its first `n` elements. */
  def split(list: List, n: Int): Pair = splithelper(Nil(), list, n)

  /** Merges two sorted lists; the result contains the union of both inputs' contents. */
  def merge(aList: List, bList: List): List = (bList match {
    case Nil() => aList
    case Cons(x, xs) =>
      aList match {
        case Nil() => bList
        case Cons(y, ys) =>
          if (y < x)
            Cons(y, merge(ys, bList))
          else
            Cons(x, merge(aList, xs))
      }
  }) ensuring (res => contents(res) == contents(aList) ++ contents(bList))

  /** Sorts `list` into non-decreasing order, preserving its contents. */
  def mergeSort(list: List): List = (list match {
    case Nil() => list
    case Cons(x, Nil()) => list
    case _ =>
      val p = split(list, length(list) / 2)
      merge(mergeSort(p.fst), mergeSort(p.snd))
  }) ensuring (res => contents(res) == contents(list) && is_sorted(res))

  /** Demo entry point: sorts a small sample list and prints before/after. */
  def main(args: Array[String]): Unit = {
    val ls: List = Cons(5, Cons(2, Cons(4, Cons(5, Cons(1, Cons(8, Nil()))))))
    println(ls)
    println(mergeSort(ls))
  }
}
| ericpony/scala-examples | testcases/verification/list-algorithms/MergeSort.scala | Scala | mit | 1,761 |
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cba.omnia.piped.aggregators
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary
import com.cba.omnia.piped.PipedSpec
import com.twitter.algebird.BaseProperties
class LimitedSizeHistogramSpec extends PipedSpec { def is = s2"""
Can create a LimitedSizeHistogram from a value $create
LimitedSizeHistograms follow the monoid laws $monoid
Can use LimitedSizeHistogram aggregators $aggregator
LimitedSizeHistograms have a limited size $limited
LimitedSizeHistograms have a limited size when used as aggregator $limitedAggregator
"""
def create = {
(LimitedSizeHistogramMonoid[Int](5)).create(1) === LimitedHistogram[Int](Map(1 -> 1l))
(LimitedSizeHistogramMonoid[Double](5)).create(1.0) === LimitedHistogram[Double](Map(1.0 -> 1l))
(LimitedSizeHistogramMonoid[String](5)).create("test") === LimitedHistogram[String](Map("test" -> 1l))
}
def monoid = {
pending("Algebird scalacheck version is incompatiable with specs2 scalacheck version")
/*implicit val monoid = LimitedSizeHistogramMonoid[Int](7)
implicit val histogramgen: Arbitrary[LimitedSizeHistogram[Int]] = Arbitrary { for {
map <- arbitrary[Map[Int, Long]]
} yield monoid.create(map) }
BaseProperties.monoidLaws[LimitedSizeHistogram[Int]]*/
}
def aggregator = {
val aggregator = LimitedSizeHistogramAggregator.apply[Int](5)
val data = List(1, 2, 1, 3, 1, 4, 2)
aggregator(data) === LimitedHistogram(Map((2, 2l), (1, 3l), (3, 1l), (4, 1l)))
}
def limited = {
val monoid = LimitedSizeHistogramMonoid[Int](4)
val data = List(1, 2, 1, 3, 1, 4, 2, 5, 7)
data.map(monoid.create).foldLeft(monoid.zero)(monoid.plus) === OverLimitHistogram[Int]()
}
def limitedAggregator = {
val aggregator = LimitedSizeHistogramAggregator.apply[Int](5)
val data = List(1, 2, 1, 3, 1, 4, 2, 5, 6, 7)
aggregator(data) === OverLimitHistogram[Int]
}
}
| CommBank/piped | src/test/scala/com/cba/omnia/piped/aggregators/LimitedSizeHistogramSpec.scala | Scala | apache-2.0 | 2,643 |
package edu.nus.systemtesting.hipsleek.app
import java.io.File
import java.io.PrintWriter
import java.nio.file.Paths
import scala.io.Source
import edu.nus.systemtesting.ConstructTestCase
import edu.nus.systemtesting.ExpectsOutput
import edu.nus.systemtesting.PreparedSystem
import edu.nus.systemtesting.TestCase
import edu.nus.systemtesting.TestCaseBuilder
import edu.nus.systemtesting.TestCaseConfiguration
import edu.nus.systemtesting.TestCaseResult
import edu.nus.systemtesting.Testable
import edu.nus.systemtesting.hipsleek.HipTestCase
import edu.nus.systemtesting.hipsleek.HipTestSuiteUsage
import edu.nus.systemtesting.hipsleek.SleekTestCase
import edu.nus.systemtesting.hipsleek.SleekTestSuiteUsage
import edu.nus.systemtesting.hipsleek.ValidateableSleekTestCase
/**
* Generates, runs a subset of some set of testables.
*
* Does not deal with legacy, misnamed `run-fast-tests.pl`.
*/
class RunFast(config: AppConfig) extends UsesRepository(config) {
  val validate = new Validate(config)

  /** Cache file name for a suite's fast-test list, e.g. `.fasttests_hip`. */
  def DefaultFastCacheName(name: String) = s".fasttests_$name"

  // might make sense to use SQLite db here.
  /** One line of the fast-test cache: a test file name plus optional arguments. */
  private case class FastTestRow(filename: String, args: Option[String] = None) {
    // Serialized form: "filename" or "filename args" (single-space separator).
    override def toString(): String =
      filename + (args map (a => " " + a) getOrElse "")
  }

  /**
   * Reads cached fast-test rows from `file`; None if the file doesn't exist.
   * Each line is split on the first space into filename and optional args.
   */
  private def loadFastTests(file: File): Option[List[FastTestRow]] = {
    if (file.exists) {
      val src = Source fromFile file
      val content = src.mkString
      src.close()
      Some(content.lines.toList map { line =>
        line.split(" ", 2) match {
          case Array(filename) => FastTestRow(filename)
          case Array(filename, args) => FastTestRow(filename, Some(args))
        }
      })
    } else {
      None
    }
  }

  /** Writes one serialized row per line to `file`, overwriting any previous content. */
  private def saveToFile(file: File, rows: List[FastTestRow]): Unit = {
    val out = new PrintWriter(file)
    rows foreach { row =>
      out.println(row.toString())
    }
    out.flush()
    out.close()
  }

  /** Cache file for a suite, in the current working directory. */
  private def fileForSuite(suite: Suite): File =
    new File(DefaultFastCacheName(suite.toString()))

  /** Empty argument string maps to None so it round-trips through the cache format. */
  private def args(arguments: String): Option[String] =
    if (arguments == "") None else Some(arguments)

  /** Projects a Testable onto its cache-row representation (filename + args). */
  private def rowFromTestable(tc: Testable): FastTestRow = {
    import tc.{ fileName, arguments }
    FastTestRow(fileName.toString(), args(arguments))
  }

  /** Keeps only the testables whose (filename, args) projection appears in `loaded`. */
  private def filterTestable(all: List[Testable with ExpectsOutput],
                             loaded: List[FastTestRow]): List[Testable with ExpectsOutput] = {
    val loadedSet = loaded.toSet
    all filter { tc => loadedSet contains rowFromTestable(tc) }
  }

  /**
   * Loads the cached fast-test list for a suite (None when no cache file),
   * resolving rows back to the suite's testables. For sleek-validate the
   * test cases are built directly from the cached filenames.
   */
  private def loadFastTests(suite: Suite): Option[List[Testable with ExpectsOutput]] = {
    loadFastTests(fileForSuite(suite)) map { fastTests =>
      suite match {
        case HipOnly() => filterTestable(HipTestSuiteUsage.allTestable, fastTests)
        case SleekOnly() => filterTestable(SleekTestSuiteUsage.allTestable, fastTests)
        case SleekValidateOnly() => {
          // construct ValidateableSleekTestCase directly
          fastTests map { fastTest =>
            TestCaseBuilder(Paths.get("sleek"), Paths.get(fastTest.filename))
          }
        }
      }
    }
  }

  /** Persists the given testables as the suite's fast-test cache. */
  private def saveFastTests(suite: Suite, data: List[Testable]): Unit = {
    saveToFile(fileForSuite(suite), data map rowFromTestable)
  }

  /** Test-case constructor appropriate for the suite. */
  private def constructForSuite(suite: Suite): ConstructTestCase =
    suite match {
      case HipOnly() => HipTestCase.constructTestCase
      case SleekOnly() => SleekTestCase.constructTestCase
      case SleekValidateOnly() => ValidateableSleekTestCase.constructTestCase
    }

  /** The full universe of testables for the suite. */
  private def allTestableForSuite(suite: Suite): List[Testable with ExpectsOutput] =
    suite match {
      case HipOnly() => HipTestSuiteUsage.allTestable
      case SleekOnly() => SleekTestSuiteUsage.allTestable
      case SleekValidateOnly() => validate.allTestable // XXX This is a bit dubious.
    }

  /**
   * Selects a subset of the suite's tests whose cumulative runtime fits in the
   * fast-test budget. Prefers timings from archived results; when those don't
   * cover the budget, runs the whole suite with a short timeout to measure.
   */
  private def generateFastTestablesForSuite(suite: Suite): List[Testable] = {
    // get the universe of testable for the suite,
    val allTestable = allTestableForSuite(suite)
    val construct = constructForSuite(suite)

    // Try to keep the timing under 2 mins for running the tests
    // TODO RunFast customise FastTestTime
    val FastTestTime = 120 * 1000

    val resArch = config.defaultResultsArchive
    // Archived results, keeping only testables which have at least one result.
    val extantResults = allTestable map (resArch.resultsFor) filterNot (_.isEmpty)

    // Average execution time per testable, across its archived results.
    val extantTimeTestablePairs = extantResults map { res =>
      val timings = res map { case (rev, tcr) => tcr.executionTime }
      val avgTiming = timings.foldLeft(0L)({ (sum, time) => sum + time }) / timings.length

      val testable = res.head._2

      (avgTiming, testable)
    }

    val extantTime = extantTimeTestablePairs.foldLeft(0L) { (sum, pair) => sum + pair._1 }

    // if we have sufficent number of results, can compute from that,
    val timeTestablePairs = if (extantTime >= FastTestTime) {
      extantTimeTestablePairs
    } else {
      // otherwise, must run with a short timeout + don't save results (for T/O),
      // so as to see which tests are "quick"

      // Only save results for tests which take 10s or less.
      val QuickTimeout = 10
      val quickConfig = config.copy(saveResultOnTimeout = false,
                                    timeout = QuickTimeout)
      val runHipSleek = new RunHipSleek(quickConfig)
      import runHipSleek.runTests

      // If repo is dirty, this will be needlessly expensive.
      val repoC = repo.identify()

      // n.b. *might* throw UnableToBuildException here
      // XXX BUG: It seems this doesn't save the test case result, even for non-timeout?
      val tsr = runTests(construct, allTestable)(repoC)

      // wait for the results
      tsr.results map { tcr => (tcr.executionTime, tcr) }
    }

    // Greedily take the quickest tests until the budget is filled.
    val sortedTCRs = timeTestablePairs.sortBy { case (time, tc) => time }

    sortedTCRs.foldLeft(List[TestCaseResult]())({ (fastTests, timeTCPair) =>
      val (tcTime, tc) = timeTCPair

      // NOTE(review): recomputing the running total inside the fold is O(n^2);
      // harmless at this scale but could carry the sum through the fold state.
      val totalTime = fastTests.foldLeft(0L) { (sum, tcr) => sum + tcr.executionTime }

      if (totalTime < FastTestTime) {
        fastTests :+ tc
      } else {
        fastTests
      }
    })
  }

  /** Maps a CLI suite name to the set of suites it denotes ("all" covers hip + sleek). */
  private def suiteSetFromString(suite: String): SuiteSet =
    suite match {
      case "hip" => HipOnly()
      case "sleek" => SleekOnly()
      case "all" => All()
      case "sleek-validate" => SleekValidateOnly()
      case "validate-sleek" => SleekValidateOnly()
      case _ => throw new IllegalArgumentException(s"Unknown suite: $suite")
    }

  // NOTE(review): not referenced in this file — possibly kept for callers elsewhere.
  private def suiteFromString(suite: String): Suite =
    suite match {
      case "hip" => HipOnly()
      case "sleek" => SleekOnly()
      case "sleek-validate" => SleekValidateOnly()
      case "validate-sleek" => SleekValidateOnly()
      case _ => throw new IllegalArgumentException(s"Unknown suite: $suite")
    }

  /** Entry point: runs the fast subset for every suite named by the CLI config. */
  def run(): Unit = {
    import config.{ runFastSuite, runFastGenerate => isForcedGenerate }

    val suiteName = runFastSuite.getOrElse(throw new IllegalArgumentException("must be given a suite name"))
    val suiteSet = suiteSetFromString(suiteName)

    suiteSet.suites foreach { suite =>
      runForSuite(suite, isForcedGenerate)
    }
  }

  /**
   * Runs the fast subset for one suite: loads (or, when absent or forced,
   * generates and caches) the fast-test list, then executes it.
   */
  def runForSuite(suite: Suite, isForcedGenerate: Boolean): Unit = {
    //
    // Step 1. Generate set of fast tests (if need be)
    //
    val fastTests = loadFastTests(suite) match {
      case Some(xs) if !isForcedGenerate => {
        println("Loading fast tests from cached file.")
        xs
      }
      case Some(xs) if isForcedGenerate => {
        println("File exists, but flag forced generation. Generating.")
        val xs = generateFastTestablesForSuite(suite)
        println("Saving cache of fast tests.")
        saveFastTests(suite, xs)
        xs
      }
      case None => {
        println("No fast tests cache file found for this suite. Generating.")
        val xs = generateFastTestablesForSuite(suite)
        println("Saving cache of fast tests.")
        saveFastTests(suite, xs)
        xs
      }
    }

    //
    // Step 2. With the set of generated tests in hand, run these.
    //
    val testable = filterTestable(allTestableForSuite(suite), fastTests map rowFromTestable)

    // TODO RunFast customise revision(?)
    val repoC = repo.identify()

    val runHipSleek = new RunHipSleek(config)
    import runHipSleek.runTests

    runTests(constructForSuite(suite), testable)(repoC)
  }
}
| rgoulter/system-testing | src/main/scala/edu/nus/systemtesting/hipsleek/app/RunFast.scala | Scala | mit | 8,508 |
package com.twitter.finagle.redis.integration
import com.twitter.finagle.redis.naggati.RedisClientTest
import com.twitter.finagle.redis.tags.{RedisTest, ClientTest}
import com.twitter.util.Await
import com.twitter.finagle.redis.util.{CBToString, StringToChannelBuffer}
import org.junit.Ignore
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
/**
 * Integration tests for Redis key commands (DEL, DUMP, SCAN, EXISTS, TTL,
 * EXPIREAT, MOVE, PEXPIRE/PTTL, PEXPIREAT). Requires a live Redis server;
 * currently @Ignore'd.
 */
@Ignore
@RunWith(classOf[JUnitRunner])
final class KeyClientIntegrationSuite extends RedisClientTest {

  test("Correctly perform the DEL command", RedisTest, ClientTest) {
    withRedisClient { client =>
      Await.result(client.set(foo, bar))
      Await.result(client.del(Seq(foo)))
      assert(Await.result(client.get(foo)) == None)
    }
  }

  test("Correctly perform the DUMP command", RedisTest, ClientTest) {
    withRedisClient { client =>
      val k = StringToChannelBuffer("mykey")
      val v = StringToChannelBuffer("10")
      val expectedBytes: Array[Byte] = Array(0, -64, 10, 6, 0, -8, 114, 63, -59, -5, -5, 95, 40)
      Await.result(client.set(k, v))
      // Fix: `==` on arrays compares references and always failed here;
      // compare element-wise instead.
      val dumped = Await.result(client.dump(k)).fold(fail("Expected result for DUMP"))(_.array)
      assert(dumped sameElements expectedBytes)
      Await.result(client.del(Seq(foo)))
      assert(Await.result(client.dump(foo)) == None)
    }
  }

  // Once the scan/hscan pull request gets merged into Redis master,
  // the tests can be uncommented.
  ignore("Correctly perform the SCAN command", RedisTest, ClientTest) {
    withRedisClient { client =>
      Await.result(client.set(foo, bar))
      Await.result(client.set(baz, boo))
      assert(CBToString(Await.result(client.scan(0, None, None)).apply(1)) == "baz")

      val withCount = Await.result(client.scan(0, Some(10), None))
      assert(CBToString(withCount(0)) == "0")
      assert(CBToString(withCount(1)) == "baz")
      assert(CBToString(withCount(2)) == "foo")

      val pattern = StringToChannelBuffer("b*")
      val withPattern = Await.result(client.scan(0, None, Some(pattern)))
      assert(CBToString(withPattern(0)) == "0")
      assert(CBToString(withPattern(1)) == "baz")
    }
  }

  test("Correctly perform the EXISTS command", RedisTest, ClientTest) {
    withRedisClient { client =>
      Await.result(client.set(foo, bar))
      // Assert the boolean directly rather than comparing to `true`.
      assert(Await.result(client.exists(foo)))
    }
  }

  test("Correctly perform the TTL command", RedisTest, ClientTest) {
    withRedisClient { client =>
      Await.result(client.set(foo, bar))
      val time = 20L
      assert(Await.result(client.expire(foo, time)))
      // getOrElse replaces the Some/None match; fail is by-name so it only
      // triggers when the TTL is absent.
      val result = Await.result(client.ttl(foo)) getOrElse fail("Could not retrieve key for TTL test")
      assert(result <= time)
    }
  }

  test("Correctly perform the EXPIREAT command", RedisTest, ClientTest) {
    withRedisClient { client =>
      Await.result(client.set(foo, bar))

      // TODO: this isn't actually a TTL, which means that the second assertion
      // below is true for uninteresting reasons.
      val ttl = System.currentTimeMillis() + 20000L
      assert(Await.result(client.expireAt(foo, ttl)))
      val result = Await.result(client.ttl(foo)) getOrElse fail("Could not retrieve key for TTL")
      assert(result <= ttl)
    }
  }

  test("Correctly perform the MOVE command", RedisTest, ClientTest) {
    withRedisClient { client =>
      val fromDb = 14
      val toDb = 15
      Await.result(client.select(toDb))
      Await.result(client.del(Seq(foo)))
      Await.result(client.select(fromDb))

      // This following fails with an exceptions since bar is not a database.
      // assert(Await.result(client.move(foo, bar)) == false)

      Await.result(client.set(foo, bar))
      assert(Await.result(client.move(foo, StringToChannelBuffer(toDb.toString))))
      Await.result(client.del(Seq(foo))) // clean up
    }
  }

  test("Correctly perform the PEXPIRE & PTL commands", RedisTest, ClientTest) {
    withRedisClient { client =>
      val ttl = 100000L
      Await.result(client.set(foo, bar))
      assert(Await.result(client.pExpire(foo, ttl)))
      val result = Await.result(client.pTtl(foo)) getOrElse fail("Could not retrieve pTtl for key")
      assert(result <= ttl)
    }
  }

  test("Correctly perform the PEXPIREAT & PTL commands", RedisTest, ClientTest) {
    withRedisClient { client =>
      val horizon = 20000L
      val ttl = System.currentTimeMillis() + horizon
      Await.result(client.set(foo, bar))
      assert(Await.result(client.pExpireAt(foo, ttl)))
      val result = Await.result(client.pTtl(foo)) getOrElse fail("Could not retrieve pTtl for key")
      assert(result <= horizon)
    }
  }
}
| liamstewart/finagle | finagle-redis/src/test/scala/com/twitter/finagle/redis/commands/key/KeyClientIntegrationSuite.scala | Scala | apache-2.0 | 4,866 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.geotools.tools
import com.beust.jcommander.JCommander
import org.locationtech.geomesa.tools._
import org.locationtech.geomesa.tools.export.{ConvertCommand, GenerateAvroSchemaCommand}
import org.locationtech.geomesa.tools.status._
/**
 * CLI entry point for the GeoTools-backed GeoMesa tools (`geomesa-gt`).
 * Registers the data, export and ingest commands plus the shared utility
 * commands from geomesa-tools.
 */
object GeoToolsRunner extends Runner {

  override val name: String = "geomesa-gt"

  // NOTE(review): the Seq order likely determines command listing order in
  // help output — preserve it when adding commands.
  override def createCommands(jc: JCommander): Seq[Command] = Seq(
    new data.GeoToolsCreateSchemaCommand,
    new data.GeoToolsDeleteFeaturesCommand,
    new data.GeoToolsDescribeSchemaCommand,
    new data.GeoToolsGetSftConfigCommand,
    new data.GeoToolsGetTypeNamesCommand,
    new data.GeoToolsRemoveSchemaCommand,
    new data.GeoToolsUpdateSchemaCommand,
    new export.GeoToolsExportCommand,
    new export.GeoToolsPlaybackCommand,
    new ingest.GeoToolsIngestCommand,
    // common commands, placeholders for script functions
    new ConvertCommand,
    new ConfigureCommand,
    new ClasspathCommand,
    new EnvironmentCommand,
    new GenerateAvroSchemaCommand,
    new HelpCommand(this, jc),
    new ScalaConsoleCommand,
    new VersionCommand
  )
}
| aheyne/geomesa | geomesa-gt/geomesa-gt-tools/src/main/scala/org/locationtech/geomesa/geotools/tools/GeoToolsRunner.scala | Scala | apache-2.0 | 1,581 |
/**
* The MIT License (MIT)
*
* Copyright (c) 2013 Israel Freitas([email protected])
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
package brain.models
import org.scalatest.FunSpec
import org.scalatest.Matchers
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import org.scalatest.BeforeAndAfter
import org.scalatest.BeforeAndAfterEach
import aimltoxml.aiml.Category
import aimltoxml.aiml.Text
import aimltoxml.aiml.Srai
import aimltoxml.aiml.TemplateElement
import aimltoxml.aiml.Random
import org.scalatest.FlatSpec
import aimltoxml.aiml.Think
import aimltoxml.aiml.Star
import aimltoxml.aiml.Get
class TeachingToCategoryAdapterTest extends FunSpec with Matchers with BeforeAndAfter {
var adapter: TeachingToCategoryAdapter = null
var teaching: Teaching = null
var validTeaching: Teaching = null
before {
validTeaching = mock(classOf[Teaching])
when(validTeaching.whenTheUserSays).thenReturn(""" hi
hello
hello there""")
when(validTeaching.say).thenReturn("hi")
}
describe("#TeachingToCategoryAdapter") {
def setup = {
teaching = mock(classOf[Teaching])
}
it("requires a valid Teaching") {
setup
new TeachingToCategoryAdapter(validTeaching)
}
it("when the teaching is null throws an exception") {
setup
intercept[IllegalArgumentException](new TeachingToCategoryAdapter(null))
}
it("when present, requires a valid 'respondingTo'")(pending)
it("when present, requires a valid 'think'")(pending)
}
describe("#selectDefaultPattern") {
def setup = {
adapter = new TeachingToCategoryAdapter(validTeaching)
}
it("returns the more simple pattern based on the its complexity") {
setup
adapter.selectDefaultPattern(Set("hi")) should be("hi")
adapter.selectDefaultPattern(Set("hello there", "hey")) should be("hey")
// with star
adapter.selectDefaultPattern(Set("hello there", "hey *")) should be("hello there")
adapter.selectDefaultPattern(Set("hello there *", "hey *")) should be("hey *")
adapter.selectDefaultPattern(Set("hey * *", "hey *")) should be("hey *")
// with underscore
adapter.selectDefaultPattern(Set("hello there", "hey _")) should be("hello there")
adapter.selectDefaultPattern(Set("hello there _", "hey _")) should be("hey _")
adapter.selectDefaultPattern(Set("hey _ _", "hey _")) should be("hey _")
}
}
describe("#calculateThePatternComplexity") {
def setup = {
adapter = new TeachingToCategoryAdapter(validTeaching)
}
it("considers the amount of chars") {
setup
adapter.calculateThePatternComplexity("hi") should be(0.002)
}
it("considers the amount of stars") {
setup
adapter.calculateThePatternComplexity("*") should be(1.001)
adapter.calculateThePatternComplexity("* *") should be(2.003)
}
it("considers the amount of underscores") {
setup
adapter.calculateThePatternComplexity("_") should be(1.001)
adapter.calculateThePatternComplexity("_ _") should be(2.003)
}
}
    // countSpecialChar: counts occurrences of a special character in a pattern,
    // collapsing consecutive runs (e.g. "***") into a single occurrence.
    describe("#countSpecialChar") {
        def setup = {
            adapter = new TeachingToCategoryAdapter(validTeaching)
        }
        it("count united special char as only one star") {
            setup
            // Each contiguous run of '*' counts once, wherever it appears.
            adapter.countSpecialChar("*", "one *** sequence ") should be(1)
            adapter.countSpecialChar("*", "*** first") should be(1)
            adapter.countSpecialChar("*", "last **") should be(1)
            adapter.countSpecialChar("*", "one *** two ****** sequences ") should be(2)
            adapter.countSpecialChar("*", "one *** two ****** three ** sequences ") should be(3)
        }
        it("returns 0 when no special char is found") {
            setup
            adapter.countSpecialChar("*", "pattern without any star") should be(0)
        }
        it("returns the number of occurrences of the special character") {
            setup
            adapter.countSpecialChar("*", "pattern with only one *") should be(1)
            adapter.countSpecialChar("*", "* first") should be(1)
            adapter.countSpecialChar("*", "last *") should be(1)
            adapter.countSpecialChar("*", "mid*dle") should be(1)
            adapter.countSpecialChar("*", "pattern with only one * ") should be(1)
            adapter.countSpecialChar("*", "pattern with only one * and two *") should be(2)
            adapter.countSpecialChar("*", "pattern with only one * and two * ") should be(2)
            adapter.countSpecialChar("*", "pattern with only one * and two * and three *") should be(3)
        }
    }
describe("#createCategory") {
def setup = {
adapter = new TeachingToCategoryAdapter(validTeaching)
}
it("when the pattern is equals to the default pattern then returns a category with the fully filled template") {
val expectedCategory = new Category("hi", Set(Think(List.empty[TemplateElement]), Random("hello there")))
adapter.createCategory("hi", "hi", "*", List.empty[String], Set("hello there")) should be(expectedCategory)
}
it("when the pattern is not equals to the default pattern then returns a category which template contains only a srai pointing to the default pattern") {
val expectedCategory = Category("hello", Srai("hi"))
adapter.createCategory("hello", "hi", "*", List.empty[String], Set("hello there")) should be(expectedCategory)
}
}
    // Placeholder specs for the remaining parser helpers — not yet implemented.
    describe("#createTemplateElements"){ pending }
    describe("#parseMemorize"){ pending }
    describe("#parseSay"){ pending }
    describe("#parseKeyValue"){ pending }
    describe("#parseValue"){ pending }
describe("parseText"){
def setup = {
adapter = new TeachingToCategoryAdapter(validTeaching)
}
it("return List(Text) if there is no getSyntax"){
adapter.parseText("only text") should be (List(Text("only text")))
}
it("return List(Text, Get) if there is getSyntax"){
adapter.parseText("text and ${get}") should be (List(Text("text and "), Get("get")))
}
it("return List(Text, Star) if there is getSyntax(*)"){
adapter.parseText("text and ${*}") should be (List(Text("text and "), Star(1)))
}
it("return List(Text, Get, Star) if there is getSyntax(*)"){
adapter.parseText("text, ${get} and ${*}") should be (List(Text("text, "), Get("get"), Text(" and "), Star(1)))
}
it("return List(Get, Star) if there is getSyntax(*)"){
adapter.parseText("${get}${*}") should be (List(Get("get"), Star(1)))
}
}
//GetUtil ####
    // GetUtil.parse: turns a "${...}" expression into Get(name) or Star(index).
    describe("GetUtil.parse"){
        it("return Get(test) in '${ test }'"){
            // Whitespace inside the braces is trimmed.
            GetUtil.parse("${ test }") should be (Get("test"))
        }
        it("return Star(1) in '${ *1 }'"){
            GetUtil.parse("${ *1 }") should be (Star(1))
        }
        it("return Star(1) if equals to '${*}'"){
            GetUtil.parse("${*1}") should be (Star(1))
        }
        it("return Star(i) if equals to '${*i}' (i=1)"){
            GetUtil.parse("${*1}") should be (Star(1))
        }
        it("return Star(i) if equals to '${*i}' (i=20)"){
            GetUtil.parse("${*20}") should be (Star(20))
        }
        it("throws an exception if the get is empty ('${}')"){
            intercept[InvalidGetSyntaxException](GetUtil.parse("${}"))
        }
    }
    // GetUtil.validate: syntactic validation of "${...}" expressions; signals each
    // malformation with a dedicated exception type.
    describe("GetUtil.validate"){
        it("throws an exception if i is not a Number in ${*i}"){
            intercept[InvalidStarIndexException](GetUtil.validate("${*aaa}"))
        }
        it("throws an exception if i < 1"){
            // Star indexes are 1-based.
            intercept[InvalidStarIndexException](GetUtil.validate("${*0}"))
        }
        it("throws an exception if the get is empty ('${}')"){
            intercept[InvalidGetSyntaxException](GetUtil.validate("${}"))
        }
        it("throws an exception if the get is empty ('${ }')"){
            intercept[InvalidGetSyntaxException](GetUtil.validate("${ }"))
        }
        it("throws an exception if getSyntax has a name with empty space (${some name})"){
            intercept[InvalidGetSyntaxException](GetUtil.validate("${some name}"))
        }
        it("throws an exception if getSyntax has a name with empty space (${* 1})"){
            intercept[InvalidGetSyntaxException](GetUtil.validate("${* 1}"))
        }
        it("throws an exception if getSyntax does not match"){
            intercept[InvalidGetSyntaxException](GetUtil.validate("*1"))
        }
        it("throws an exception if getSyntax does not match (2)"){
            intercept[InvalidGetSyntaxException](GetUtil.validate("some text"))
        }
        it("throws an exception get's Name starts with an invalid char"){
            intercept[InvalidVariableNameException](GetUtil.validate("${-name}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${$name}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${@name}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${1name}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${.name}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${áname}"))
        }
        it("throws an exception get's Name contains any invalid char"){
            intercept[InvalidVariableNameException](GetUtil.validate("${user.name}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${user@name}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${n#me}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${n%me}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${nam&}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${na(e}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${tést}"))
            intercept[InvalidVariableNameException](GetUtil.validate("${n@me}"))
        }
        it("pass ok if syntax ok"){
            // None of these should throw.
            GetUtil.validate("${*1}")
            GetUtil.validate("${ *1 }")
            GetUtil.validate("${name}")
            GetUtil.validate("${ name }")
        }
    }
    // First-character rule for get names: returns the offending character, or
    // None when the name may legally start this way ('*' is allowed here).
    describe("GetUtil.findInvalidCharacterForInitializeGetName"){
        it("return all char that differ from [a-zA-Z_0-9\\\\_\\\\*]"){
            GetUtil.findInvalidCharacterForInitializeGetName("name") should be (None)
            GetUtil.findInvalidCharacterForInitializeGetName("*name")should be (None)
            GetUtil.findInvalidCharacterForInitializeGetName("-name")should be (Some("-"))
            GetUtil.findInvalidCharacterForInitializeGetName("$name")should be (Some("$"))
            GetUtil.findInvalidCharacterForInitializeGetName("@name")should be (Some("@"))
            GetUtil.findInvalidCharacterForInitializeGetName("1name")should be (Some("1"))
            GetUtil.findInvalidCharacterForInitializeGetName(".name")should be (Some("."))
            // Accented characters are outside the allowed ASCII set.
            GetUtil.findInvalidCharacterForInitializeGetName("áname")should be (Some("á"))
        }
    }
    // Body-character rule for get names: returns the first character outside the
    // allowed set (word characters, '-', '*').
    describe("GetUtil.findInvalidCharacterForGetName"){
        it("return all char that differ from [a-zA-Z_0-9\\\\_\\\\-\\\\*]"){
            GetUtil.findInvalidCharacterForGetName("user name") should be (Some(" "))
            GetUtil.findInvalidCharacterForGetName("user.name") should be (Some("."))
            GetUtil.findInvalidCharacterForGetName("user@name") should be (Some("@"))
            GetUtil.findInvalidCharacterForGetName("n#me") should be (Some("#"))
            GetUtil.findInvalidCharacterForGetName("n%me") should be (Some("%"))
            GetUtil.findInvalidCharacterForGetName("nam&") should be (Some("&"))
            GetUtil.findInvalidCharacterForGetName("na(me") should be (Some("("))
            GetUtil.findInvalidCharacterForGetName("tést") should be (Some("é"))
            GetUtil.findInvalidCharacterForGetName("n@me") should be (Some("@"))
        }
    }
    // GetUtil.findIn: locates the first "${...}" occurrence in a sentence.
    describe("GetUtil.find"){
        it("return Some(${}) in 'this is an empty get ${}'"){
            GetUtil.findIn("this is an empty get ${}") should be (Some("${}"))
        }
        it("return Some(${ }) in 'this is an empty get ${ }'"){
            GetUtil.findIn("this is an empty get ${ }") should be (Some("${ }"))
        }
        it("return Some(${a}) in 'this is an non-empty get ${a}'"){
            GetUtil.findIn("this is an empty get ${a}") should be (Some("${a}"))
        }
        it("return Some(${a}) in 'this is an non-empty get ${ a }'"){
            // NOTE(review): the description mentions '${ a }' but the input below
            // uses '${a}' — confirm which variant was meant to be exercised.
            GetUtil.findIn("this is an empty get ${a}") should be (Some("${a}"))
        }
        it("return Some(${#a}) in 'this is an non-empty get ${ #a }'"){
            // NOTE(review): description says '${ #a }' but the input has no spaces.
            GetUtil.findIn("this is an empty get ${#a}") should be (Some("${#a}"))
        }
        it("return Some(${${a}) in 'this is an non-empty get ${ ${a} }'"){
            GetUtil.findIn("this is an empty get ${${a}}") should be (Some("${${a}"))
        }
        it("return None in 'this is an text with get syntax'"){
            GetUtil.findIn("this is an text with get syntax") should be (None)
        }
    }
describe("#toCategory") {
def setup = {
adapter = new TeachingToCategoryAdapter(validTeaching)
}
it("returns a set with a category for each sentence said by the user") {
val categories = adapter.toCategory
categories.size should be(3)
categories.map(_.pattern) should be(Set(Text("hi"), Text("hello"), Text("hello there")))
categories.toList(0).templateElements should be( Set(Think(List()), new Random(Set(List(Text("hi"))))))
categories.toList(1).templateElements should be( Set(Srai("hi")))
categories.toList(2).templateElements should be( Set(Srai("hi")) )
}
}
    // KeyValueValidator.validateKeyValue: syntactic checks on "key=value" strings.
    describe("#validateKeyValue"){
        def setup = {
        }
        it("throws an exception if there is no '='"){
            // should not raise an error!
            KeyValueValidator.validateKeyValue("test=")
            intercept[NoAttributionSignException](KeyValueValidator.validateKeyValue(""))
            intercept[NoAttributionSignException](KeyValueValidator.validateKeyValue("someKey"))
        }
        it("throws an exception if there is more than one '='"){
            intercept[MoreThanOneAttributionSignException](KeyValueValidator.validateKeyValue("k1=v1 k2=v2"))
            intercept[MoreThanOneAttributionSignException](KeyValueValidator.validateKeyValue("k1==v1"))
        }
        it("throws an exception if there is no Key"){
            intercept[NoVariableNameException](KeyValueValidator.validateKeyValue(" =value"))
        }
        it("validates the Key name"){
            //verify(mock, times(1)).validateKeyName("???")
            pending
        }
        it("throws an exception if empty 'get' (${}) is present in Value"){ pending }
        it("throws an exception if Value part contains space ' ' ('age=${some value})"){ pending }
        it("throws an exception if unclosed 'get' (${) is present"){ pending }
    }
//KeyValueUtil
describe("findKey"){
it("return Some('k') (without space) in 'k=value'"){
val key = KeyValueUtil.findKey("k=value")
if(key.isEmpty) throw new Exception("key not found!")
key.get should be ("k")
}
it("return Some('key') (without space) in ' key =value'"){
val key = KeyValueUtil.findKey(" key = value")
if(key.isEmpty) throw new Exception("key not found!")
key.get should be ("key")
}
it("return Some('key') (without space) in ' key =value'"){
val key = KeyValueUtil.findKey(" key = value")
if(key.isEmpty) throw new Exception("key not found!")
key.get should be ("key")
}
it("return Some('key') (without space) in ' key =value'"){
val key = KeyValueUtil.findKey(" key = value")
if(key.isEmpty) throw new Exception("key not found!")
key.get should be ("key")
}
it("return Some('a key') (without space) in ' a key =value'"){
// \\s is an invalid character, but it does not matter in this moment.
val key = KeyValueUtil.findKey("@key = value")
if(key.isEmpty) throw new Exception("key not found!")
key.get should be ("@key")
}
it("return Some('@key') (without space) in ' key =value'"){
// @ is an invalid start character name, but it does not matter in this moment.
val key = KeyValueUtil.findKey("@key = value")
if(key.isEmpty) throw new Exception("key not found!")
key.get should be ("@key")
}
it("return None in ' =value'"){
KeyValueUtil.findKey(" = value") should be (None)
}
it("return None in '=value'"){
KeyValueUtil.findKey("= value") should be (None)
}
}
    // First-character rule for keys: unlike get names, '*' is NOT allowed to
    // start a key name.
    describe("#findInvalidCharacterForInitializeKeyName"){
        it("return all char that differ from [a-zA-Z_0-9\\\\_\\\\-]"){
            KeyValueValidator.findInvalidCharacterForInitializeKeyName("-key") should be (Some("-"))
            KeyValueValidator.findInvalidCharacterForInitializeKeyName("$name")should be (Some("$"))
            KeyValueValidator.findInvalidCharacterForInitializeKeyName("@name")should be (Some("@"))
            KeyValueValidator.findInvalidCharacterForInitializeKeyName("1name")should be (Some("1"))
            KeyValueValidator.findInvalidCharacterForInitializeKeyName(".name")should be (Some("."))
            KeyValueValidator.findInvalidCharacterForInitializeKeyName("áname")should be (Some("á"))
            KeyValueValidator.findInvalidCharacterForInitializeKeyName("*name")should be (Some("*"))
        }
    }
    // Body-character rule for keys: word characters and '-' only; '*' is invalid
    // anywhere in a key name.
    describe("#findInvalidCharacterForName"){
        it("return all char that differ from [a-zA-Z_0-9\\\\_\\\\-]"){
            KeyValueValidator.findInvalidCharacterForName("user name") should be (Some(" "))
            KeyValueValidator.findInvalidCharacterForName("user.name") should be (Some("."))
            KeyValueValidator.findInvalidCharacterForName("user@name") should be (Some("@"))
            KeyValueValidator.findInvalidCharacterForName("n#me") should be (Some("#"))
            KeyValueValidator.findInvalidCharacterForName("n%me") should be (Some("%"))
            KeyValueValidator.findInvalidCharacterForName("nam&") should be (Some("&"))
            KeyValueValidator.findInvalidCharacterForName("na(me") should be (Some("("))
            KeyValueValidator.findInvalidCharacterForName("tést") should be (Some("é"))
            KeyValueValidator.findInvalidCharacterForName("n@me") should be (Some("@"))
            KeyValueValidator.findInvalidCharacterForName("n*me") should be (Some("*"))
        }
    }
    // KeyValueValidator.validateKey: key names must start with a letter or '_'
    // and contain only word characters or '-'.
    describe("validateKey"){
        it("throws an exception if there is no Key"){
            intercept[NoVariableNameException](KeyValueValidator.validateKey(""))
        }
        it("throws an exception if Key does not starts with a letter"){
            // correct (must not throw)
            KeyValueValidator.validateKey("_userName")
            KeyValueValidator.validateKey("userName")
            KeyValueValidator.validateKey("UserName")
            // wrong (must throw)
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("$name"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("@name"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("1name"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey(".name"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("áname"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("*name"))
        }
        it("throws an exception if Key contains space (' ') in its name"){
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("user name"))
        }
        it("throws an exception if Key contains something differ from [a-zA-Z_0-9\\\\_\\\\-]"){
            // correct (must not throw):
            KeyValueValidator.validateKey("username")
            KeyValueValidator.validateKey("userName")
            KeyValueValidator.validateKey("UserName")
            KeyValueValidator.validateKey("user_name")
            KeyValueValidator.validateKey("user-name")
            KeyValueValidator.validateKey("_userName")
            // wrong (must throw):
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("user name"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("user.name"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("user@name"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("n#me"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("n%me"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("nam&"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("na(me"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("tést"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("n@me"))
            intercept[InvalidVariableNameException](KeyValueValidator.validateKey("n*me"))
        }
    }
    // TODO(review): empty spec — Memorize.validate has no tests yet.
    describe("Memorize.validate"){
    }
}
| ifreitas/brain | src/test/scala/brain/models/TeachingToCategoryAdapterTest.scala | Scala | apache-2.0 | 22,412 |
package org.ferrit.core.crawler
import akka.actor.{Actor, ActorRef, Props, Terminated}
import akka.actor.OneForOneStrategy
import akka.actor.SupervisorStrategy.Stop
import akka.pattern.ask
import akka.pattern.pipe
import akka.event.Logging
import akka.routing.Listen
import akka.util.Timeout
import scala.concurrent.Future
import scala.concurrent.duration._
import org.ferrit.core.crawler.CrawlWorker.{Run, Started, StartOkay, StartFailed}
import org.ferrit.core.crawler.CrawlWorker.{StopCrawl, Stopped}
import org.ferrit.core.http.HttpClient
import org.ferrit.core.model.CrawlJob
import org.ferrit.core.parser.MultiParser
import org.ferrit.core.robot.{DefaultRobotRulesCache, RobotRulesCacheActor}
import org.ferrit.core.uri.InMemoryFrontier
import org.ferrit.core.uri.InMemoryUriCache
/**
 * Manages a collection of running crawler jobs up to a given limit.
 * Accepts the client protocol from the companion object, spawns one
 * CrawlWorker actor per job, and tracks active jobs until the worker
 * terminates.
 */
class CrawlerManager(
  node: String,
  userAgent: String,
  maxCrawlers: Int,
  httpClient: HttpClient,
  robotRulesCache: ActorRef
) extends Actor {
  import CrawlerManager._
  private [crawler] implicit val execContext = context.system.dispatcher
  private [crawler] val log = Logging(context.system, getClass)
  // Timeout for the internal ask() handshake with a freshly created worker.
  private [crawler] val askTimeout = new Timeout(1.second)
  // Pairs a worker actor with the job it is running.
  private [crawler] case class JobEntry(crawler: ActorRef, job: CrawlJob)
  // Active jobs keyed by jobId; mutated only from this actor's receive.
  private [crawler] var jobs: Map[String, JobEntry] = Map.empty
  // Crawlers should not restart if they crash
  override val supervisorStrategy = OneForOneStrategy(0, 1.second) {
    case _: Exception => Stop
  }
  override def receive = messagesFromClients orElse messagesFromCrawlers
  // Client-facing protocol (see companion object for the message types).
  def messagesFromClients: Receive = {
    case JobsQuery() =>
      sender ! JobsInfo(jobs.values.map(_.job).toSeq)
    case StartJob(config, listeners) =>
      if (jobs.size >= maxCrawlers) {
        sender ! JobStartFailed(new CrawlRejectException(tooManyCrawlers))
      } else if (jobs.exists(pair => pair._2.job.crawlerId == config.id)) {
        // Only one active job per crawler configuration is allowed.
        sender ! JobStartFailed(new CrawlRejectException(crawlerExists))
      } else {
        // Fall back to the manager-wide user agent when the config has none.
        val resolvedConfig = config.userAgent match {
          case Some(ua) => config
          case None => config.copy(userAgent = Some(userAgent))
        }
        startCrawlJob(resolvedConfig, listeners) pipeTo sender
      }
    case StopJob(id) =>
      val reply = jobs.get(id) match {
        case None => JobNotFound
        case Some(entry) =>
          entry.crawler ! StopCrawl
          StopAccepted(Seq(entry.job.jobId))
      }
      sender ! reply
    case StopAllJobs() =>
      // Ask every worker to stop; actual removal happens on Terminated.
      val ids:Seq[String] = jobs.map({pair =>
        val crawler = pair._2.crawler
        val job = pair._2.job
        crawler ! StopCrawl
        job.jobId
      }).toSeq
      sender ! StopAccepted(ids)
  }
  def messagesFromCrawlers: Receive = {
    // Stopped is intentionally ignored here; the job entry is removed when the
    // worker actually terminates (Terminated below).
    case CrawlWorker.Stopped(outcome, job) =>
    case Terminated(child) => removeJob(child)
  }
  /* = = = = = = = = = = = Implementation = = = = = = = = = = = */
  // Spawns a watched CrawlWorker for the config, registers the job, and asks
  // the worker to run. The returned Future resolves to the new CrawlJob on
  // success or a JobStartFailed on failure.
  def startCrawlJob(config: CrawlConfig, listeners: Seq[ActorRef]):Future[AnyRef] = {
    val newJob = CrawlJob.create(config, node)
    val crawler = context.actorOf(Props(
      classOf[CrawlWorker],
      newJob,
      config,
      new InMemoryFrontier,
      new InMemoryUriCache,
      httpClient,
      robotRulesCache,
      MultiParser.default,
      new DefaultStopRule
    ))
    context.watch(crawler)
    listeners.foreach(l => crawler ! Listen(l))
    jobs = jobs + (newJob.jobId -> JobEntry(crawler, newJob))
    crawler
      .ask(Run)(askTimeout)
      .map({
        case StartOkay(msg, job) => newJob
        case StartFailed(t, config) => JobStartFailed(t)
      })
  }
  // Drops the job entry owned by the terminated worker, if any.
  def removeJob(child: ActorRef):Unit = {
    jobs.find(_._2.crawler == child) match {
      case Some(pair) =>
        val id = pair._1
        jobs = jobs - id
      case None =>
    }
  }
}
/**
 * Message protocol and user-facing error strings for the crawler manager.
 */
object CrawlerManager {
  // Request: start a crawl for the given config; listeners are wired to the worker.
  case class StartJob(config: CrawlConfig, crawlListeners: Seq[ActorRef])
  // Reply when a job could not be started (limit reached, duplicate, or worker failure).
  case class JobStartFailed(t: Throwable)
  // Request: stop the job with the given id.
  case class StopJob(id: String)
  // Request: stop every active job.
  case class StopAllJobs()
  case class StopAccepted(ids: Seq[String]) // is not a guarantee that job stopped
  case object JobNotFound
  // Request: list all active jobs; answered with JobsInfo.
  case class JobsQuery()
  case class JobsInfo(jobs: Seq[CrawlJob])
  val tooManyCrawlers = "The maximum number of active crawlers is reached"
  val crawlerExists = "There is already an active crawler with same crawler configuration"
}
import stainless.lang._
import stainless.annotation._
import stainless.proof._
// Stainless verification benchmark: all assertions below are expected to be
// proven valid by the verifier. The code is intentionally left untouched.
object LawTypeArgsElim {
  // A binary operation on A whose implementations must be commutative (someLaw).
  abstract class Structure[A] {
    def doSomething(x: A, y: A): A
    // Stainless law: commutativity of doSomething for every implementation.
    @law
    def someLaw(x: A, y: A): Boolean = {
      doSomething(x, y) == doSomething(y, x)
    }
  }
  // BigInt instance: addition; its commutativity discharges the law.
  case class BigIntStructure() extends Structure[BigInt] {
    override def doSomething(x: BigInt, y: BigInt): BigInt = {
      x + y
    }
    override def someLaw(x: BigInt, y: BigInt): Boolean = {
      super.someLaw(x, y) because {
        x + y == y + x
      }
    }
  }
  def ok1[A](s: Structure[A], a: A, b: A) = {
    assert(s.someLaw(a, b)) // valid
    assert(s.doSomething(a, b) == s.doSomething(b, a)) // valid
  }
  // Lifts a Structure[A] to Option[A]; None() absorbs the operation.
  case class OptionStructure[A](ev: Structure[A]) extends Structure[Option[A]] {
    override def doSomething(x: Option[A], y: Option[A]): Option[A] = {
      (x, y) match {
        case (None(), None()) => None()
        case (_, None()) => None()
        case (None(), _) => None()
        case (Some(a), Some(b)) => Some(ev.doSomething(a, b))
      }
    }
    override def someLaw(x: Option[A], y: Option[A]): Boolean = {
      super.someLaw(x, y) because {
        (x, y) match {
          case (None(), None()) => true
          case (_, None()) => true
          case (None(), _) => true
          case (Some(a), Some(b)) => ev.someLaw(a, b)
        }
      }
    }
  }
  def ok2[A](s: Structure[Option[A]], a: Option[A], b: Option[A]) = {
    assert(s.someLaw(a, b)) // valid
    assert(s.doSomething(a, b) == s.doSomething(b, a)) // valid
  }
}
| epfl-lara/stainless | frontends/benchmarks/verification/valid/LawTypeArgsElim.scala | Scala | apache-2.0 | 1,652 |
package com.twitter.finagle.example.kestrel
import com.twitter.conversions.time._
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.kestrel.protocol.Kestrel
import com.twitter.finagle.kestrel.{ReadHandle, Client}
import com.twitter.util.JavaTimer
import com.twitter.finagle.service.Backoff
import org.jboss.netty.util.CharsetUtil
import java.util.concurrent.atomic.AtomicBoolean
/**
* This example show how to configure a client doing reliable reads
* from multiple kestrel servers.
*
* This shows how to process messages continuously from a kestrel queue.
*/
object KestrelClient {
  def main(args: Array[String]) {
    // Configure your duration as necessary.
    // On a production server, you would typically not use this part and just
    // let the client read continuously until a shutdown is requested.
    val duration = 10.seconds
    println("running for %s".format(duration))
    // Add "host:port" pairs as needed
    val hosts = Seq("localhost:22133")
    // Set once shutdown begins so late read errors are not reported as failures.
    val stopped = new AtomicBoolean(false)
    // One kestrel client per host, each limited to a single in-flight item.
    val clients: Seq[Client] = hosts map { host =>
      Client(ClientBuilder()
        .codec(Kestrel())
        .hosts(host)
        .hostConnectionLimit(1) // process at most 1 item per connection concurrently
        .buildFactory())
    }
    // One reliable read handle per client; reads retry with a constant backoff.
    val readHandles: Seq[ReadHandle] = {
      val queueName = "queue"
      val timer = new JavaTimer(isDaemon = true)
      val retryBackoffs = Backoff.const(10.milliseconds)
      clients map { _.readReliably(queueName, timer, retryBackoffs) }
    }
    // Merge the per-host handles into a single stream of messages.
    val readHandle: ReadHandle = ReadHandle.merged(readHandles)
    // Attach an async error handler that prints to stderr
    readHandle.error foreach { e =>
      if (!stopped.get) System.err.println("zomg! got an error " + e)
    }
    // Attach an async message handler that prints the messages to stdout
    readHandle.messages foreach { msg =>
      try {
        println(msg.bytes.toString(CharsetUtil.UTF_8))
      } finally {
        msg.ack.sync() // if we don't do this, no more msgs will come to us
      }
    }
    // Let it run for a little while
    Thread.sleep(duration.inMillis)
    // Without this, we get messages sent to our error handler
    stopped.set(true)
    println("stopping")
    readHandle.close()
    clients foreach { _.close() }
    println("done")
  }
}
| firebase/finagle | finagle-example/src/main/scala/com/twitter/finagle/example/kestrel/KestrelClient.scala | Scala | apache-2.0 | 2,339 |
package edu.arizona.sista.learning
import edu.arizona.sista.struct.Counter
/**
 * Trait for ML datums. L indicates the type of the label; F indicates the type of the feature
 * User: mihais
 * Date: 4/23/13
 */
trait Datum[L, F] {
  /** Gold label assigned to this datum. */
  val label:L
  /** The features present in this datum. */
  def features:Iterable[F]
  /** Feature-to-value mapping for this datum. */
  def featuresCounter:Counter[F]
  /** Renders the label followed by every feature and its count. */
  override def toString:String = {
    val counts = featuresCounter
    // Render each "feature:count" pair and join them with ", ".
    val rendered = counts.keySet
      .map(key => s"$key:${counts.getCount(key)}")
      .mkString(", ")
    s"LABEL:$label FEATURES:$rendered"
  }
}
/**
 * Datum that contains only binary- (or Int) valued features
 * @param label gold label for this datum
 * @param features the features present in this datum
 * @tparam L label type
 * @tparam F feature type
 */
class BVFDatum[L, F](
  val label:L,
  val features:Iterable[F]) extends Datum[L, F] {
  /** Builds a counter where each feature is incremented once per occurrence. */
  def featuresCounter:Counter[F] = {
    val counts = new Counter[F]
    features.foreach(f => counts.incrementCount(f))
    counts
  }
  /** Two BVFDatums are equal when both their labels and feature lists match. */
  override def equals(other:Any):Boolean = other match {
    case d:BVFDatum[L, F] => d.label == label && d.features == features
    case _ => false
  }
  override def hashCode = features.hashCode()
}
/**
 * Datum that contains real-valued features
 * @param label gold label for this datum
 * @param featuresCounter feature-to-value mapping
 * @tparam L label type
 * @tparam F feature type
 */
class RVFDatum[L, F](
  val label:L,
  val featuresCounter:Counter[F]) extends Datum[L, F] {
  /** The distinct features present in this datum. */
  def features = featuresCounter.keySet
  /** Value recorded for feature f in the underlying counter. */
  def getFeatureCount(f:F) = featuresCounter.getCount(f)
  override def equals(other:Any):Boolean = {
    other match {
      case that:RVFDatum[L, F] => label == that.label && featuresCounter == that.featuresCounter
      case _ => false
    }
  }
  // Fix: equals was overridden without hashCode, violating the equals/hashCode
  // contract — equal datums could land in different buckets of hash-based
  // collections. Hash from the same fields equals compares (null-safe via ##).
  override def hashCode = (label, featuresCounter).##
}
/**
 * Datum that contains real-valued features and kernelized representation
 * @param label gold label for this datum
 * @param featuresCounter feature-to-value mapping
 * @param kernel the kernelized representation of this datum
 * @tparam L label type
 * @tparam F feature type
 */
class RVFKDatum[L, F](
  label:L,
  featuresCounter:Counter[F],
  val kernel:String) extends RVFDatum[L, F](label, featuresCounter) {
  // Equality additionally requires an identical kernel string.
  // NOTE(review): equals is overridden without a matching hashCode override in
  // this class — confirm these datums are never used as hash-based keys.
  override def equals(other:Any):Boolean = {
    other match {
      case that:RVFKDatum[L, F] => label == that.label && featuresCounter == that.featuresCounter && kernel == that.kernel
      case _ => false
    }
  }
}
//
// Taranos Cloud Sonification Framework: Service Core
// Copyright (C) 2018 David Hinson, Netrogen Blue LLC ([email protected])
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
package org.taranos.mc.test
import org.scalatest.fixture
import org.taranos.mc.trunk.intraprocess.Patch
import play.api.libs.json._
import org.taranos.common.ServiceCall
import org.taranos.mc.Cell.ResponseMessages.ServiceResult
import org.taranos.mc.{Cell, CellDirector}
class TrunkSpec extends fixture.FlatSpec
{
    // Per-test fixture: wraps the actor-system harness handed to each test.
    case class FixtureParam(cellDirector: CellDirectorFixture)
    // Test harness: spins up an actor system, a mailbox, and a cell director
    // actor, and exposes a synchronous service-call API for tests.
    class CellDirectorFixture
    {
        // Create an actor system for the fixture:
        val _actorSystem = akka.actor.ActorSystem("TestActorSystem")
        // Create a mailbox for the fixture:
        val _inbox = akka.actor.Inbox.create(_actorSystem)
        // Create a cell director actor:
        val _cellDirectorRef = _actorSystem.actorOf(CellDirector.MakeProps, "TestCellDirector")
        // Sends Start and blocks (up to 1 minute) for the Started acknowledgment.
        private
        def StartCellDirector (): Unit =
        {
            _inbox.send(_cellDirectorRef, CellDirector.RequestMessages.Start)
            import scala.concurrent.duration._
            val message = _inbox.receive(1.minute)
            message match
            {
                case CellDirector.ResponseMessages.Started =>
                case _ => assert(false)
            }
        }
        // Synchronously invokes a named service on the cell director and returns
        // its result; JSON arguments are stringified for the wire.
        def CallService (
            serviceName: String,
            serviceArgs: AnyRef*): org.taranos.common.ServiceResult =
        {
            // Converts each argument to its string form; unknown types become "?".
            def ArgsHelper(args: Seq[AnyRef]): Vector[String] =
            {
                var result = Vector.empty[String]
                for (arg <- args)
                {
                    result :+= (arg match
                    {
                        case jsValue: JsValue => Json.stringify(jsValue)
                        case value: String => value
                        case _ => "?"
                    })
                }
                result
            }
            _inbox.send(
                _cellDirectorRef,
                CellDirector.RequestMessages.ServiceCall(ServiceCall(serviceName, ArgsHelper(serviceArgs))))
            import scala.concurrent.duration._
            val message = _inbox.receive(10.minutes) // Generous timeout to allow for debugging.
            message match
            {
                case serviceResult: ServiceResult =>
                    serviceResult._serviceResult
                case _ =>
                    assert(false); null
            }
        }
        // Eagerly start the director as part of fixture construction.
        StartCellDirector()
    }
    // ScalaTest hook: builds a fresh CellDirectorFixture for every test and
    // loans it to the test body.
    def withFixture(test: OneArgTest) =
    {
        val fixtureParam = FixtureParam(new CellDirectorFixture)
        try
        {
            withFixture(test.toNoArgTest(fixtureParam)) // "loan" the fixture to the test
        }
        finally
        {}
    }
def CountTrunkElements (
f: FixtureParam,
trunkKey: String,
specificElements: String*): (Int, String) =
{
val elements =
if (specificElements.isEmpty)
Vector(
"t",
"si",
"sp",
"ss",
"sk",
"sl",
"st",
"smi",
"smb",
"smo",
"smpe",
"smpo")
else
specificElements
val report = ReportTrunkPlants(f, trunkKey)
val sum = elements.map(element => (report \ trunkKey \ element \ "ec").as[String].toInt).sum
(sum, report.toString)
}
def ReportTrunkPlants (
f: FixtureParam,
trunkKey: String): JsObject =
{
val serviceResult = f.cellDirector.CallService("@ReportTrunkModel", trunkKey, Json.obj("s" -> Json.arr("c")))
(Json.parse(serviceResult._results.head) \ "rmt" \ "rp").as[JsObject]
}
    // Keys the cell is expected to assign to the first field/trunk, plus the
    // tag and name constants used when constructing them.
    val expectedField1Key = "f1~f"
    val expectedTrunk1Key = "t1~t"
    val field1Tag = "!f1"
    val field1Name = "f1"
    val trunk1Tag = "!t1"
    val trunk1Name = "t1"
//
// Trunks:
//
    // Full lifecycle for one trunk: destroy-before-create fails, create, rename,
    // report, destroy, and verify the model is empty afterwards.
    "CellDirector" must "pass basic trunk tests" in
    { f =>
        // Attempt to destroy trunk (should fail):
        val trunkDestructor = Json.obj("m" -> Json.obj(
            "k" -> expectedTrunk1Key))
        var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
        assert(serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
        // Create trunk:
        val trunkConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> trunk1Tag,
                "n" -> trunk1Name))
        serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm trunk state:
        var trunkReports = (Json.parse(serviceResult._results.head) \ "rt").as[Seq[JsObject]]
        var trunkReport = trunkReports.head.as[JsObject]
        assert((trunkReport \ "m" \ "k").as[String] == expectedTrunk1Key)
        assert((trunkReport \ "m" \ "t").as[String] == trunk1Tag)
        // Update trunk:
        val trunk1Renamed = trunk1Name + "_renamed"
        val trunkUpdate = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedTrunk1Key,
                "n" -> trunk1Renamed))
        serviceResult = f.cellDirector.CallService("UpdateTrunks", trunkUpdate)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get trunk report:
        var query = Json.obj(
            "k" -> Json.arr(expectedTrunk1Key))
        serviceResult = f.cellDirector.CallService("ReportTrunks", query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm trunk state (rename should be visible):
        trunkReports = (Json.parse(serviceResult._results.head) \ "rt").as[Seq[JsObject]]
        trunkReport = trunkReports.head.as[JsObject]
        assert((trunkReport \ "m" \ "k").as[String] == expectedTrunk1Key)
        assert((trunkReport \ "m" \ "n").as[String] == trunk1Renamed)
        // Destroy trunk:
        // Before destruction: exactly one trunk element and one signal interface.
        assert(CountTrunkElements(f, expectedTrunk1Key, "t")._1 == 1)
        assert(CountTrunkElements(f, expectedTrunk1Key, "si")._1 == 1)
        assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 2)
        serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        assert(CountTrunkElements(f, expectedTrunk1Key, "t")._1 == 0)
        assert(CountTrunkElements(f, expectedTrunk1Key, "si")._1 == 0)
        assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 0)
        // Get trunk report (should be none reporting):
        serviceResult = f.cellDirector.CallService("ReportTrunks", query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        trunkReports = (Json.parse(serviceResult._results.head) \ "rt").as[Seq[JsObject]]
        assert(trunkReports.isEmpty)
    }
//
// Signal Interfaces:
//
it must "pass basic signal interface tests" in
{ f =>
val expectedInterface1Key = "si1~si"
val expectedInterface2Key = "si2~si"
val interface1Name = "si1"
val interface2Name = "si2"
val interface1Tag = "!si1"
val interface2Tag = "!si2"
//
// Test entity creation:
//
// Attempt to destroy any prior trunk (might fail):
val trunkDestructor = Json.obj("m" -> Json.obj(
"k" -> expectedTrunk1Key))
var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
// Create trunk:
val trunkConstructor = Json.obj(
"m" -> Json.obj(
"t" -> trunk1Tag,
"n" -> trunk1Name))
serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create interfaces:
val interface1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> interface1Tag))
val interface2Constructor = Json.obj(
"m" -> Json.obj(
"t" -> interface2Tag))
serviceResult = f.cellDirector.CallService(
"CreateSignalInterfaces",
expectedTrunk1Key,
interface1Constructor,
interface2Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm interface states:
var query = Json.obj(
"k" -> Json.arr(
expectedInterface1Key,
expectedInterface2Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalInterfaces",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
var interfaceReports = (Json.parse(serviceResult._results.head) \ "rsi").as[Seq[JsObject]]
assert(interfaceReports.size == 2)
for (report <- interfaceReports)
(report \ "m" \ "k").as[String] match
{
case `expectedInterface1Key` =>
assert((report \ "m" \ "t").as[String] == interface1Tag)
case `expectedInterface2Key` =>
assert((report \ "m" \ "t").as[String] == interface2Tag)
case _ => assert(false)
}
//
// Test entity updates:
//
// Update interfaces:
val interface1Renamed = interface1Name + "_renamed"
var interface1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedInterface1Key,
"n" -> interface1Renamed))
val interface2Renamed = interface2Name + "_renamed"
var interface2Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedInterface2Key,
"n" -> interface2Renamed))
serviceResult = f.cellDirector.CallService(
"UpdateSignalInterfaces",
expectedTrunk1Key,
interface1Update,
interface2Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm interface states:
query = Json.obj(
"k" -> Json.arr(
expectedInterface1Key,
expectedInterface2Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalInterfaces",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
interfaceReports = (Json.parse(serviceResult._results.head) \ "rsi").as[Seq[JsObject]]
assert(interfaceReports.size == 2)
for (report <- interfaceReports)
(report \ "m" \ "k").as[String] match
{
case `expectedInterface1Key` =>
assert((report \ "m" \ "n").as[String] == interface1Renamed)
case `expectedInterface2Key` =>
assert((report \ "m" \ "n").as[String] == interface2Renamed)
case _ => assert(false)
}
//
// Test entity destruction:
//
// Destroy interfaces:
assert(CountTrunkElements(f, expectedTrunk1Key, "si")._1 == 3)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 4)
val interface1Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedInterface1Key))
val interface2Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedInterface2Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalInterfaces",
expectedTrunk1Key,
interface1Destructor,
interface2Destructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key, "si")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 2)
// Confirm interface states (should be none reporting):
query = Json.obj(
"k" -> Json.arr(
expectedInterface1Key,
expectedInterface2Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalInterfaces",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
interfaceReports = (Json.parse(serviceResult._results.head) \ "rsi").as[Seq[JsObject]]
assert(interfaceReports.isEmpty)
// Destroy trunk:
serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
}
//
// Signal Ports:
//
it must "pass basic signal port tests" in
{ f =>
val expectedInterface1Key = "si1~si"
val expectedPort1Key = "sp1~sp"
val expectedPort2Key = "sp2~sp"
val expectedSink1Key = "sk1~sk"
val interface1Tag = "!si1"
val port1Tag = "!sp1"
val port2Tag = "!sp2"
val sink1Tag = "!sk1"
//
// Test entity creation:
//
// Attempt to destroy any prior trunk (might fail):
val trunkDestructor = Json.obj("m" -> Json.obj(
"k" -> expectedTrunk1Key))
var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
// Create trunk:
val trunkConstructor = Json.obj(
"m" -> Json.obj(
"t" -> trunk1Tag,
"n" -> trunk1Name))
serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create interface:
val interface1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> interface1Tag))
serviceResult = f.cellDirector.CallService(
"CreateSignalInterfaces",
expectedTrunk1Key,
interface1Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create destination sinks:
val sinkCConstructor = Json.obj(
"m" -> Json.obj(
"t" -> sink1Tag,
"m" -> "c"))
serviceResult = f.cellDirector.CallService(
"CreateSignalSinks",
expectedTrunk1Key,
sinkCConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create ports:
val port1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> port1Tag,
"m" -> "c"),
"r" -> Json.obj(
"si" -> expectedInterface1Key))
val port2Constructor = Json.obj(
"m" -> Json.obj(
"t" -> port2Tag,
"m" -> "c"),
"r" -> Json.obj(
"si" -> expectedInterface1Key))
serviceResult = f.cellDirector.CallService(
"CreateSignalPorts",
expectedTrunk1Key,
expectedInterface1Key,
port1Constructor,
port2Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm port states:
var query = Json.obj(
"k" -> Json.arr(
expectedPort1Key,
expectedPort2Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalPorts",
expectedTrunk1Key,
expectedInterface1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
var portReports = (Json.parse(serviceResult._results.head) \ "rsp").as[Seq[JsObject]]
assert(portReports.size == 2)
for (report <- portReports)
(report \ "m" \ "k").as[String] match
{
case `expectedPort1Key` =>
assert((report \ "m" \ "t").as[String] == port1Tag)
case `expectedPort2Key` =>
assert((report \ "m" \ "t").as[String] == port2Tag)
case _ => assert(false)
}
//
// Test entity updates:
//
// Update ports:
val port1Renamed = port1Tag + "_renamed"
var port1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedPort1Key,
"n" -> port1Renamed))
val port2Renamed = port2Tag + "_renamed"
var port2Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedPort2Key,
"n" -> port2Renamed))
serviceResult = f.cellDirector.CallService(
"UpdateSignalPorts",
expectedTrunk1Key,
port1Update,
port2Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm port states:
query = Json.obj(
"k" -> Json.arr(
expectedPort1Key,
expectedPort2Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalPorts",
expectedTrunk1Key,
expectedInterface1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
portReports = (Json.parse(serviceResult._results.head) \ "rsp").as[Seq[JsObject]]
assert(portReports.size == 2)
for (report <- portReports)
(report \ "m" \ "k").as[String] match
{
case `expectedPort1Key` =>
assert((report \ "m" \ "n").as[String] == port1Renamed)
case `expectedPort2Key` =>
assert((report \ "m" \ "n").as[String] == port2Renamed)
case _ => assert(false)
}
//
// Test entity destruction:
//
// Destroy ports:
assert(CountTrunkElements(f, expectedTrunk1Key, "sp")._1 == 2)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 6)
val port1Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedPort1Key))
val port2Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedPort2Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalPorts",
expectedTrunk1Key,
port1Destructor,
port2Destructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key, "sp")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 4)
// Confirm port states (should be none reporting):
query = Json.obj(
"k" -> Json.arr(
expectedPort1Key,
expectedPort2Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalPorts",
expectedTrunk1Key,
expectedInterface1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
portReports = (Json.parse(serviceResult._results.head) \ "rsp").as[Seq[JsObject]]
assert(portReports.isEmpty)
// Destroy interface:
val interface1Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedInterface1Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalInterfaces",
expectedTrunk1Key,
interface1Destructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Destroy trunk:
serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
}
//
// Signal Sources:
//
it must "pass basic signal source tests" in
{ f =>
val expectedSource1Key = "ss1~ss"
val source1Name = "ss1"
val source1Tag = "!ss1"
// Attempt to destroy any prior trunk (might fail):
val trunkDestructor = Json.obj("m" -> Json.obj(
"k" -> expectedTrunk1Key))
var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
// Create trunk:
val trunkConstructor = Json.obj(
"m" -> Json.obj(
"t" -> trunk1Tag,
"n" -> trunk1Name))
serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create sources:
val sourceCConstructor = Json.obj(
"m" -> Json.obj(
"t" -> source1Tag,
"n" -> source1Name))
serviceResult = f.cellDirector.CallService(
"CreateSignalSources",
expectedTrunk1Key,
sourceCConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm source states:
var sourceReports = (Json.parse(serviceResult._results.head) \ "rss").as[Seq[JsObject]]
assert(sourceReports.size == 1)
for (report <- sourceReports)
{
(report \ "m" \ "k").as[String] match
{
case `expectedSource1Key` =>
case _ => assert(false)
}
(report \ "m" \ "n").as[String] match
{
case `source1Name` =>
case _ => assert(false)
}
}
// Update sources:
val sourceCRenamed = source1Name + "_renamed"
val sourceCUpdate = Json.obj(
"m" -> Json.obj(
"k" -> expectedSource1Key,
"n" -> sourceCRenamed))
serviceResult = f.cellDirector.CallService(
"UpdateSignalSources",
expectedTrunk1Key,
sourceCUpdate)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get source reports:
var query = Json.obj(
"k" -> Json.arr(
expectedSource1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalSources",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm source states:
sourceReports = (Json.parse(serviceResult._results.head) \ "rss").as[Seq[JsObject]]
assert(sourceReports.size == 1)
for (report <- sourceReports)
(report \ "m" \ "k").as[String] match
{
case `expectedSource1Key` =>
assert((report \ "m" \ "n").as[String] == sourceCRenamed)
case _ => assert(false)
}
// Destroy sources:
assert(CountTrunkElements(f, expectedTrunk1Key, "ss")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 3)
val sourceCDestructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedSource1Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalSources",
expectedTrunk1Key,
sourceCDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key, "ss")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 2)
// Get source reports:
query = Json.obj(
"k" -> Json.arr(
expectedSource1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalSources",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm source states (should be none reporting):
sourceReports = (Json.parse(serviceResult._results.head) \ "rss").as[Seq[JsObject]]
assert(sourceReports.isEmpty)
// Destroy trunk:
serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
}
//
// Signal Sinks:
//
it must "pass basic signal sink tests" in
{ f =>
val expectedSink1Key = "sk1~sk"
val sink1Name = "sk1"
val sink1Tag = "!sk1"
// Attempt to destroy any prior trunk (might fail):
val trunkDestructor = Json.obj("m" -> Json.obj(
"k" -> expectedTrunk1Key))
var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
// Create trunk:
val trunkConstructor = Json.obj(
"m" -> Json.obj(
"t" -> trunk1Tag,
"n" -> trunk1Name))
serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create sinks:
val sinkCConstructor = Json.obj(
"m" -> Json.obj(
"t" -> sink1Tag,
"n" -> sink1Name,
"m" -> "c"))
serviceResult = f.cellDirector.CallService(
"CreateSignalSinks",
expectedTrunk1Key,
sinkCConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm sink states:
var sinkReports = (Json.parse(serviceResult._results.head) \ "rsk").as[Seq[JsObject]]
assert(sinkReports.size == 1)
for (report <- sinkReports)
{
(report \ "m" \ "k").as[String] match
{
case `expectedSink1Key` =>
case _ => assert(false)
}
(report \ "m" \ "n").as[String] match
{
case `sink1Name` =>
case _ => assert(false)
}
(report \ "m" \ "m").as[String] match
{
case "c" =>
case "d" =>
case _ => assert(false)
}
}
// Update sinks:
val sinkCRenamed = sink1Name + "_renamed"
val sinkCUpdate = Json.obj(
"m" -> Json.obj(
"k" -> expectedSink1Key,
"n" -> sinkCRenamed))
serviceResult = f.cellDirector.CallService(
"UpdateSignalSinks",
expectedTrunk1Key,
sinkCUpdate)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get sink reports:
var query = Json.obj(
"k" -> Json.arr(
expectedSink1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalSinks",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm sink states:
sinkReports = (Json.parse(serviceResult._results.head) \ "rsk").as[Seq[JsObject]]
assert(sinkReports.size == 1)
for (report <- sinkReports)
(report \ "m" \ "k").as[String] match
{
case `expectedSink1Key` =>
assert((report \ "m" \ "n").as[String] == sinkCRenamed)
case _ => assert(false)
}
// Destroy sinks:
assert(CountTrunkElements(f, expectedTrunk1Key, "sk")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 3)
val sinkCDestructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedSink1Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalSinks",
expectedTrunk1Key,
sinkCDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key, "sk")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 2)
// Get sink reports:
query = Json.obj(
"k" -> Json.arr(
expectedSink1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalSinks",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm sink states (should be none reporting):
sinkReports = (Json.parse(serviceResult._results.head) \ "rsk").as[Seq[JsObject]]
assert(sinkReports.isEmpty)
// Destroy trunk:
serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
}
//
// Signal Links:
//
it must "pass basic signal link tests" in
{ f =>
val expectedLink1Key = "sl1~sl"
val expectedSink1Key = "sk1~sk"
val expectedSource1Key = "ss1~ss"
val link1Name = "sl1"
val link1Tag = "!sl1"
val sink1Name = "sk1"
val sink1Tag = "!sk1"
val source1Name = "ss1"
val source1Tag = "!ss1"
// Attempt to destroy any prior trunk (might fail):
val trunkDestructor = Json.obj("m" -> Json.obj(
"k" -> expectedTrunk1Key))
var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
// Create trunk:
val trunkConstructor = Json.obj(
"m" -> Json.obj(
"t" -> trunk1Tag,
"n" -> trunk1Name))
serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create sources:
val sourceCConstructor = Json.obj(
"m" -> Json.obj(
"t" -> source1Tag,
"n" -> source1Name))
serviceResult = f.cellDirector.CallService(
"CreateSignalSources",
expectedTrunk1Key,
sourceCConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create sinks:
val sinkCConstructor = Json.obj(
"m" -> Json.obj(
"t" -> sink1Tag,
"n" -> sink1Name,
"m" -> "c"))
serviceResult = f.cellDirector.CallService(
"CreateSignalSinks",
expectedTrunk1Key,
sinkCConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create links:
val linkCConstructor = Json.obj(
"m" -> Json.obj(
"t" -> link1Tag,
"n" -> link1Name,
"m" -> "c"),
"r" -> Json.obj(
"ss" -> expectedSource1Key,
"sk" -> expectedSink1Key))
serviceResult = f.cellDirector.CallService(
"CreateSignalLinks",
expectedTrunk1Key,
linkCConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm link states:
var linkReports = (Json.parse(serviceResult._results.head) \ "rsl").as[Seq[JsObject]]
assert(linkReports.size == 1)
for (linkReport <- linkReports)
{
(linkReport \ "m" \ "k").as[String] match
{
case `expectedLink1Key` =>
case _ => assert(false)
}
(linkReport \ "m" \ "n").as[String] match
{
case `link1Name` =>
case _ => assert(false)
}
var sourceReportsOpt = (linkReport \ "rss").asOpt[Seq[JsObject]]
if (sourceReportsOpt.isDefined)
{
for (sourceReport <- sourceReportsOpt.get)
{
(sourceReport \ "m" \ "k").as[String] match
{
case `expectedSource1Key` =>
case _ => assert(false)
}
}
}
val sinkReports = (linkReport \ "rsk").as[Seq[JsObject]]
assert(sinkReports.size == 1)
for (sinkReport <- sinkReports)
{
(sinkReport \ "m" \ "k").as[String] match
{
case `expectedSink1Key` =>
case _ => assert(false)
}
}
(linkReport \ "m" \ "m").as[String] match
{
case "c" =>
case "d" =>
case _ => assert(false)
}
}
// Update links:
val linkCRenamed = link1Name + "_renamed"
val linkCUpdate = Json.obj(
"m" -> Json.obj(
"k" -> expectedLink1Key,
"n" -> linkCRenamed))
serviceResult = f.cellDirector.CallService(
"UpdateSignalLinks",
expectedTrunk1Key,
linkCUpdate)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get link reports:
var query = Json.obj(
"k" -> Json.arr(
expectedLink1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalLinks",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm link states:
linkReports = (Json.parse(serviceResult._results.head) \ "rsl").as[Seq[JsObject]]
assert(linkReports.size == 1)
for (report <- linkReports)
(report \ "m" \ "k").as[String] match
{
case `expectedLink1Key` =>
assert((report \ "m" \ "n").as[String] == linkCRenamed)
case _ => assert(false)
}
// Destroy links:
assert(CountTrunkElements(f, expectedTrunk1Key, "ss")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key, "sl")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key, "sk")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 5)
val linkCDestructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedLink1Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalLinks",
expectedTrunk1Key,
linkCDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key, "ss")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key, "sl")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key, "sk")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 3)
// Get link reports:
query = Json.obj(
"k" -> Json.arr(
expectedLink1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalLinks",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm link states (should be none reporting):
linkReports = (Json.parse(serviceResult._results.head) \ "rsl").as[Seq[JsObject]]
assert(linkReports.isEmpty)
// Destroy trunk:
serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
}
//
// Signal Taps:
//
it must "pass basic signal tap tests" in
{ f =>
val expectedLink1Key = "sl1~sl"
val expectedSink1Key = "sk1~sk"
val expectedTap1Key = "st1~st"
val expectedTap1SinkKey = "st1.sk~sk"
val expectedTap1SourceKey = "st1.ss~ss"
val link1Name = "sl1"
val link1Tag = "!sl1"
val tap1Name = "st1"
val tap1Tag = "!st1"
val sink1Name = "sk1"
val sink1Tag = "!sk1"
//
// Test entity creation:
//
// Attempt to destroy any prior trunk (might fail):
val trunkDestructor = Json.obj("m" -> Json.obj(
"k" -> expectedTrunk1Key))
var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
// Create trunk:
val trunkConstructor = Json.obj(
"m" -> Json.obj(
"t" -> trunk1Tag,
"n" -> trunk1Name))
serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create tap:
val tap1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> tap1Tag,
"n" -> tap1Name,
"m" -> "c"))
serviceResult = f.cellDirector.CallService(
"CreateSignalTaps",
expectedTrunk1Key,
tap1Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm tap states:
var tapReports = (Json.parse(serviceResult._results.head) \ "rst").as[Seq[JsObject]]
assert(tapReports.size == 1)
for (tapReport <- tapReports)
{
(tapReport \ "m" \ "k").as[String] match
{
case `expectedTap1Key` =>
case _ => assert(false)
}
(tapReport \ "m" \ "n").as[String] match
{
case `tap1Name` =>
case _ => assert(false)
}
var sourceReportsOpt = (tapReport \ "rss").asOpt[Seq[JsObject]]
if (sourceReportsOpt.isDefined)
{
for (sourceReport <- sourceReportsOpt.get)
{
(sourceReport \ "m" \ "k").as[String] match
{
case `expectedTap1SourceKey` =>
case _ => assert(false)
}
}
}
val sinkReports = (tapReport \ "rsk").as[Seq[JsObject]]
assert(sinkReports.size == 1)
for (sinkReport <- sinkReports)
{
(sinkReport \ "m" \ "k").as[String] match
{
case `expectedTap1SinkKey` =>
case _ => assert(false)
}
}
(tapReport \ "m" \ "m").as[String] match
{
case "c" =>
case "d" =>
case _ => assert(false)
}
}
// Create destination sink:
val sink1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> sink1Tag,
"n" -> sink1Name,
"m" -> "c"))
serviceResult = f.cellDirector.CallService(
"CreateSignalSinks",
expectedTrunk1Key,
sink1Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create link between tap source and destination sink:
val link1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> link1Tag,
"n" -> link1Name,
"m" -> "c"),
"r" -> Json.obj(
"ss" -> expectedTap1SourceKey,
"sk" -> expectedSink1Key))
serviceResult = f.cellDirector.CallService(
"CreateSignalLinks",
expectedTrunk1Key,
link1Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
//
// Test entity updates:
//
// Update tap:
val tap1Renamed = tap1Name + "_renamed"
var tap1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedTap1Key,
"n" -> tap1Renamed))
serviceResult = f.cellDirector.CallService(
"UpdateSignalTaps",
expectedTrunk1Key,
tap1Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm tap states:
var query = Json.obj(
"k" -> Json.arr(
expectedTap1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalTaps",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
tapReports = (Json.parse(serviceResult._results.head) \ "rst").as[Seq[JsObject]]
assert(tapReports.size == 1)
for (report <- tapReports)
(report \ "m" \ "k").as[String] match
{
case `expectedTap1Key` =>
assert((report \ "m" \ "n").as[String] == tap1Renamed)
case _ => assert(false)
}
//
// Test signal propagation:
//
// Initialize tap sink signal:
var sink1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedTap1SinkKey),
"s" -> Json.obj(
"s" -> "0.0"))
serviceResult = f.cellDirector.CallService(
"UpdateSignalSinks",
expectedTrunk1Key,
sink1Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm tap sink state:
query = Json.obj(
"k" -> Json.arr(
expectedTap1Key),
"s" -> Json.arr("cs"))
serviceResult = f.cellDirector.CallService(
"ReportSignalTaps",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
tapReports = (Json.parse(serviceResult._results.head) \ "rst").as[Seq[JsObject]]
assert(tapReports.size == 1)
for (report <- tapReports)
(report \ "m" \ "k").as[String] match
{
case `expectedTap1Key` =>
val sinkReports = (report \ "rsk").as[Seq[JsObject]]
assert(sinkReports.size == 1)
for (sinkReport <- sinkReports)
{
val trapReport = (sinkReport \ "s" \ "t").as[JsObject]
(trapReport \ expectedTap1SinkKey \ "v").as[String] match
{
case "0.000" =>
case _ => assert(false)
}
}
case _ => assert(false)
}
// Confirm destination sink states:
query = Json.obj(
"k" -> Json.arr(
expectedSink1Key),
"s" -> Json.arr("s"))
serviceResult = f.cellDirector.CallService(
"ReportSignalSinks",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
var sinkReports = (Json.parse(serviceResult._results.head) \ "rsk").as[Seq[JsObject]]
assert(sinkReports.size == 1)
for (sinkReport <- sinkReports)
(sinkReport \ "m" \ "k").as[String] match
{
case `expectedSink1Key` =>
val trapReport = (sinkReport \ "s" \ "t").as[JsObject]
(trapReport \ expectedLink1Key \ "v").as[String] match
{
case "0.000" =>
case _ => assert(false)
}
case _ => assert(false)
}
// Update tap sink signal:
sink1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedTap1SinkKey),
"s" -> Json.obj(
"s" -> 1.0.toString))
serviceResult = f.cellDirector.CallService(
"UpdateSignalSinks",
expectedTrunk1Key,
sink1Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm destination sink state:
query = Json.obj(
"k" -> Json.arr(
expectedSink1Key),
"s" -> Json.arr("s"))
serviceResult = f.cellDirector.CallService(
"ReportSignalSinks",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
sinkReports = (Json.parse(serviceResult._results.head) \ "rsk").as[Seq[JsObject]]
assert(sinkReports.size == 1)
for (sinkReport <- sinkReports)
(sinkReport \ "m" \ "k").as[String] match
{
case `expectedSink1Key` =>
val trapReport = (sinkReport \ "s" \ "t").as[JsObject]
(trapReport \ expectedLink1Key \ "v").as[String] match
{
case "1.000" =>
case _ => assert(false)
}
case _ => assert(false)
}
//
// Test entity destruction:
//
// Destroy tap:
assert(CountTrunkElements(f, expectedTrunk1Key, "st")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key, "sk")._1 == 2)
assert(CountTrunkElements(f, expectedTrunk1Key, "sl")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key, "ss")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 7)
val tap2Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedTap1Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalTaps",
expectedTrunk1Key,
tap2Destructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key, "st")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key, "sk")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key, "sl")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key, "ss")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 2)
// Get tap report:
query = Json.obj(
"k" -> Json.arr(
expectedTap1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalTaps",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm tap state (should be none reporting):
tapReports = (Json.parse(serviceResult._results.head) \ "rst").as[Seq[JsObject]]
assert(tapReports.isEmpty)
// Destroy trunk:
serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
}
//
// Signal Inputs:
//
it must "pass basic signal input tests" in
{ f =>
val expectedInput1Key = "smi1~smi"
val expectedTap1Key = "smi1.st~st"
val input1Name = "smi1"
val input1Tag = "!smi1"
//
// Test entity creation:
//
// Attempt to destroy any prior trunk (might fail):
val trunkDestructor = Json.obj("m" -> Json.obj(
"k" -> expectedTrunk1Key))
var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
// Create trunk:
val trunkConstructor = Json.obj(
"m" -> Json.obj(
"t" -> trunk1Tag,
"n" -> trunk1Name))
serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create input:
val inputConstructor = Json.obj(
"m" -> Json.obj(
"t" -> input1Tag,
"n" -> input1Name,
"m" -> "c"))
serviceResult = f.cellDirector.CallService(
"CreateSignalInputs",
expectedTrunk1Key,
inputConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm input state:
var inputReports = (Json.parse(serviceResult._results.head) \ "rsmi").as[Seq[JsObject]]
var inputReport = inputReports.head
(inputReport \ "m" \ "k").as[String] match
{
case `expectedInput1Key` =>
case _ => assert(false)
}
(inputReport \ "m" \ "n").as[String] match
{
case `input1Name` =>
case _ => assert(false)
}
var tapReports = (inputReport \ "rst").as[Seq[JsObject]]
var tapReport = tapReports.head
(tapReport \ "m" \ "k").as[String] match
{
case `expectedTap1Key` =>
case _ => assert(false)
}
(inputReport \ "m" \ "m").as[String] match
{
case "c" =>
case _ => assert(false)
}
//
// Test entity updates:
//
// Update input:
val input1Renamed = input1Name + "_renamed"
var input1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedInput1Key,
"n" -> input1Renamed))
serviceResult = f.cellDirector.CallService(
"UpdateSignalInputs",
expectedTrunk1Key,
input1Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm input state:
var query = Json.obj(
"k" -> Json.arr(
expectedInput1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalInputs",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
inputReports = (Json.parse(serviceResult._results.head) \ "rsmi").as[Seq[JsObject]]
inputReport = inputReports.head
(inputReport \ "m" \ "k").as[String] match
{
case `expectedInput1Key` =>
assert((inputReport \ "m" \ "n").as[String] == input1Renamed)
case _ => assert(false)
}
//
// Test entity destruction:
//
// Destroy inputs:
assert(CountTrunkElements(f, expectedTrunk1Key, "smi")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 6)
val inputDestructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedInput1Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalInputs",
expectedTrunk1Key,
inputDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key, "smi")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 2)
// Get input report:
query = Json.obj(
"k" -> Json.arr(
expectedInput1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalInputs",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm input state (should be none reporting):
inputReports = (Json.parse(serviceResult._results.head) \ "rsmi").as[Seq[JsObject]]
assert(inputReports.isEmpty)
// Destroy trunk:
serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 0)
}
//
// Signal Bridges:
//
it must "pass basic signal bridge tests" in
{ f =>
val expectedBridge1Key = "smb1~smb"
val expectedInput1Key = "smi1~smi"
val expectedTap1Key = "smb1.st~st"
val bridge1Name = "smb1"
val bridge1Tag = "!smb1"
val input1Name = "smi1"
val input1Tag = "!smi1"
//
// Test entity creation:
//
// Attempt to destroy any prior trunk (might fail):
val trunkDestructor = Json.obj("m" -> Json.obj(
"k" -> expectedTrunk1Key))
var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
// Create trunk:
val trunkConstructor = Json.obj(
"m" -> Json.obj(
"t" -> trunk1Tag,
"n" -> trunk1Name))
serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create input:
val inputConstructor = Json.obj(
"m" -> Json.obj(
"t" -> input1Tag,
"n" -> input1Name,
"m" -> "c"))
serviceResult = f.cellDirector.CallService(
"CreateSignalInputs",
expectedTrunk1Key,
inputConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm input state:
var inputReports = (Json.parse(serviceResult._results.head) \ "rsmi").as[Seq[JsObject]]
var inputReport = inputReports.head
(inputReport \ "m" \ "k").as[String] match
{
case `expectedInput1Key` =>
case _ => assert(false)
}
// Create bridge:
val bridgeConstructor = Json.obj(
"m" -> Json.obj(
"t" -> bridge1Tag,
"n" -> bridge1Name,
"m" -> "c"),
"r" -> Json.obj(
"sm" -> expectedInput1Key))
serviceResult = f.cellDirector.CallService(
"CreateSignalBridges",
expectedTrunk1Key,
bridgeConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm bridge state:
var bridgeReports = (Json.parse(serviceResult._results.head) \ "rsmb").as[Seq[JsObject]]
var bridgeReport = bridgeReports.head
(bridgeReport \ "m" \ "k").as[String] match
{
case `expectedBridge1Key` =>
case _ => assert(false)
}
(bridgeReport \ "m" \ "n").as[String] match
{
case `bridge1Name` =>
case _ => assert(false)
}
var tapReports = (bridgeReport \ "rst").as[Seq[JsObject]]
var tapReport = tapReports.head
(tapReport \ "m" \ "k").as[String] match
{
case `expectedTap1Key` =>
case _ => assert(false)
}
(bridgeReport \ "m" \ "m").as[String] match
{
case "c" =>
case _ => assert(false)
}
//
// Test entity updates:
//
// Update bridge:
val bridge1Renamed = bridge1Name + "_renamed"
var bridge1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedBridge1Key,
"n" -> bridge1Renamed))
serviceResult = f.cellDirector.CallService(
"UpdateSignalBridges",
expectedTrunk1Key,
bridge1Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm bridge state:
var query = Json.obj(
"k" -> Json.arr(
expectedBridge1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalBridges",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
bridgeReports = (Json.parse(serviceResult._results.head) \ "rsmb").as[Seq[JsObject]]
bridgeReport = bridgeReports.head
(bridgeReport \ "m" \ "k").as[String] match
{
case `expectedBridge1Key` =>
assert((bridgeReport \ "m" \ "n").as[String] == bridge1Renamed)
case _ => assert(false)
}
//
// Test signal propagation:
//
// Put input's signal:
val input1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedInput1Key),
"s" -> Json.obj(
"s" -> "1.0"))
serviceResult = f.cellDirector.CallService(
"UpdateSignalInputs",
expectedTrunk1Key,
input1Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm bridge state:
query = Json.obj(
"k" -> Json.arr(
expectedBridge1Key),
"s" -> Json.arr(
"cs"))
serviceResult = f.cellDirector.CallService(
"ReportSignalBridges",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
bridgeReports = (Json.parse(serviceResult._results.head) \ "rsmb").as[Seq[JsObject]]
bridgeReport = bridgeReports.head
(bridgeReport \ "s" \ "s" \ "v").as[String] match
{
case "1.000" =>
case _ => assert(false)
}
//
// Test entity destruction:
//
// Destroy bridges:
assert(CountTrunkElements(f, expectedTrunk1Key, "smb")._1 == 1)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 11)
val bridgeDestructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedBridge1Key))
serviceResult = f.cellDirector.CallService(
"DestroySignalBridges",
expectedTrunk1Key,
bridgeDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key, "smb")._1 == 0)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 8)
// Get bridge report:
query = Json.obj(
"k" -> Json.arr(
expectedBridge1Key))
serviceResult = f.cellDirector.CallService(
"ReportSignalBridges",
expectedTrunk1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm bridge state (should be none reporting):
bridgeReports = (Json.parse(serviceResult._results.head) \ "rsmb").as[Seq[JsObject]]
assert(bridgeReports.isEmpty)
// Destroy trunk:
serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 0)
}
//
// Signal Outputs:
//
    it must "pass basic signal output tests" in
    { f =>
        // Exercises the full lifecycle of a signal output: create, report, rename,
        // propagate a signal from an input through its tap, and destroy.
        val expectedOutput1Key = "smo1~smo"
        val expectedInput1Key = "smi1~smi"
        val expectedTap1Key = "smo1.st~st"
        val output1Name = "smo1"
        val output1Tag = "!smo1"
        val input1Name = "smi1"
        val input1Tag = "!smi1"
        //
        // Test entity creation:
        //
        // Attempt to destroy any prior trunk (might fail):
        val trunkDestructor = Json.obj("m" -> Json.obj(
            "k" -> expectedTrunk1Key))
        var serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
        assert(
            serviceResult._status == Cell.ErrorCodes.Ok ||
            serviceResult._status == Cell.ErrorCodes.TrunkUnknown)
        // Create trunk:
        val trunkConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> trunk1Tag,
                "n" -> trunk1Name))
        serviceResult = f.cellDirector.CallService("CreateTrunks", trunkConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create input (inner "m" -> "c" selects the input's mode; presumably
        // continuous — TODO confirm against the signal-input service):
        val inputConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> input1Tag,
                "n" -> input1Name,
                "m" -> "c"))
        serviceResult = f.cellDirector.CallService(
            "CreateSignalInputs",
            expectedTrunk1Key,
            inputConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm input state:
        var inputReports = (Json.parse(serviceResult._results.head) \ "rsmi").as[Seq[JsObject]]
        var inputReport = inputReports.head
        (inputReport \ "m" \ "k").as[String] match
        {
            case `expectedInput1Key` =>
            case _ => assert(false)
        }
        // Create output ("r" / "sm" links the new output to the input created above):
        val outputConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> output1Tag,
                "n" -> output1Name,
                "m" -> "c"),
            "r" -> Json.obj(
                "sm" -> expectedInput1Key))
        serviceResult = f.cellDirector.CallService(
            "CreateSignalOutputs",
            expectedTrunk1Key,
            outputConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm output state:
        var outputReports = (Json.parse(serviceResult._results.head) \ "rsmo").as[Seq[JsObject]]
        var outputReport = outputReports.head
        (outputReport \ "m" \ "k").as[String] match
        {
            case `expectedOutput1Key` =>
            case _ => assert(false)
        }
        (outputReport \ "m" \ "n").as[String] match
        {
            case `output1Name` =>
            case _ => assert(false)
        }
        // Creating an output also creates its signal tap, reported under "rst":
        var tapReports = (outputReport \ "rst").as[Seq[JsObject]]
        var tapReport = tapReports.head
        (tapReport \ "m" \ "k").as[String] match
        {
            case `expectedTap1Key` =>
            case _ => assert(false)
        }
        (outputReport \ "m" \ "m").as[String] match
        {
            case "c" =>
            case _ => assert(false)
        }
        //
        // Test entity updates:
        //
        // Update (rename) output:
        val output1Renamed = output1Name + "_renamed"
        var output1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedOutput1Key,
                "n" -> output1Renamed))
        serviceResult = f.cellDirector.CallService(
            "UpdateSignalOutputs",
            expectedTrunk1Key,
            output1Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm output state (rename must round-trip through a fresh report):
        var query = Json.obj(
            "k" -> Json.arr(
                expectedOutput1Key))
        serviceResult = f.cellDirector.CallService(
            "ReportSignalOutputs",
            expectedTrunk1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        outputReports = (Json.parse(serviceResult._results.head) \ "rsmo").as[Seq[JsObject]]
        outputReport = outputReports.head
        (outputReport \ "m" \ "k").as[String] match
        {
            case `expectedOutput1Key` =>
                assert((outputReport \ "m" \ "n").as[String] == output1Renamed)
            case _ => assert(false)
        }
        //
        // Test signal propagation:
        //
        // Put input's signal:
        val input1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedInput1Key),
            "s" -> Json.obj(
                "s" -> "1.0"))
        serviceResult = f.cellDirector.CallService(
            "UpdateSignalInputs",
            expectedTrunk1Key,
            input1Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm output state ("cs" requests the current-signal section — TODO confirm):
        query = Json.obj(
            "k" -> Json.arr(
                expectedOutput1Key),
            "s" -> Json.arr(
                "cs"))
        serviceResult = f.cellDirector.CallService(
            "ReportSignalOutputs",
            expectedTrunk1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        outputReports = (Json.parse(serviceResult._results.head) \ "rsmo").as[Seq[JsObject]]
        outputReport = outputReports.head
        // The "1.0" written to the input is reported here formatted to three decimals:
        (outputReport \ "s" \ "s" \ "v").as[String] match
        {
            case "1.000" =>
            case _ => assert(false)
        }
        //
        // Test entity destruction:
        //
        // Destroy outputs.  Trunk element count drops from 11 to 8 — three elements
        // disappear; presumably the output, its tap, and a link — verify.
        assert(CountTrunkElements(f, expectedTrunk1Key, "smo")._1 == 1)
        assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 11)
        val outputDestructor = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedOutput1Key))
        serviceResult = f.cellDirector.CallService(
            "DestroySignalOutputs",
            expectedTrunk1Key,
            outputDestructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        assert(CountTrunkElements(f, expectedTrunk1Key, "smo")._1 == 0)
        assert(CountTrunkElements(f, expectedTrunk1Key)._1 == 8)
        // Get output report:
        query = Json.obj(
            "k" -> Json.arr(
                expectedOutput1Key))
        serviceResult = f.cellDirector.CallService(
            "ReportSignalOutputs",
            expectedTrunk1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm output state (should be none reporting):
        outputReports = (Json.parse(serviceResult._results.head) \ "rsmo").as[Seq[JsObject]]
        assert(outputReports.isEmpty)
        // Destroy trunk:
        serviceResult = f.cellDirector.CallService("DestroyTrunks", trunkDestructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
    }
//
// Fields:
//
it must "pass basic field tests" in
{ f =>
// Attempt to destroy field (should fail):
val fieldDestructor = Json.obj(
"m" -> Json.obj("k" -> expectedField1Key),
"s" -> "s")
var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
assert(serviceResult._status == Cell.ErrorCodes.FieldUnknown)
// Create field:
val fieldConstructor = Json.obj(
"m" -> Json.obj(
"t" -> field1Tag,
"n" -> field1Name),
"a" -> Json.obj(
"ac" -> 0.0.toString,
"ar" -> 0.0.toString,
"ad" -> 1000.0.toString,
"g" -> "2S"))
serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm field state:
var fieldReports = (Json.parse(serviceResult._results.head) \ "rf").as[Seq[JsObject]]
var fieldReport = fieldReports.head.as[JsObject]
assert((fieldReport \ "m" \ "k").as[String] == expectedField1Key)
assert((fieldReport \ "m" \ "t").as[String] == field1Tag)
// Update field:
val field1Renamed = field1Tag + "_renamed"
val fieldUpdate = Json.obj(
"m" -> Json.obj(
"k" -> expectedField1Key,
"n" -> field1Renamed))
serviceResult = f.cellDirector.CallService("UpdateFields", fieldUpdate)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get field report:
var query = Json.obj(
"k" -> Json.arr(expectedField1Key))
serviceResult = f.cellDirector.CallService("ReportFields", query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm field state:
fieldReports = (Json.parse(serviceResult._results.head) \ "rf").as[Seq[JsObject]]
fieldReport = fieldReports.head.as[JsObject]
assert((fieldReport \ "m" \ "k").as[String] == expectedField1Key)
assert((fieldReport \ "m" \ "n").as[String] == field1Renamed)
// Destroy field:
serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get field report (should be none reporting):
serviceResult = f.cellDirector.CallService("ReportFields", query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
fieldReports = (Json.parse(serviceResult._results.head) \ "rf").as[Seq[JsObject]]
assert(fieldReports.isEmpty)
}
//
// Field Emitters:
//
    it must "pass basic field emitter tests" in
    { f =>
        // Exercises field emitter lifecycle: create two emitters, rename them,
        // invoke emitter call-macros, and destroy them.
        val expectedFieldEmitter1Key = "fe1~fe"
        val expectedFieldEmitter2Key = "fe2~fe"
        val fieldEmitter1Tag = "!fe1"
        val fieldEmitter1Name = "fe1"
        val fieldEmitter2Tag = "!fe2"
        val fieldEmitter2Name = "fe2"
        // Attempt to destroy any prior field (might fail):
        val fieldDestructor = Json.obj(
            "m" -> Json.obj("k" -> expectedField1Key),
            "s" -> "s")
        var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
        assert(
            serviceResult._status == Cell.ErrorCodes.Ok ||
            serviceResult._status == Cell.ErrorCodes.FieldUnknown)
        // Create field (same antenna/geometry fixture as the other field tests):
        val fieldConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> field1Tag,
                "n" -> field1Name),
            "a" -> Json.obj(
                "ac" -> 0.0.toString,
                "ar" -> 0.0.toString,
                "ad" -> 1000.0.toString,
                "g" -> "2S"))
        serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create field emitters (two in a single service call):
        val fieldEmitter1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> fieldEmitter1Tag,
                "n" -> fieldEmitter1Name))
        val fieldEmitter2Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> fieldEmitter2Tag,
                "n" -> fieldEmitter2Name))
        serviceResult = f.cellDirector.CallService(
            "CreateFieldEmitters",
            expectedField1Key,
            fieldEmitter1Constructor,
            fieldEmitter2Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm field emitter states (one report per emitter, matched by key):
        var fieldEmitterReports = (Json.parse(serviceResult._results.head) \ "rfe").as[Seq[JsObject]]
        assert(fieldEmitterReports.size == 2)
        for (report <- fieldEmitterReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedFieldEmitter1Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldEmitter1Name)
                case `expectedFieldEmitter2Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldEmitter2Name)
                case _ => assert(false)
            }
        // Update (rename) field emitters:
        val fieldEmitter1Renamed = fieldEmitter1Name + "_renamed"
        val fieldEmitter2Renamed = fieldEmitter2Name + "_renamed"
        val fieldEmitter1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedFieldEmitter1Key,
                "n" -> fieldEmitter1Renamed))
        val fieldEmitter2Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedFieldEmitter2Key,
                "n" -> fieldEmitter2Renamed))
        serviceResult = f.cellDirector.CallService(
            "UpdateFieldEmitters",
            expectedField1Key,
            fieldEmitter1Update,
            fieldEmitter2Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get field emitter reports:
        var query = Json.obj(
            "k" -> Json.arr(
                expectedFieldEmitter1Key,
                expectedFieldEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportFieldEmitters",
            expectedField1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm field emitter states (renames must round-trip):
        fieldEmitterReports = (Json.parse(serviceResult._results.head) \ "rfe").as[Seq[JsObject]]
        assert(fieldEmitterReports.size == 2)
        for (report <- fieldEmitterReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedFieldEmitter1Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldEmitter1Renamed)
                case `expectedFieldEmitter2Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldEmitter2Renamed)
                case _ => assert(false)
            }
        // Call field emitters.
        // NOTE(review): all four calls below target expectedFieldEmitter1Key; emitter 2
        // receives no calls.  Looks intentional (several macros exercised on one
        // emitter), but confirm the "2"/"3"/"4" in the val names are not meant to
        // address the second emitter's key.
        val fieldEmitter1Call = Json.obj(
            "k" -> expectedFieldEmitter1Key,
            "m" -> Json.obj(
                "set_channel_ceiling" -> Json.arr("_", "0.75")))
        val fieldEmitter2Call = Json.obj(
            "k" -> expectedFieldEmitter1Key,
            "m" -> Json.obj(
                "create_channel" -> Json.arr("1")))
        val fieldEmitter3Call = Json.obj(
            "k" -> expectedFieldEmitter1Key,
            "m" -> Json.obj(
                "destroy_channel" -> Json.arr("1")))
        val fieldEmitter4Call = Json.obj(
            "k" -> expectedFieldEmitter1Key,
            "m" -> Json.obj(
                "set_channel_poles" -> Json.arr("_", "3")))
//        val fieldEmitter5Call = Json.obj(
//            "k" -> expectedFieldEmitter1Key,
//            "m" -> Json.obj(
//                "set_loudness_envelope" -> Json.arr("a000000a")))
//        val fieldEmitter6Call = Json.obj(
//            "k" -> expectedFieldEmitter1Key,
//            "m" -> Json.obj(
//                "set_loudness_envelope" -> Json.arr("a112233a")))
//        val fieldEmitter7Call = Json.obj(
//            "k" -> expectedFieldEmitter1Key,
//            "m" -> Json.obj(
//                "set_shape_envelope" -> Json.arr("5aa50")))
//        val fieldEmitter8Call = Json.obj(
//            "k" -> expectedFieldEmitter1Key,
//            "m" -> Json.obj(
//                "set_shape_envelope" -> Json.arr("a")))
//        val fieldEmitter9Call = Json.obj(
//            "k" -> expectedFieldEmitter1Key,
//            "m" -> Json.obj(
//                "set_shape_envelope" -> Json.arr("000000a")))
//        val fieldEmitter10Call = Json.obj(
//            "k" -> expectedFieldEmitter1Key,
//            "m" -> Json.obj(
//                "set_shape_envelope" -> Json.arr("0000000a")))
//        val fieldEmitter11Call = Json.obj(
//            "k" -> expectedFieldEmitter1Key,
//            "m" -> Json.obj(
//                "set_shape_envelope" -> Json.arr("a000000a")))
//        val fieldEmitter12Call = Json.obj(
//            "k" -> expectedFieldEmitter1Key,
//            "m" -> Json.obj(
//                "set_shape_envelope" -> Json.arr("a112233a")))
        serviceResult = f.cellDirector.CallService(
            "CallFieldEmitters",
            expectedField1Key,
            fieldEmitter1Call,
            fieldEmitter2Call,
            fieldEmitter3Call,
            fieldEmitter4Call
//            fieldEmitter5Call,
//            fieldEmitter6Call,
//            fieldEmitter7Call,
//            fieldEmitter8Call,
//            fieldEmitter9Call,
//            fieldEmitter10Call,
//            fieldEmitter11Call,
//            fieldEmitter12Call)
        )
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get field emitter reports (the macro calls must not disturb metadata):
        query = Json.obj(
            "k" -> Json.arr(
                expectedFieldEmitter1Key,
                expectedFieldEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportFieldEmitters",
            expectedField1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm field emitter states:
        fieldEmitterReports = (Json.parse(serviceResult._results.head) \ "rfe").as[Seq[JsObject]]
        assert(fieldEmitterReports.size == 2)
        for (report <- fieldEmitterReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedFieldEmitter1Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldEmitter1Renamed)
                case `expectedFieldEmitter2Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldEmitter2Renamed)
                case _ => assert(false)
            }
        // Destroy field emitters:
        val fieldEmitter1Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedFieldEmitter1Key))
        val fieldEmitter2Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedFieldEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "DestroyFieldEmitters",
            expectedField1Key,
            fieldEmitter1Destructor,
            fieldEmitter2Destructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get field emitter reports (should be none reporting):
        query = Json.obj(
            "k" -> Json.arr(
                expectedFieldEmitter1Key,
                expectedFieldEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportFieldEmitters",
            expectedField1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        fieldEmitterReports = (Json.parse(serviceResult._results.head) \ "rfe").as[Seq[JsObject]]
        assert(fieldEmitterReports.isEmpty)
    }
//
// Field Oscillators:
//
    it must "pass basic field oscillator tests" in
    { f =>
        // Exercises field oscillator lifecycle: create two oscillators under one
        // emitter, rename them, invoke pole-setting call-macros, and destroy them.
        val expectedFieldEmitter1Key = "fe1~fe"
        val expectedFieldOscillator1Key = "fo1~fo"
        val expectedFieldOscillator2Key = "fo2~fo"
        val fieldEmitter1Tag = "!fe1"
        val fieldEmitter1Name = "fe1"
        val fieldOscillator1Tag = "!fo1"
        val fieldOscillator1Name = "fo1"
        val fieldOscillator2Tag = "!fo2"
        val fieldOscillator2Name = "fo2"
        // Attempt to destroy any prior field (might fail):
        val fieldDestructor = Json.obj(
            "m" -> Json.obj("k" -> expectedField1Key),
            "s" -> "s")
        var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
        assert(
            serviceResult._status == Cell.ErrorCodes.Ok ||
            serviceResult._status == Cell.ErrorCodes.FieldUnknown)
        // Create field (same antenna/geometry fixture as the other field tests):
        val fieldConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> field1Tag,
                "n" -> field1Name),
            "a" -> Json.obj(
                "ac" -> 0.0.toString,
                "ar" -> 0.0.toString,
                "ad" -> 1000.0.toString,
                "g" -> "2S"))
        serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create field emitter to own the oscillators:
        val fieldEmitterConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> fieldEmitter1Tag,
                "n" -> fieldEmitter1Name))
        serviceResult = f.cellDirector.CallService(
            "CreateFieldEmitters",
            expectedField1Key,
            fieldEmitterConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create field oscillators ("dc" is a channel config: "tc" the channel
        // index, "dpo" / "w" a waveset reference — presumably a wavetable file;
        // TODO confirm against the oscillator service):
        val fieldOscillator1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> fieldOscillator1Tag,
                "n" -> fieldOscillator1Name),
            "a" -> Json.obj(
                "dc" -> Json.obj(
                    "tc" -> "0",
                    "dpo" -> Json.obj(
                        "w" -> "default.tws"))))
        val fieldOscillator2Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> fieldOscillator2Tag,
                "n" -> fieldOscillator2Name),
            "a" -> Json.obj(
                "dc" -> Json.obj(
                    "tc" -> "1",
                    "dpo" -> Json.obj(
                        "w" -> "default.tws"))))
        serviceResult = f.cellDirector.CallService(
            "CreateFieldOscillators",
            expectedField1Key,
            expectedFieldEmitter1Key,
            fieldOscillator1Constructor,
            fieldOscillator2Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm field oscillator states (one report per oscillator, matched by key):
        var fieldOscillatorReports = (Json.parse(serviceResult._results.head) \ "rfo").as[Seq[JsObject]]
        assert(fieldOscillatorReports.size == 2)
        for (report <- fieldOscillatorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedFieldOscillator1Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldOscillator1Name)
                case `expectedFieldOscillator2Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldOscillator2Name)
                case _ => assert(false)
            }
        // Update (rename) field oscillators:
        val fieldOscillator1Renamed = fieldOscillator1Name + "_renamed"
        val fieldOscillator2Renamed = fieldOscillator2Name + "_renamed"
        val fieldOscillator1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedFieldOscillator1Key,
                "n" -> fieldOscillator1Renamed))
        val fieldOscillator2Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedFieldOscillator2Key,
                "n" -> fieldOscillator2Renamed))
        serviceResult = f.cellDirector.CallService(
            "UpdateFieldOscillators",
            expectedField1Key,
            fieldOscillator1Update,
            fieldOscillator2Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get field oscillator reports (scoped to the specific owning emitter):
        var query = Json.obj(
            "k" -> Json.arr(
                expectedFieldOscillator1Key,
                expectedFieldOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportFieldOscillators", // limited
            expectedField1Key,
            expectedFieldEmitter1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm field oscillator states (renames must round-trip):
        fieldOscillatorReports = (Json.parse(serviceResult._results.head) \ "rfo").as[Seq[JsObject]]
        assert(fieldOscillatorReports.size == 2)
        for (report <- fieldOscillatorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedFieldOscillator1Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldOscillator1Renamed)
                case `expectedFieldOscillator2Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldOscillator2Renamed)
                case _ => assert(false)
            }
        // Call field oscillators with pole strings of varying lengths/validity.
        // NOTE(review): all twelve calls target expectedFieldOscillator1Key;
        // oscillator 2 receives no calls.  Looks intentional (exercising many pole
        // encodings on one oscillator), but confirm it is not a copy-paste slip.
        val fieldOscillator1Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_loudness_poles" -> Json.arr("5aa50")))
        val fieldOscillator2Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_loudness_poles" -> Json.arr("a")))
        val fieldOscillator3Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_loudness_poles" -> Json.arr("000000a")))
        val fieldOscillator4Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_loudness_poles" -> Json.arr("0000000a")))
        val fieldOscillator5Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_loudness_poles" -> Json.arr("a000000a")))
        val fieldOscillator6Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_loudness_poles" -> Json.arr("a112233a")))
        val fieldOscillator7Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_shape_poles" -> Json.arr("5aa50")))
        val fieldOscillator8Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_shape_poles" -> Json.arr("a")))
        val fieldOscillator9Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_shape_poles" -> Json.arr("000000a")))
        val fieldOscillator10Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_shape_poles" -> Json.arr("0000000a")))
        val fieldOscillator11Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_shape_poles" -> Json.arr("a000000a")))
        val fieldOscillator12Call = Json.obj(
            "k" -> expectedFieldOscillator1Key,
            "m" -> Json.obj(
                "set_shape_poles" -> Json.arr("a112233a")))
        serviceResult = f.cellDirector.CallService(
            "CallFieldOscillators",
            expectedField1Key,
            fieldOscillator1Call,
            fieldOscillator2Call,
            fieldOscillator3Call,
            fieldOscillator4Call,
            fieldOscillator5Call,
            fieldOscillator6Call,
            fieldOscillator7Call,
            fieldOscillator8Call,
            fieldOscillator9Call,
            fieldOscillator10Call,
            fieldOscillator11Call,
            fieldOscillator12Call)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get field oscillator reports (the macro calls must not disturb metadata):
        query = Json.obj(
            "k" -> Json.arr(
                expectedFieldOscillator1Key,
                expectedFieldOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportFieldOscillators", // limited
            expectedField1Key,
            expectedFieldEmitter1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm field oscillator states:
        fieldOscillatorReports = (Json.parse(serviceResult._results.head) \ "rfo").as[Seq[JsObject]]
        assert(fieldOscillatorReports.size == 2)
        for (report <- fieldOscillatorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedFieldOscillator1Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldOscillator1Renamed)
                case `expectedFieldOscillator2Key` =>
                    assert((report \ "m" \ "n").as[String] == fieldOscillator2Renamed)
                case _ => assert(false)
            }
        // Destroy field oscillators:
        val fieldOscillator1Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedFieldOscillator1Key))
        val fieldOscillator2Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedFieldOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "DestroyFieldOscillators",
            expectedField1Key,
            fieldOscillator1Destructor,
            fieldOscillator2Destructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get field oscillator reports (should be none reporting).  The "~~fe"
        // emitter key presumably acts as a wildcard over all emitters — TODO confirm:
        query = Json.obj(
            "k" -> Json.arr(
                expectedFieldOscillator1Key,
                expectedFieldOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportFieldOscillators", // unlimited
            expectedField1Key,
            "~~fe",
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        fieldOscillatorReports = (Json.parse(serviceResult._results.head) \ "rfo").as[Seq[JsObject]]
        assert(fieldOscillatorReports.isEmpty)
    }
//
// Subjects:
//
it must "pass basic subject tests" in
{ f =>
val subject1Name = "s1"
val subject1Tag = "!s1"
val subject2Name = "s2"
val subject2Tag = "!s2"
val expectedSubject1Key = "s1~s"
val expectedSubject2Key = "s2~s"
// Attempt to destroy any prior field (might fail):
val fieldDestructor = Json.obj(
"m" -> Json.obj("k" -> expectedField1Key),
"s" -> "s")
var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.FieldUnknown)
// Create field:
val fieldConstructor = Json.obj(
"m" -> Json.obj(
"t" -> field1Tag,
"n" -> field1Name),
"a" -> Json.obj(
"ac" -> 0.0.toString,
"ar" -> 0.0.toString,
"ad" -> 1000.0.toString,
"g" -> "2S"))
serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create subjects:
val subject1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> subject1Tag,
"n" -> subject1Name),
"s" -> Json.obj(
"p" -> Json.arr(0.5.toString, 0.5.toString),
"r" -> Json.arr(0.0.toString)))
val subject2Constructor = Json.obj(
"m" -> Json.obj(
"t" -> subject2Tag,
"n" -> subject2Name),
"s" -> Json.obj(
"p" -> Json.arr((-0.5).toString, (-0.5).toString),
"r" -> Json.arr(0.0.toString)))
serviceResult = f.cellDirector.CallService(
"CreateSubjects",
expectedField1Key,
subject1Constructor,
subject2Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm subject states:
var subjectReports = (Json.parse(serviceResult._results.head) \ "rs").as[Seq[JsObject]]
assert(subjectReports.size == 2)
for (report <- subjectReports)
(report \ "m" \ "k").as[String] match
{
case `expectedSubject1Key` =>
assert((report \ "m" \ "n").as[String] == subject1Name)
case `expectedSubject2Key` =>
assert((report \ "m" \ "n").as[String] == subject2Name)
case _ => assert(false)
}
// Update subjects:
val subject1Renamed = subject1Tag + "_renamed"
val subject2Renamed = subject2Tag + "_renamed"
val subject1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedSubject1Key,
"n" -> subject1Renamed))
val subject2Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedSubject2Key,
"n" -> subject2Renamed))
serviceResult = f.cellDirector.CallService(
"UpdateSubjects",
expectedField1Key,
subject1Update,
subject2Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get subject reports:
var query = Json.obj(
"k" -> Json.arr(
expectedSubject1Key,
expectedSubject2Key))
serviceResult = f.cellDirector.CallService(
"ReportSubjects",
expectedField1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm subject states:
subjectReports = (Json.parse(serviceResult._results.head) \ "rs").as[Seq[JsObject]]
assert(subjectReports.size == 2)
for (report <- subjectReports)
(report \ "m" \ "k").as[String] match
{
case `expectedSubject1Key` =>
assert((report \ "m" \ "n").as[String] == subject1Renamed)
case `expectedSubject2Key` =>
assert((report \ "m" \ "n").as[String] == subject2Renamed)
case _ => assert(false)
}
// Destroy subjects:
val subject1Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedSubject1Key))
val subject2Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedSubject2Key))
serviceResult = f.cellDirector.CallService(
"DestroySubjects",
expectedField1Key,
subject1Destructor,
subject2Destructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get subject reports (should be none reporting):
query = Json.obj(
"k" -> Json.arr(
expectedSubject1Key,
expectedSubject2Key))
serviceResult = f.cellDirector.CallService(
"ReportSubjects",
expectedField1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
subjectReports = (Json.parse(serviceResult._results.head) \ "rs").as[Seq[JsObject]]
assert(subjectReports.isEmpty)
}
//
// Subject Emitters:
//
it must "pass basic subject emitter tests" in
{ f =>
val subject1Name = "s1"
val subject1Tag = "!s1"
val subjectEmitter1Name = "se1"
val subjectEmitter1Tag = "!se1"
val subjectEmitter2Name = "se2"
val subjectEmitter2Tag = "!se2"
val expectedSubject1Key = "s1~s"
val expectedSubjectEmitter1Key = "se1~se"
val expectedSubjectEmitter2Key = "se2~se"
// Attempt to destroy any prior field (might fail):
val fieldDestructor = Json.obj(
"m" -> Json.obj("k" -> expectedField1Key),
"s" -> "s")
var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.FieldUnknown)
// Create field:
val fieldConstructor = Json.obj(
"m" -> Json.obj(
"t" -> field1Tag,
"n" -> field1Name),
"a" -> Json.obj(
"ac" -> 0.0.toString,
"ar" -> 0.0.toString,
"ad" -> 1000.0.toString,
"g" -> "2S"))
serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create subject:
val subject1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> subject1Tag,
"n" -> subject1Name),
"s" -> Json.obj(
"p" -> Json.arr(0.5.toString, 0.5.toString),
"r" -> Json.arr(0.0.toString)))
serviceResult = f.cellDirector.CallService(
"CreateSubjects",
expectedField1Key,
subject1Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create subject emitters:
val subjectEmitter1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> subjectEmitter1Tag,
"n" -> subjectEmitter1Name))
val subjectEmitter2Constructor = Json.obj(
"m" -> Json.obj(
"t" -> subjectEmitter2Tag,
"n" -> subjectEmitter2Name))
serviceResult = f.cellDirector.CallService(
"CreateSubjectEmitters",
expectedField1Key,
expectedSubject1Key,
subjectEmitter1Constructor,
subjectEmitter2Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm subject emitter states:
var subjectEmitterReports = (Json.parse(serviceResult._results.head) \ "rse").as[Seq[JsObject]]
assert(subjectEmitterReports.size == 2)
for (report <- subjectEmitterReports)
(report \ "m" \ "k").as[String] match
{
case `expectedSubjectEmitter1Key` =>
assert((report \ "m" \ "n").as[String] == subjectEmitter1Name)
case `expectedSubjectEmitter2Key` =>
assert((report \ "m" \ "n").as[String] == subjectEmitter2Name)
case _ => assert(false)
}
// Update subject emitters:
val subjectEmitter1Renamed = subjectEmitter1Name + "_renamed"
val subjectEmitter2Renamed = subjectEmitter2Name + "_renamed"
val subjectEmitter1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedSubjectEmitter1Key,
"n" -> subjectEmitter1Renamed))
val subjectEmitter2Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedSubjectEmitter2Key,
"n" -> subjectEmitter2Renamed))
serviceResult = f.cellDirector.CallService(
"UpdateSubjectEmitters",
expectedField1Key,
subjectEmitter1Update,
subjectEmitter2Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get subject emitter reports:
var query = Json.obj(
"k" -> Json.arr(
expectedSubjectEmitter1Key,
expectedSubjectEmitter2Key),
"s" -> Json.arr("asc"))
serviceResult = f.cellDirector.CallService(
"ReportSubjectEmitters", // limited
expectedField1Key,
expectedSubject1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm subject emitter states:
subjectEmitterReports = (Json.parse(serviceResult._results.head) \ "rse").as[Seq[JsObject]]
assert(subjectEmitterReports.size == 2)
for (report <- subjectEmitterReports)
(report \ "m" \ "k").as[String] match
{
case `expectedSubjectEmitter1Key` =>
assert((report \ "m" \ "n").as[String] == subjectEmitter1Renamed)
case `expectedSubjectEmitter2Key` =>
assert((report \ "m" \ "n").as[String] == subjectEmitter2Renamed)
case _ => assert(false)
}
// Destroy subject emitters:
val subjectEmitter1Destructor = Json.obj(
"m" -> Json.obj("k" -> expectedSubjectEmitter1Key))
val subjectEmitter2Destructor = Json.obj(
"m" -> Json.obj("k" -> expectedSubjectEmitter2Key))
serviceResult = f.cellDirector.CallService(
"DestroySubjectEmitters",
expectedField1Key,
subjectEmitter1Destructor,
subjectEmitter2Destructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get subject emitter reports (should be none reporting):
query = Json.obj(
"k" -> Json.arr(
expectedSubjectEmitter1Key,
expectedSubjectEmitter2Key))
serviceResult = f.cellDirector.CallService(
"ReportSubjectEmitters", // unlimited
expectedField1Key,
"~~s",
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
subjectEmitterReports = (Json.parse(serviceResult._results.head) \ "rse").as[Seq[JsObject]]
assert(subjectEmitterReports.isEmpty)
}
//
// Subject Oscillators:
//
    // Lifecycle test for subject oscillators: builds a fresh field, a subject,
    // and a parent subject emitter, then creates two oscillators on that
    // emitter and walks them through report, update (rename), and destroy,
    // checking the reported keys/names after each mutation.
    it must "pass basic subject oscillator tests" in
    { f =>
        // Expected keys; these appear to follow a "<tag-body>~<type-suffix>"
        // convention assigned by the cell (e.g. "so1~so") — confirm against the
        // cell's key-generation rules if they change.
        val expectedSubject1Key = "s1~s"
        val expectedSubjectEmitter1Key = "se1~se"
        val expectedSubjectOscillator1Key = "so1~so"
        val expectedSubjectOscillator2Key = "so2~so"
        val subject1Tag = "!s1"
        val subject1Name = "s1"
        val subjectEmitter1Tag = "!se1"
        val subjectEmitter1Name = "se1"
        val subjectOscillator1Tag = "!so1"
        val subjectOscillator1Name = "so1"
        val subjectOscillator2Tag = "!so2"
        val subjectOscillator2Name = "so2"
        // Attempt to destroy any prior field (might fail):
        // FieldUnknown is acceptable here because an earlier test may not have
        // left a field behind.
        val fieldDestructor = Json.obj(
            "m" -> Json.obj("k" -> expectedField1Key),
            "s" -> "s")
        var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
        assert(
            serviceResult._status == Cell.ErrorCodes.Ok ||
            serviceResult._status == Cell.ErrorCodes.FieldUnknown)
        // Create field:
        val fieldConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> field1Tag,
                "n" -> field1Name),
            "a" -> Json.obj(
                "ac" -> 0.0.toString,
                "ar" -> 0.0.toString,
                "ad" -> 1000.0.toString,
                "g" -> "2S"))
        serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create subject:
        val subject1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> subject1Tag,
                "n" -> subject1Name),
            "s" -> Json.obj(
                "p" -> Json.arr(0.5.toString, 0.5.toString),
                "r" -> Json.arr(0.0.toString)))
        serviceResult = f.cellDirector.CallService(
            "CreateSubjects",
            expectedField1Key,
            subject1Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create subject emitter:
        val subjectEmitterConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> subjectEmitter1Tag,
                "n" -> subjectEmitter1Name))
        serviceResult = f.cellDirector.CallService(
            "CreateSubjectEmitters",
            expectedField1Key,
            expectedSubject1Key,
            subjectEmitterConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create subject oscillators:
        // NOTE(review): "dc"/"tc"/"dpo"/"w" look like channel-descriptor fields
        // selecting a channel ("tc") and a default patch-oscillator waveform —
        // confirm against the oscillator constructor schema.
        val subjectOscillator1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> subjectOscillator1Tag,
                "n" -> subjectOscillator1Name),
            "a" -> Json.obj(
                "dc" -> Json.obj(
                    "tc" -> "0",
                    "dpo" -> Json.obj(
                        "w" -> "default.tws"))))
        val subjectOscillator2Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> subjectOscillator2Tag,
                "n" -> subjectOscillator2Name),
            "a" -> Json.obj(
                "dc" -> Json.obj(
                    "tc" -> "1",
                    "dpo" -> Json.obj(
                        "w" -> "default.tws"))))
        serviceResult = f.cellDirector.CallService(
            "CreateSubjectOscillators",
            expectedField1Key,
            expectedSubjectEmitter1Key,
            subjectOscillator1Constructor,
            subjectOscillator2Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm subject oscillator states ("rso" = subject oscillator reports):
        var subjectOscillatorReports = (Json.parse(serviceResult._results.head) \ "rso").as[Seq[JsObject]]
        assert(subjectOscillatorReports.size == 2)
        for (report <- subjectOscillatorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedSubjectOscillator1Key` =>
                    assert((report \ "m" \ "n").as[String] == subjectOscillator1Name)
                case `expectedSubjectOscillator2Key` =>
                    assert((report \ "m" \ "n").as[String] == subjectOscillator2Name)
                case _ => assert(false)
            }
        // Update subject oscillators:
        val subjectOscillator1Renamed = subjectOscillator1Name + "_renamed"
        val subjectOscillator2Renamed = subjectOscillator2Name + "_renamed"
        val subjectOscillator1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedSubjectOscillator1Key,
                "n" -> subjectOscillator1Renamed))
        val subjectOscillator2Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedSubjectOscillator2Key,
                "n" -> subjectOscillator2Renamed))
        serviceResult = f.cellDirector.CallService(
            "UpdateSubjectOscillators",
            expectedField1Key,
            subjectOscillator1Update,
            subjectOscillator2Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get subject oscillator reports:
        var query = Json.obj(
            "k" -> Json.arr(
                expectedSubjectOscillator1Key,
                expectedSubjectOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportSubjectOscillators", // limited
            expectedField1Key,
            expectedSubjectEmitter1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm subject oscillator states (renames must have taken effect):
        subjectOscillatorReports = (Json.parse(serviceResult._results.head) \ "rso").as[Seq[JsObject]]
        assert(subjectOscillatorReports.size == 2)
        for (report <- subjectOscillatorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedSubjectOscillator1Key` =>
                    assert((report \ "m" \ "n").as[String] == subjectOscillator1Renamed)
                case `expectedSubjectOscillator2Key` =>
                    assert((report \ "m" \ "n").as[String] == subjectOscillator2Renamed)
                case _ => assert(false)
            }
        // Destroy subject oscillators:
        val subjectOscillator1Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedSubjectOscillator1Key))
        val subjectOscillator2Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedSubjectOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "DestroySubjectOscillators",
            expectedField1Key,
            subjectOscillator1Destructor,
            subjectOscillator2Destructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get subject oscillator reports (should be none reporting):
        query = Json.obj(
            "k" -> Json.arr(
                expectedSubjectOscillator1Key,
                expectedSubjectOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportSubjectOscillators", // unlimited
            expectedField1Key,
            "~~se", // NOTE(review): wildcard-style emitter key — presumably "any subject emitter"; confirm
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        subjectOscillatorReports = (Json.parse(serviceResult._results.head) \ "rso").as[Seq[JsObject]]
        assert(subjectOscillatorReports.isEmpty)
    }
//
// Probes:
//
it must "pass basic probe tests" in
{ f =>
val probe1Name = "p1"
val probe1Tag = "!p1"
val probe2Name = "p2"
val probe2Tag = "!p2"
val expectedProbe1Key = "p1~p"
val expectedProbe2Key = "p2~p"
// Attempt to destroy any prior field (might fail):
val fieldDestructor = Json.obj(
"m" -> Json.obj("k" -> expectedField1Key),
"s" -> "s")
var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
assert(
serviceResult._status == Cell.ErrorCodes.Ok ||
serviceResult._status == Cell.ErrorCodes.FieldUnknown)
// Create field:
val fieldConstructor = Json.obj(
"m" -> Json.obj(
"t" -> field1Tag,
"n" -> field1Name),
"a" -> Json.obj(
"ac" -> 0.0.toString,
"ar" -> 0.0.toString,
"ad" -> 1000.0.toString,
"g" -> "2S"))
serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Create probes:
val probe1Constructor = Json.obj(
"m" -> Json.obj(
"t" -> probe1Tag,
"n" -> probe1Name),
"a" -> Json.obj(
"aa" -> 1.0.toString),
"s" -> Json.obj(
"p" -> Json.arr(0.5.toString, 0.5.toString),
"r" -> Json.arr(0.0.toString)))
val probe2Constructor = Json.obj(
"m" -> Json.obj(
"t" -> probe2Tag,
"n" -> probe2Name),
"s" -> Json.obj(
"p" -> Json.arr((-0.5).toString, (-0.5).toString),
"r" -> Json.arr(0.0.toString)))
serviceResult = f.cellDirector.CallService(
"CreateProbes",
expectedField1Key,
probe1Constructor,
probe2Constructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm probe states:
var probeReports = (Json.parse(serviceResult._results.head) \ "rp").as[Seq[JsObject]]
assert(probeReports.size == 2)
for (report <- probeReports)
(report \ "m" \ "k").as[String] match
{
case `expectedProbe1Key` =>
assert((report \ "m" \ "n").as[String] == probe1Name)
case `expectedProbe2Key` =>
assert((report \ "m" \ "n").as[String] == probe2Name)
case _ => assert(false)
}
// Update probes:
val probe1Renamed = probe1Name + "_renamed"
val probe2Renamed = probe2Name + "_renamed"
val probe1Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedProbe1Key,
"n" -> probe1Renamed))
val probe2Update = Json.obj(
"m" -> Json.obj(
"k" -> expectedProbe2Key,
"n" -> probe2Renamed))
serviceResult = f.cellDirector.CallService(
"UpdateProbes",
expectedField1Key,
probe1Update,
probe2Update)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get probe reports:
var query = Json.obj(
"k" -> Json.arr(
expectedProbe1Key,
expectedProbe2Key))
serviceResult = f.cellDirector.CallService(
"ReportProbes",
expectedField1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Confirm probe states:
probeReports = (Json.parse(serviceResult._results.head) \ "rp").as[Seq[JsObject]]
assert(probeReports.size == 2)
for (report <- probeReports)
(report \ "m" \ "k").as[String] match
{
case `expectedProbe1Key` =>
assert((report \ "m" \ "n").as[String] == probe1Renamed)
case `expectedProbe2Key` =>
assert((report \ "m" \ "n").as[String] == probe2Renamed)
case _ => assert(false)
}
// Destroy probes:
val probe1Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedProbe1Key))
val probe2Destructor = Json.obj(
"m" -> Json.obj(
"k" -> expectedProbe2Key))
serviceResult = f.cellDirector.CallService(
"DestroyProbes",
expectedField1Key,
probe1Destructor,
probe2Destructor)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
// Get probe reports (should be none reporting):
query = Json.obj(
"k" -> Json.arr(
expectedProbe1Key,
expectedProbe2Key))
serviceResult = f.cellDirector.CallService(
"ReportProbes",
expectedField1Key,
query)
assert(serviceResult._status == Cell.ErrorCodes.Ok)
probeReports = (Json.parse(serviceResult._results.head) \ "rp").as[Seq[JsObject]]
assert(probeReports.isEmpty)
}
//
// Probe Emitters:
//
    // Lifecycle test for probe emitters: builds a fresh field and a host probe,
    // then creates two emitters on that probe and walks them through report,
    // update (rename), and destroy, checking reported keys/names at each step.
    it must "pass basic probe emitter tests" in
    { f =>
        val probe1Name = "p1"
        val probe1Tag = "!p1"
        val probeEmitter1Name = "pe1"
        val probeEmitter1Tag = "!pe1"
        val probeEmitter2Name = "pe2"
        val probeEmitter2Tag = "!pe2"
        // Expected keys; these appear to follow a "<tag-body>~<type-suffix>"
        // convention assigned by the cell — confirm if key generation changes.
        val expectedProbe1Key = "p1~p"
        val expectedProbeEmitter1Key = "pe1~pe"
        val expectedProbeEmitter2Key = "pe2~pe"
        // Attempt to destroy any prior field (might fail):
        // FieldUnknown is acceptable because an earlier test may not have left
        // a field behind.
        val fieldDestructor = Json.obj(
            "m" -> Json.obj("k" -> expectedField1Key),
            "s" -> "s")
        var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
        assert(
            serviceResult._status == Cell.ErrorCodes.Ok ||
            serviceResult._status == Cell.ErrorCodes.FieldUnknown)
        // Create field:
        val fieldConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> field1Tag,
                "n" -> field1Name),
            "a" -> Json.obj(
                "ac" -> 0.0.toString,
                "ar" -> 0.0.toString,
                "ad" -> 1000.0.toString,
                "g" -> "2S"))
        serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create probe:
        val probe1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probe1Tag,
                "n" -> probe1Name),
            "s" -> Json.obj(
                "p" -> Json.arr(0.5.toString, 0.5.toString),
                "r" -> Json.arr(0.0.toString)))
        serviceResult = f.cellDirector.CallService(
            "CreateProbes",
            expectedField1Key,
            probe1Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create probe emitters:
        val probeEmitter1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probeEmitter1Tag,
                "n" -> probeEmitter1Name))
        val probeEmitter2Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probeEmitter2Tag,
                "n" -> probeEmitter2Name))
        serviceResult = f.cellDirector.CallService(
            "CreateProbeEmitters",
            expectedField1Key,
            expectedProbe1Key,
            probeEmitter1Constructor,
            probeEmitter2Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm probe emitter states ("rpe" = probe emitter reports section):
        var probeEmitterReports = (Json.parse(serviceResult._results.head) \ "rpe").as[Seq[JsObject]]
        assert(probeEmitterReports.size == 2)
        for (report <- probeEmitterReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedProbeEmitter1Key` =>
                    assert((report \ "m" \ "n").as[String] == probeEmitter1Name)
                case `expectedProbeEmitter2Key` =>
                    assert((report \ "m" \ "n").as[String] == probeEmitter2Name)
                case _ => assert(false)
            }
        // Update probe emitters:
        val probeEmitter1Renamed = probeEmitter1Name + "_renamed"
        val probeEmitter2Renamed = probeEmitter2Name + "_renamed"
        val probeEmitter1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedProbeEmitter1Key,
                "n" -> probeEmitter1Renamed))
        val probeEmitter2Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedProbeEmitter2Key,
                "n" -> probeEmitter2Renamed))
        serviceResult = f.cellDirector.CallService(
            "UpdateProbeEmitters",
            expectedField1Key,
            probeEmitter1Update,
            probeEmitter2Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get probe emitter reports:
        var query = Json.obj(
            "k" -> Json.arr(
                expectedProbeEmitter1Key,
                expectedProbeEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportProbeEmitters", // limited
            expectedField1Key,
            expectedProbe1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm probe emitter states (renames must have taken effect):
        probeEmitterReports = (Json.parse(serviceResult._results.head) \ "rpe").as[Seq[JsObject]]
        assert(probeEmitterReports.size == 2)
        for (report <- probeEmitterReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedProbeEmitter1Key` =>
                    assert((report \ "m" \ "n").as[String] == probeEmitter1Renamed)
                case `expectedProbeEmitter2Key` =>
                    assert((report \ "m" \ "n").as[String] == probeEmitter2Renamed)
                case _ => assert(false)
            }
        // Destroy probe emitters:
        val probeEmitter1Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedProbeEmitter1Key))
        val probeEmitter2Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedProbeEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "DestroyProbeEmitters",
            expectedField1Key,
            probeEmitter1Destructor,
            probeEmitter2Destructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get probe emitter reports (should be none reporting):
        query = Json.obj(
            "k" -> Json.arr(
                expectedProbeEmitter1Key,
                expectedProbeEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportProbeEmitters", // unlimited
            expectedField1Key,
            "~~p", // NOTE(review): wildcard-style probe key — presumably "any probe"; confirm
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        probeEmitterReports = (Json.parse(serviceResult._results.head) \ "rpe").as[Seq[JsObject]]
        assert(probeEmitterReports.isEmpty)
    }
//
// Probe Oscillators:
//
    // Lifecycle test for probe oscillators: builds a fresh field, a probe, and
    // a parent probe emitter, then creates two oscillators on that emitter and
    // walks them through report, update (rename), and destroy, checking the
    // reported keys/names after each mutation.
    it must "pass basic probe oscillator tests" in
    { f =>
        // Expected keys; these appear to follow a "<tag-body>~<type-suffix>"
        // convention assigned by the cell — confirm if key generation changes.
        val expectedProbe1Key = "p1~p"
        val expectedProbeEmitter1Key = "pe1~pe"
        val expectedProbeOscillator1Key = "po1~po"
        val expectedProbeOscillator2Key = "po2~po"
        val probe1Tag = "!p1"
        val probe1Name = "p1"
        val probeEmitter1Tag = "!pe1"
        val probeEmitter1Name = "pe1"
        val probeOscillator1Tag = "!po1"
        val probeOscillator1Name = "po1"
        val probeOscillator2Tag = "!po2"
        val probeOscillator2Name = "po2"
        // Attempt to destroy any prior field (might fail):
        // FieldUnknown is acceptable because an earlier test may not have left
        // a field behind.
        val fieldDestructor = Json.obj(
            "m" -> Json.obj("k" -> expectedField1Key),
            "s" -> "s")
        var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
        assert(
            serviceResult._status == Cell.ErrorCodes.Ok ||
            serviceResult._status == Cell.ErrorCodes.FieldUnknown)
        // Create field:
        val fieldConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> field1Tag,
                "n" -> field1Name),
            "a" -> Json.obj(
                "ac" -> 0.0.toString,
                "ar" -> 0.0.toString,
                "ad" -> 1000.0.toString,
                "g" -> "2S"))
        serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create probe:
        val probe1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probe1Tag,
                "n" -> probe1Name),
            "s" -> Json.obj(
                "p" -> Json.arr(0.5.toString, 0.5.toString),
                "r" -> Json.arr(0.0.toString)))
        serviceResult = f.cellDirector.CallService(
            "CreateProbes",
            expectedField1Key,
            probe1Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create probe emitter:
        val probeEmitterConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probeEmitter1Tag,
                "n" -> probeEmitter1Name))
        serviceResult = f.cellDirector.CallService(
            "CreateProbeEmitters",
            expectedField1Key,
            expectedProbe1Key,
            probeEmitterConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create probe oscillators:
        // NOTE(review): "dc"/"tc"/"dpo"/"w" look like channel-descriptor fields
        // selecting a channel ("tc") and a default patch-oscillator waveform —
        // confirm against the oscillator constructor schema.
        val probeOscillator1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probeOscillator1Tag,
                "n" -> probeOscillator1Name),
            "a" -> Json.obj(
                "dc" -> Json.obj(
                    "tc" -> "0",
                    "dpo" -> Json.obj(
                        "w" -> "default.tws"))))
        val probeOscillator2Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probeOscillator2Tag,
                "n" -> probeOscillator2Name),
            "a" -> Json.obj(
                "dc" -> Json.obj(
                    "tc" -> "1",
                    "dpo" -> Json.obj(
                        "w" -> "default.tws"))))
        serviceResult = f.cellDirector.CallService(
            "CreateProbeOscillators",
            expectedField1Key,
            expectedProbeEmitter1Key,
            probeOscillator1Constructor,
            probeOscillator2Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm probe oscillator states ("rpo" = probe oscillator reports):
        var probeOscillatorReports = (Json.parse(serviceResult._results.head) \ "rpo").as[Seq[JsObject]]
        assert(probeOscillatorReports.size == 2)
        for (report <- probeOscillatorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedProbeOscillator1Key` =>
                    assert((report \ "m" \ "n").as[String] == probeOscillator1Name)
                case `expectedProbeOscillator2Key` =>
                    assert((report \ "m" \ "n").as[String] == probeOscillator2Name)
                case _ => assert(false)
            }
        // Update probe oscillators:
        val probeOscillator1Renamed = probeOscillator1Name + "_renamed"
        val probeOscillator2Renamed = probeOscillator2Name + "_renamed"
        val probeOscillator1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedProbeOscillator1Key,
                "n" -> probeOscillator1Renamed))
        val probeOscillator2Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedProbeOscillator2Key,
                "n" -> probeOscillator2Renamed))
        serviceResult = f.cellDirector.CallService(
            "UpdateProbeOscillators",
            expectedField1Key,
            probeOscillator1Update,
            probeOscillator2Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get probe oscillator reports:
        var query = Json.obj(
            "k" -> Json.arr(
                expectedProbeOscillator1Key,
                expectedProbeOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportProbeOscillators", // limited
            expectedField1Key,
            expectedProbeEmitter1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm probe oscillator states (renames must have taken effect):
        probeOscillatorReports = (Json.parse(serviceResult._results.head) \ "rpo").as[Seq[JsObject]]
        assert(probeOscillatorReports.size == 2)
        for (report <- probeOscillatorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedProbeOscillator1Key` =>
                    assert((report \ "m" \ "n").as[String] == probeOscillator1Renamed)
                case `expectedProbeOscillator2Key` =>
                    assert((report \ "m" \ "n").as[String] == probeOscillator2Renamed)
                case _ => assert(false)
            }
        // Destroy probe oscillators:
        val probeOscillator1Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedProbeOscillator1Key))
        val probeOscillator2Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedProbeOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "DestroyProbeOscillators",
            expectedField1Key,
            probeOscillator1Destructor,
            probeOscillator2Destructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get probe oscillator reports (should be none reporting):
        query = Json.obj(
            "k" -> Json.arr(
                expectedProbeOscillator1Key,
                expectedProbeOscillator2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportProbeOscillators", // unlimited
            expectedField1Key,
            "~~pe", // NOTE(review): wildcard-style emitter key — presumably "any probe emitter"; confirm
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        probeOscillatorReports = (Json.parse(serviceResult._results.head) \ "rpo").as[Seq[JsObject]]
        assert(probeOscillatorReports.isEmpty)
    }
//
// Probe Collectors:
//
    // Lifecycle test for probe collectors: builds a fresh field and a host
    // probe, then creates two collectors on that probe and walks them through
    // report, update (rename), and destroy, checking reported keys/names at
    // each step.
    it must "pass basic probe collector tests" in
    { f =>
        val probe1Name = "p1"
        val probe1Tag = "!p1"
        val probeCollector1Name = "pc1"
        val probeCollector1Tag = "!pc1"
        val probeCollector2Name = "pc2"
        val probeCollector2Tag = "!pc2"
        // Expected keys; these appear to follow a "<tag-body>~<type-suffix>"
        // convention assigned by the cell — confirm if key generation changes.
        val expectedProbe1Key = "p1~p"
        val expectedProbeCollector1Key = "pc1~pc"
        val expectedProbeCollector2Key = "pc2~pc"
        // Attempt to destroy any prior field (might fail):
        // FieldUnknown is acceptable because an earlier test may not have left
        // a field behind.
        val fieldDestructor = Json.obj(
            "m" -> Json.obj("k" -> expectedField1Key),
            "s" -> "s")
        var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
        assert(
            serviceResult._status == Cell.ErrorCodes.Ok ||
            serviceResult._status == Cell.ErrorCodes.FieldUnknown)
        // Create field:
        val fieldConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> field1Tag,
                "n" -> field1Name),
            "a" -> Json.obj(
                "ac" -> 0.0.toString,
                "ar" -> 0.0.toString,
                "ad" -> 1000.0.toString,
                "g" -> "2S"))
        serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create probe:
        val probe1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probe1Tag,
                "n" -> probe1Name),
            "s" -> Json.obj(
                "p" -> Json.arr(0.5.toString, 0.5.toString),
                "r" -> Json.arr(0.0.toString)))
        serviceResult = f.cellDirector.CallService(
            "CreateProbes",
            expectedField1Key,
            probe1Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create probe collectors:
        // NOTE(review): "aa"/"dt"/"st" are collector attributes — exact
        // semantics (acoustic attenuation? decay/squelch thresholds?) are not
        // visible here; confirm against the collector constructor schema.
        val probeCollector1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probeCollector1Tag,
                "n" -> probeCollector1Name),
            "a" -> Json.obj(
                "aa" -> 1.0.toString,
                "dt" -> 1.0.toString,
                "st" -> 1.0.toString))
        val probeCollector2Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> probeCollector2Tag,
                "n" -> probeCollector2Name),
            "a" -> Json.obj(
                "aa" -> 1.0.toString,
                "dt" -> 1.0.toString,
                "st" -> 1.0.toString))
        serviceResult = f.cellDirector.CallService(
            "CreateProbeCollectors",
            expectedField1Key,
            expectedProbe1Key,
            probeCollector1Constructor,
            probeCollector2Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm probe collector states ("rpc" = probe collector reports):
        var probeCollectorReports = (Json.parse(serviceResult._results.head) \ "rpc").as[Seq[JsObject]]
        assert(probeCollectorReports.size == 2)
        for (report <- probeCollectorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedProbeCollector1Key` =>
                    assert((report \ "m" \ "n").as[String] == probeCollector1Name)
                case `expectedProbeCollector2Key` =>
                    assert((report \ "m" \ "n").as[String] == probeCollector2Name)
                case _ => assert(false)
            }
        // Update probe collectors:
        val probeCollector1Renamed = probeCollector1Name + "_renamed"
        val probeCollector2Renamed = probeCollector2Name + "_renamed"
        val probeCollector1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedProbeCollector1Key,
                "n" -> probeCollector1Renamed))
        val probeCollector2Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedProbeCollector2Key,
                "n" -> probeCollector2Renamed))
        serviceResult = f.cellDirector.CallService(
            "UpdateProbeCollectors",
            expectedField1Key,
            probeCollector1Update,
            probeCollector2Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get probe collector reports:
        var query = Json.obj(
            "k" -> Json.arr(
                expectedProbeCollector1Key,
                expectedProbeCollector2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportProbeCollectors", // limited
            expectedField1Key,
            expectedProbe1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm probe collector states (renames must have taken effect):
        probeCollectorReports = (Json.parse(serviceResult._results.head) \ "rpc").as[Seq[JsObject]]
        assert(probeCollectorReports.size == 2)
        for (report <- probeCollectorReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedProbeCollector1Key` =>
                    assert((report \ "m" \ "n").as[String] == probeCollector1Renamed)
                case `expectedProbeCollector2Key` =>
                    assert((report \ "m" \ "n").as[String] == probeCollector2Renamed)
                case _ => assert(false)
            }
        // Destroy probe collectors:
        val probeCollector1Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedProbeCollector1Key))
        val probeCollector2Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedProbeCollector2Key))
        serviceResult = f.cellDirector.CallService(
            "DestroyProbeCollectors",
            expectedField1Key,
            probeCollector1Destructor,
            probeCollector2Destructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get probe collector reports (should be none reporting):
        query = Json.obj(
            "k" -> Json.arr(
                expectedProbeCollector1Key,
                expectedProbeCollector2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportProbeCollectors", // unlimited
            expectedField1Key,
            "~~p", // NOTE(review): wildcard-style probe key — presumably "any probe"; confirm
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        probeCollectorReports = (Json.parse(serviceResult._results.head) \ "rpc").as[Seq[JsObject]]
        assert(probeCollectorReports.isEmpty)
    }
//
// Emitter Patches:
//
    // Lifecycle test for emitter patches: creates a field, a subject, and two
    // subject emitters, verifies/renames the emitters, then reports and updates
    // the patches that accompany the emitters (patch keys are derived from the
    // emitter keys), and finally destroys the emitters and verifies nothing is
    // left reporting.
    it must "pass basic emitter patch tests" in
    { f =>
        val subject1Name = "s1"
        val subject1Tag = "!s1"
        val subjectEmitter1Name = "se1"
        val subjectEmitter1Tag = "!se1"
        val subjectEmitter2Name = "se2"
        val subjectEmitter2Tag = "!se2"
        val expectedSubject1Key = "s1~s"
        val expectedSubjectEmitter1Key = "se1~se"
        val expectedSubjectEmitter2Key = "se2~se"
        // Attempt to destroy any prior field (might fail):
        // FieldUnknown is acceptable because an earlier test may not have left
        // a field behind.
        val fieldDestructor = Json.obj(
            "m" -> Json.obj("k" -> expectedField1Key),
            "s" -> "s")
        var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
        assert(
            serviceResult._status == Cell.ErrorCodes.Ok ||
            serviceResult._status == Cell.ErrorCodes.FieldUnknown)
        // Create field:
        val fieldConstructor = Json.obj(
            "m" -> Json.obj(
                "t" -> field1Tag,
                "n" -> field1Name),
            "a" -> Json.obj(
                "ac" -> 0.0.toString,
                "ar" -> 0.0.toString,
                "ad" -> 1000.0.toString,
                "g" -> "2S"))
        serviceResult = f.cellDirector.CallService("CreateFields", fieldConstructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create subject:
        val subject1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> subject1Tag,
                "n" -> subject1Name),
            "s" -> Json.obj(
                "p" -> Json.arr(0.5.toString, 0.5.toString),
                "r" -> Json.arr(0.0.toString)))
        serviceResult = f.cellDirector.CallService(
            "CreateSubjects",
            expectedField1Key,
            subject1Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Create subject emitters:
        val subjectEmitter1Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> subjectEmitter1Tag,
                "n" -> subjectEmitter1Name))
        val subjectEmitter2Constructor = Json.obj(
            "m" -> Json.obj(
                "t" -> subjectEmitter2Tag,
                "n" -> subjectEmitter2Name))
        serviceResult = f.cellDirector.CallService(
            "CreateSubjectEmitters",
            expectedField1Key,
            expectedSubject1Key,
            subjectEmitter1Constructor,
            subjectEmitter2Constructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm subject emitter states ("rse" = subject emitter reports):
        var subjectEmitterReports = (Json.parse(serviceResult._results.head) \ "rse").as[Seq[JsObject]]
        assert(subjectEmitterReports.size == 2)
        for (report <- subjectEmitterReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedSubjectEmitter1Key` =>
                    assert((report \ "m" \ "n").as[String] == subjectEmitter1Name)
                case `expectedSubjectEmitter2Key` =>
                    assert((report \ "m" \ "n").as[String] == subjectEmitter2Name)
                case _ => assert(false)
            }
        // Update subject emitters:
        val subjectEmitter1Renamed = subjectEmitter1Name + "_renamed"
        val subjectEmitter2Renamed = subjectEmitter2Name + "_renamed"
        val subjectEmitter1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedSubjectEmitter1Key,
                "n" -> subjectEmitter1Renamed))
        val subjectEmitter2Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedSubjectEmitter2Key,
                "n" -> subjectEmitter2Renamed))
        serviceResult = f.cellDirector.CallService(
            "UpdateSubjectEmitters",
            expectedField1Key,
            subjectEmitter1Update,
            subjectEmitter2Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get subject emitter reports:
        var query = Json.obj(
            "k" -> Json.arr(
                expectedSubjectEmitter1Key,
                expectedSubjectEmitter2Key),
            "s" -> Json.arr("asc"))
        serviceResult = f.cellDirector.CallService(
            "ReportSubjectEmitters", // limited
            expectedField1Key,
            expectedSubject1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm subject emitter states (renames must have taken effect):
        subjectEmitterReports = (Json.parse(serviceResult._results.head) \ "rse").as[Seq[JsObject]]
        assert(subjectEmitterReports.size == 2)
        for (report <- subjectEmitterReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedSubjectEmitter1Key` =>
                    assert((report \ "m" \ "n").as[String] == subjectEmitter1Renamed)
                case `expectedSubjectEmitter2Key` =>
                    assert((report \ "m" \ "n").as[String] == subjectEmitter2Renamed)
                case _ => assert(false)
            }
        // NOTE(review): patch keys appear to be derived from the owning emitter
        // key ("se1" -> "se1.smpe~smpe"), i.e. patches are created implicitly
        // with their emitters rather than by an explicit Create call — confirm.
        val expectedEmitterPatch1Key = "se1.smpe~smpe"
        val expectedEmitterPatch2Key = "se2.smpe~smpe"
        // Get emitter patch reports:
        query = Json.obj(
            "k" -> Json.arr(
                expectedEmitterPatch1Key,
                expectedEmitterPatch2Key),
            "s" -> Json.arr("asc"))
        serviceResult = f.cellDirector.CallService(
            "ReportEmitterPatches",
            expectedField1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm emitter patch states: the attribute view ("a"/"dpe"/"dc")
        // should carry the default channel ("_") with the default waveform.
        var emitterPatchReports = (Json.parse(serviceResult._results.head) \ "rsmpe").as[Seq[JsObject]]
        assert(emitterPatchReports.size == 2)
        for (report <- emitterPatchReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedEmitterPatch1Key` | `expectedEmitterPatch2Key` =>
                    val channel = (report \ "a" \ "dpe" \ "dc" \ "_").as[JsObject]
                    assert((channel \ "dpo" \ "w").as[String] == "default.tws")
                case _ => assert(false)
            }
        // Update both patches' default channel to a different waveform,
        // re-supplying the default patch envelope definition:
        val emitterPatch1Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedEmitterPatch1Key),
            "a" -> Json.obj(
                "dpe" -> Json.obj(
                    "dc" -> Json.obj(
                        "_" ->Json.obj(
                            "de" -> Patch.kDefaultPatchEnvelopeDef,
                            "dpo" -> Json.obj(
                                "w" -> "test.tws"))))))
        val emitterPatch2Update = Json.obj(
            "m" -> Json.obj(
                "k" -> expectedEmitterPatch2Key),
            "a" -> Json.obj(
                "dpe" -> Json.obj(
                    "dc" -> Json.obj(
                        "_" ->Json.obj(
                            "de" -> Patch.kDefaultPatchEnvelopeDef,
                            "dpo" -> Json.obj(
                                "w" -> "test.tws"))))))
        serviceResult = f.cellDirector.CallService(
            "UpdateEmitterPatches",
            expectedField1Key,
            emitterPatch1Update,
            emitterPatch2Update)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get emitter patch reports:
        query = Json.obj(
            "k" -> Json.arr(
                expectedEmitterPatch1Key,
                expectedEmitterPatch2Key),
            "s" -> Json.arr("asc"))
        serviceResult = f.cellDirector.CallService(
            "ReportEmitterPatches",
            expectedField1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Confirm emitter patch states: this time the update is checked via the
        // state view ("s"/"c") rather than the attribute view above.
        emitterPatchReports = (Json.parse(serviceResult._results.head) \ "rsmpe").as[Seq[JsObject]]
        assert(emitterPatchReports.size == 2)
        for (report <- emitterPatchReports)
            (report \ "m" \ "k").as[String] match
            {
                case `expectedEmitterPatch1Key` | `expectedEmitterPatch2Key` =>
                    val channel = (report \ "s" \ "c" \ "_").as[JsObject]
                    assert((channel \ "dpo" \ "w").as[String] == "test.tws")
                case _ => assert(false)
            }
        // Get subject emitter patch report (status only checked — the report
        // payload itself is not inspected here):
        query = Json.obj(
            "s" -> Json.arr("as"))
        serviceResult = f.cellDirector.CallService(
            "ReportPatchOfSubjectEmitter",
            expectedField1Key,
            expectedSubjectEmitter1Key,
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Destroy subject emitters:
        val subjectEmitter1Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedSubjectEmitter1Key))
        val subjectEmitter2Destructor = Json.obj(
            "m" -> Json.obj("k" -> expectedSubjectEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "DestroySubjectEmitters",
            expectedField1Key,
            subjectEmitter1Destructor,
            subjectEmitter2Destructor)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        // Get subject emitter reports (should be none reporting):
        query = Json.obj(
            "k" -> Json.arr(
                expectedSubjectEmitter1Key,
                expectedSubjectEmitter2Key))
        serviceResult = f.cellDirector.CallService(
            "ReportSubjectEmitters", // unlimited
            expectedField1Key,
            "~~s", // NOTE(review): wildcard-style subject key — presumably "any subject"; confirm
            query)
        assert(serviceResult._status == Cell.ErrorCodes.Ok)
        subjectEmitterReports = (Json.parse(serviceResult._results.head) \ "rse").as[Seq[JsObject]]
        assert(subjectEmitterReports.isEmpty)
    }
// //
// // Final cleanup:
// //
// it must "pass final cleanup tests" in
// { f =>
// // Attempt to destroy prior field (should not fail):
// val fieldDestructor = Json.obj(
// "m" -> Json.obj("k" -> expectedField1Key),
// "s" -> "s")
// var serviceResult = f.cellDirector.CallService("DestroyFields", fieldDestructor)
// assert(serviceResult.status == Cell.ErrorCodes.Ok)
// }
}
| taranos/taranoscsf-core | src/test/scala/org/taranos/mc/test/TrunkSpec.scala | Scala | agpl-3.0 | 157,995 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import common.enums.VatRegStatus
import config.{AuthClientConnector, BaseControllerComponents, FrontendAppConfig}
import models.CurrentProfile
import play.api.mvc.{Action, AnyContent}
import services.{SessionProfile, SessionService, VatRegistrationService}
import views.html.pages.SubmissionInProgress
import javax.inject.{Inject, Singleton}
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class SubmissionInProgressController @Inject()(view: SubmissionInProgress,
                                               val authConnector: AuthClientConnector,
                                               val sessionService: SessionService,
                                               vatRegistrationService: VatRegistrationService)
                                              (implicit appConfig: FrontendAppConfig,
                                               val executionContext: ExecutionContext,
                                               baseControllerComponents: BaseControllerComponents)
  extends BaseController with SessionProfile {

  /** Renders the static "submission in progress" page. */
  val show: Action[AnyContent] = isAuthenticatedWithProfileNoStatusCheck {
    implicit request =>
      implicit profile =>
        Future.successful(Ok(view()))
  }

  /**
   * Re-fetches the registration status, stores the refreshed profile in the
   * session, and redirects the user to the page matching the new status.
   *
   * NOTE(review): the match below handles submitted/locked/duplicateSubmission/
   * failed/failedRetryable only; any other status value would fail the Future
   * with a MatchError — confirm those are the only statuses reachable here.
   */
  val submit: Action[AnyContent] = isAuthenticatedWithProfileNoStatusCheck {
    implicit request =>
      implicit profile =>
        vatRegistrationService.getStatus(profile.registrationId).flatMap { status =>
          val refreshedProfile = profile.copy(vatRegistrationStatus = status)
          sessionService.cache[CurrentProfile]("CurrentProfile", refreshedProfile).map { _ =>
            status match {
              case VatRegStatus.submitted => Redirect(controllers.routes.ApplicationSubmissionController.show)
              case VatRegStatus.locked => Redirect(controllers.routes.SubmissionInProgressController.show)
              case VatRegStatus.duplicateSubmission => Redirect(controllers.routes.ErrorController.alreadySubmitted)
              case VatRegStatus.failed => Redirect(controllers.routes.ErrorController.submissionFailed)
              case VatRegStatus.failedRetryable => Redirect(controllers.routes.ErrorController.submissionRetryable)
            }
          }
        }
  }
}
| hmrc/vat-registration-frontend | app/controllers/SubmissionInProgressController.scala | Scala | apache-2.0 | 2,819 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.stats.message
import io.gatling.commons.stats.{ KO, OK, Status }
import org.scalacheck.Gen.alphaStr
import io.gatling.BaseSpec
class StatusSpec extends BaseSpec {

  "Status.apply" should "return OK when passing 'OK'" in {
    Status("OK") shouldBe OK
  }

  // Fixed test description: it previously read "return OK when passing 'KO'"
  // although the assertion (correctly) expects KO.
  it should "return KO when passing 'KO'" in {
    Status("KO") shouldBe KO
  }

  it should "throw an IllegalArgumentException on any other string" in {
    // Any alphabetic string other than the two valid literals must be rejected.
    forAll(alphaStr.suchThat(s => s != "OK" && s != "KO")) { string =>
      an[IllegalArgumentException] should be thrownBy Status(string)
    }
  }
}
| wiacekm/gatling | gatling-core/src/test/scala/io/gatling/core/stats/message/StatusSpec.scala | Scala | apache-2.0 | 1,209 |
package com.thetestpeople.trt.jenkins.trigger
import java.net.URI
object JenkinsUrlHelper {

  /**
   * Derives the Jenkins server base URI from a job URI by stripping everything
   * from the last "/job/" segment onwards. Throws a RuntimeException when the
   * URI contains no "/job/" segment (i.e. it is not a Jenkins job URL).
   */
  def getServerUrl(jobUrl: URI): URI = {
    val asText = jobUrl.toString
    val marker = asText.lastIndexOf("/job/")
    if (marker < 0)
      throw new RuntimeException(s"Rerun URL is not a Jenkins job: $asText")
    new URI(asText.take(marker))
  }
}
package com.anakiou.modbus.msg
import java.io.DataInput
import java.io.DataOutput
import com.anakiou.modbus.Modbus
import com.anakiou.modbus.ModbusCoupler
import com.anakiou.modbus.procimg.IllegalAddressException
import com.anakiou.modbus.procimg.InputRegister
class ReadInputRegistersRequest extends ModbusRequest() {

  // Offset of the first input register to read.
  private var m_Reference: Int = _
  // Number of 16-bit registers (words) to read.
  private var m_WordCount: Int = _

  setFunctionCode(Modbus.READ_INPUT_REGISTERS)
  setDataLength(4)

  /**
   * Constructs a request for `count` input registers starting at `ref`.
   * `this()` already runs the primary constructor, which sets the function
   * code and data length, so the previously duplicated calls were removed.
   */
  def this(ref: Int, count: Int) {
    this()
    setReference(ref)
    setWordCount(count)
  }

  /**
   * Builds the response by reading the requested register range from the
   * coupler's process image. Answers an ILLEGAL_ADDRESS exception response
   * when the requested range is out of bounds.
   */
  def createResponse(): ModbusResponse = {
    val procimg = ModbusCoupler.getReference.getProcessImage
    // Read the range first; on an invalid address, short-circuit with an
    // exception response (replaces the previous var/null initialization).
    val inpregs: Array[InputRegister] =
      try {
        procimg.getInputRegisterRange(this.getReference, this.getWordCount)
      } catch {
        case _: IllegalAddressException =>
          return createExceptionResponse(Modbus.ILLEGAL_ADDRESS_EXCEPTION)
      }
    val response = new ReadInputRegistersResponse(inpregs)
    if (!isHeadless) {
      // Framed transport: echo transaction and protocol identifiers back.
      response.setTransactionID(this.getTransactionID)
      response.setProtocolID(this.getProtocolID)
    } else {
      response.setHeadless()
    }
    response.setUnitID(this.getUnitID)
    response.setFunctionCode(this.getFunctionCode)
    response
  }

  /** Sets the offset of the first register to read. */
  def setReference(ref: Int) {
    m_Reference = ref
  }

  /** Returns the offset of the first register to read. */
  def getReference(): Int = m_Reference

  /** Sets the number of registers to read. */
  def setWordCount(count: Int) {
    m_WordCount = count
  }

  /** Returns the number of registers to read. */
  def getWordCount(): Int = m_WordCount

  /** Serializes the request payload: reference then word count, big-endian shorts. */
  def writeData(dout: DataOutput) {
    dout.writeShort(m_Reference)
    dout.writeShort(m_WordCount)
  }

  /** Deserializes the request payload: reference then word count, unsigned shorts. */
  def readData(din: DataInput) {
    m_Reference = din.readUnsignedShort()
    m_WordCount = din.readUnsignedShort()
  }
}
| anakiou/scamod | src/com/anakiou/modbus/msg/ReadInputRegistersRequest.scala | Scala | apache-2.0 | 1,824 |
package org.apress.prospark
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{ Milliseconds, Seconds, StreamingContext }
import org.apache.hadoop.io.{ Text, LongWritable, IntWritable }
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.spark.streaming.dstream.DStream
import org.apache.hadoop.mapred.TextOutputFormat
import org.apache.hadoop.mapreduce.lib.output.{ TextOutputFormat => NewTextOutputFormat }
import org.apache.spark.streaming.dstream.PairDStreamFunctions
import org.apache.log4j.LogManager
import org.json4s._
import org.json4s.native.JsonMethods._
import java.text.SimpleDateFormat
import java.util.Date
import org.apache.spark.HashPartitioner
object RedditKeyValueApp {

  /**
   * Streaming demo of Spark key/value DStream operations (groupByKey,
   * reduceByKey, combineByKey, join, cogroup, updateStateByKey) over Reddit
   * comment JSON and a "popular subreddits" CSV feed.
   *
   * Args: appname, input path for comment JSON, input path for popular CSV.
   */
  def main(args: Array[String]) {
    if (args.length != 3) {
      System.err.println(
        "Usage: RedditKeyValueApp <appname> <input_path> <input_path_popular>")
      System.exit(1)
    }
    val Seq(appName, inputPath, inputPathPopular) = args.toSeq

    val LOG = LogManager.getLogger(this.getClass)

    val conf = new SparkConf()
      .setAppName(appName)
      .setJars(SparkContext.jarOfClass(this.getClass).toSeq)

    val ssc = new StreamingContext(conf, Seconds(1))
    LOG.info("Started at %d".format(ssc.sparkContext.startTime))

    // Raw text streams: comment JSON records and popular-subreddit CSV rows.
    val comments = ssc.fileStream[LongWritable, Text, TextInputFormat](inputPath, (f: Path) => true, newFilesOnly = false).map(pair => pair._2.toString)
    val popular = ssc.fileStream[LongWritable, Text, TextInputFormat](inputPathPopular, (f: Path) => true, newFilesOnly = false).map(pair => pair._2.toString)

    // Comment counts per author via groupByKey, sorted descending.
    val topAuthors = comments.map(rec => ((parse(rec) \ "author").values.toString, 1))
      .groupByKey()
      .map(r => (r._2.sum, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    // Same aggregation via reduceByKey (preferred: pre-aggregates map-side).
    val topAuthors2 = comments.map(rec => ((parse(rec) \ "author").values.toString, 1))
      .reduceByKey(_ + _)
      .map(r => (r._2, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    // Average comment length (in words) per author via combineByKey.
    val topAuthorsByAvgContent = comments.map(rec => ((parse(rec) \ "author").values.toString, (parse(rec) \ "body").values.toString.split(" ").length))
      .combineByKey(
        (v) => (v, 1),
        (accValue: (Int, Int), v) => (accValue._1 + v, accValue._2 + 1),
        (accCombine1: (Int, Int), accCombine2: (Int, Int)) => (accCombine1._1 + accCombine2._1, accCombine1._2 + accCombine2._2),
        new HashPartitioner(ssc.sparkContext.defaultParallelism))
      .map({ case (k, v) => (k, v._1 / v._2.toFloat) })
      .map(r => (r._2, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    // Inner join of comments with the popular feed, keyed by subreddit.
    val keyedBySubreddit = comments.map(rec => (((parse(rec)) \ "subreddit").values.toString, rec))
    val keyedBySubreddit2 = popular.map(rec => ({
      val t = rec.split(",")
      (t(1).split("/")(4), t(0))
    }))
    val commentsWithIndustry = keyedBySubreddit.join(keyedBySubreddit2)

    // Same pairing demonstrated with cogroup. Fixed: the cogroup previously
    // used keyedBySubreddit/keyedBySubreddit2, leaving the Co streams unused.
    val keyedBySubredditCo = comments.map(rec => (((parse(rec)) \ "subreddit").values.toString, rec))
    val keyedBySubredditCo2 = popular.map(rec => ({
      val t = rec.split(",")
      (t(1).split("/")(4), t(0))
    }))
    val commentsWithIndustryCo = keyedBySubredditCo.cogroup(keyedBySubredditCo2)

    // Running per-subreddit comment counts across batches via updateStateByKey
    // (requires checkpointing).
    val checkpointPath = "/tmp"
    ssc.checkpoint(checkpointPath)
    val updateFunc = (values: Seq[Int], state: Option[Int]) => {
      val currentCount = values.sum
      val previousCount = state.getOrElse(0)
      Some(currentCount + previousCount)
    }
    val keyedBySubredditState = comments.map(rec => (((parse(rec)) \ "subreddit").values.toString, 1))
    val globalCount = keyedBySubredditState.updateStateByKey(updateFunc)
      .map(r => (r._2, r._1))
      .transform(rdd => rdd.sortByKey(ascending = false))

    ssc.start()
    ssc.awaitTermination()
  }
}
package io.finch
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{Future => ScalaFuture}
import cats.Id
import cats.effect.IO
import com.twitter.finagle.http.Response
import com.twitter.util.{Future => TwitterFuture}
import org.scalacheck.Arbitrary
import org.scalatestplus.scalacheck.ScalaCheckDrivenPropertyChecks
class MethodSpec extends FinchSpec with ScalaCheckDrivenPropertyChecks {

  behavior of "method"

  // Arbitrary Response built from an arbitrary string Output rendered as plain text.
  implicit val arbResponse: Arbitrary[Response] =
    Arbitrary(genOutput[String].map(_.toResponse[Id, Text.Plain]))

  it should "map Output value to endpoint" in {
    checkValue((i: String) => get(zero)(Ok(i)))
  }

  it should "map Response value to endpoint" in {
    checkValue((i: Response) => get(zero)(i))
  }

  it should "map F[Output[A]] value to endpoint" in {
    checkValue((i: String) => get(zero)(IO.pure(Ok(i))))
  }

  it should "map TwitterFuture[Output[A]] value to endpoint" in {
    checkValue((i: String) => get(zero)(TwitterFuture.value(Ok(i))))
  }

  it should "map ScalaFuture[Output[A]] value to endpoint" in {
    checkValue((i: String) => get(zero)(ScalaFuture.successful(Ok(i))))
  }

  it should "map F[Response] value to endpoint" in {
    checkValue((i: Response) => get(zero)(IO.pure(Ok(i).toResponse[Id, Text.Plain])))
  }

  it should "map TwitterFuture[Response] value to endpoint" in {
    checkValue((i: Response) => get(zero)(TwitterFuture.value(Ok(i).toResponse[Id, Text.Plain])))
  }

  it should "map ScalaFuture[Response] value to endpoint" in {
    checkValue((i: Response) => get(zero)(ScalaFuture.successful(Ok(i).toResponse[Id, Text.Plain])))
  }

  it should "map A => Output function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => Ok(i) })
  }

  it should "map A => Response function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => Ok(i).toResponse[Id, Text.Plain] })
  }

  it should "map A => F[Output[A]] function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => IO.pure(i).map(Ok) })
  }

  it should "map A => TwitterFuture[Output[A]] function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => TwitterFuture.value(i).map(Ok) })
  }

  it should "map A => ScalaFuture[Output[A]] function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => ScalaFuture.successful(i).map(Ok) })
  }

  it should "map A => F[Response] function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => IO.pure(i).map(Ok(_).toResponse[Id, Text.Plain]) })
  }

  it should "map A => TwitterFuture[Response] function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => TwitterFuture.value(i).map(Ok(_).toResponse[Id, Text.Plain]) })
  }

  it should "map A => ScalaFuture[Response] function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => ScalaFuture.successful(i).map(Ok(_).toResponse[Id, Text.Plain]) })
  }

  it should "map (A, B) => Output function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int])((x: Int, y: Int) => Ok(s"$x$y")))
  }

  it should "map (A, B) => Response function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int])((x: Int, y: Int) => Ok(s"$x$y").toResponse[Id, Text.Plain]))
  }

  it should "map (A, B) => F[Output[String]] function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int])((x: Int, y: Int) => IO.pure(Ok(s"$x$y"))))
  }

  it should "map (A, B) => TwitterFuture[Output[String]] function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int])((x: Int, y: Int) => TwitterFuture.value(Ok(s"$x$y"))))
  }

  it should "map (A, B) => ScalaFuture[Output[String]] function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int])((x: Int, y: Int) => ScalaFuture.successful(Ok(s"$x$y"))))
  }

  it should "map (A, B) => F[Response] function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int]) { (x: Int, y: Int) =>
      IO.pure(Ok(s"$x$y").toResponse[Id, Text.Plain])
    })
  }

  it should "map (A, B) => TwitterFuture[Response] function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int]) { (x: Int, y: Int) =>
      TwitterFuture.value(Ok(s"$x$y").toResponse[Id, Text.Plain])
    })
  }

  it should "map (A, B) => ScalaFuture[Response] function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int]) { (x: Int, y: Int) =>
      ScalaFuture.successful(Ok(s"$x$y").toResponse[Id, Text.Plain])
    })
  }

  behavior of "Custom Type Program[_]"

  case class Program[A](value: A)

  // Fixed: implicit definitions should carry an explicit type annotation
  // (mandatory under Scala 3 / -Xsource:3; avoids inferring the anonymous
  // subclass type).
  implicit val conv: ToAsync[Program, IO] = new ToAsync[Program, IO] {
    def apply[A](a: Program[A]): IO[A] = IO(a.value)
  }

  it should "map Program[Output[_]] value to endpoint" in {
    checkValue((i: String) => get(zero)(Program(Ok(i))))
  }

  it should "map A => Program[Output[_]] function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => Program(Ok(i)) })
  }

  it should "map (A, B) => Program[Output[_]] function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int]) { (x: Int, y: Int) =>
      Program(Ok(s"$x$y"))
    })
  }

  it should "map Program[Response] value to endpoint" in {
    checkValue((i: Response) => get(zero)(Program(i)))
  }

  it should "map A => Program[Response] function to endpoint" in {
    checkFunction(get(path[Int]) { i: Int => Program(Ok(i).toResponse[Id, Text.Plain]) })
  }

  it should "map (A, B) => Program[Response] function to endpoint" in {
    checkFunction2(get(path[Int] :: path[Int]) { (x: Int, y: Int) =>
      Program(Ok(s"$x$y").toResponse[Id, Text.Plain])
    })
  }

  // Asserts that an endpoint built from an arbitrary value returns that value.
  private def checkValue[A: Arbitrary](f: A => Endpoint[IO, A]): Unit =
    forAll { (input: A) =>
      val e = f(input)
      e(Input.get("/")).awaitValueUnsafe() shouldBe Some(input)
    }

  // Asserts that a one-argument endpoint echoes the path segment it receives.
  private def checkFunction(e: Endpoint[IO, _]): Unit =
    forAll { (input: Int) =>
      e(Input.get(s"/$input")).awaitValueUnsafe() match {
        case Some(r: Response) => r.contentString shouldBe input.toString
        case Some(a: Int) => a shouldBe input
        case _ => ()
      }
    }

  // Asserts that a two-argument endpoint concatenates its two path segments.
  private def checkFunction2(e: Endpoint[IO, _]): Unit =
    forAll { (x: Int, y: Int) =>
      e(Input.get(s"/$x/$y")).awaitValueUnsafe() match {
        case Some(r: Response) => r.contentString shouldBe s"$x$y"
        case Some(a: String) => a shouldBe s"$x$y"
        case _ => ()
      }
    }
}
| finagle/finch | core/src/test/scala/io/finch/MethodSpec.scala | Scala | apache-2.0 | 6,365 |
package gov.uk.dvla.vehicles.keeper.stepdefs
import cucumber.api.java.en.{Then, When, Given}
import gov.uk.dvla.vehicles.keeper.helpers.AcceptanceTestHelper
import org.openqa.selenium.WebDriver
import org.scalatest.selenium.WebBrowser.{click, go, pageTitle, pageSource}
import pages.changekeeper.DateOfSalePage
import pages.changekeeper.NewKeeperChooseYourAddressPage
import pages.changekeeper.NewKeeperEnterAddressManuallyPage
import pages.changekeeper.PrivateKeeperDetailsPage
import pages.changekeeper.VehicleLookupPage
import uk.gov.dvla.vehicles.presentation.common.helpers.webbrowser.WebBrowserDriver
import uk.gov.dvla.vehicles.presentation.common.testhelpers.RandomVrmGenerator
// Cucumber step definitions covering the keeper-to-keeper manual address
// entry journey: field labels, validation messages and navigation.
class KeeperToKeeperAddressFieldsSteps(webBrowserDriver: WebBrowserDriver) extends AcceptanceTestHelper {

  implicit val webDriver = webBrowserDriver.asInstanceOf[WebDriver]

  // Shared journey setup: performs a vehicle lookup with fixed reference data,
  // fills in private keeper details and lands on the choose-your-address page.
  def goToEnterKeeperAddressManuallyPage() {
    go to VehicleLookupPage
    VehicleLookupPage.vehicleRegistrationNumber.value = RandomVrmGenerator.uniqueVrm
    VehicleLookupPage.documentReferenceNumber.value = "11111111111"
    click on VehicleLookupPage.emailInvisible
    click on VehicleLookupPage.vehicleSoldToPrivateIndividual
    click on VehicleLookupPage.next
    pageTitle shouldEqual PrivateKeeperDetailsPage.title withClue trackingId
    click on PrivateKeeperDetailsPage.mr
    PrivateKeeperDetailsPage.firstNameTextBox.value = "tue"
    PrivateKeeperDetailsPage.lastNameTextBox.value = "nny"
    PrivateKeeperDetailsPage.postcodeTextBox.value = "qq99qw"
    click on PrivateKeeperDetailsPage.emailInvisible
    click on PrivateKeeperDetailsPage.next
    pageTitle shouldEqual NewKeeperChooseYourAddressPage.title withClue trackingId
  }

  @Given("^that the user is on the Enter Address page$")
  def that_the_user_is_on_the_Enter_Address_page() {
    goToEnterKeeperAddressManuallyPage()
  }

  // Intentionally empty: the "when" is covered by the setup in the Given step.
  @When("^the user tries to enter the new keeper address$")
  def the_user_tries_to_enter_the_new_keeper_address() {
  }

  // Verifies the three expected field labels appear on the manual address page.
  @Then("^the user will have the field labels \\"(.*?)\\" Line two of address with no label Line three of address with no label Town or City with field label \\"(.*?)\\" Postcode with field label \\"(.*?)\\"$")
  def the_user_will_have_the_field_labels_Line_two_of_address_with_no_label_Line_three_of_address_with_no_label_Town_or_City_with_field_label_Postcode_with_field_label(a1:String, a2:String, a3:String) {
    click on NewKeeperChooseYourAddressPage.manualAddress
    pageSource.contains(a1.trim) shouldBe true withClue trackingId
    pageSource.contains(a2.trim) shouldBe true withClue trackingId
    pageSource.contains(a3.trim) shouldBe true withClue trackingId
  }

  @Then("^there will be hint text stating \\"(.*?)\\" below the field Building name/number and street$")
  def there_will_be_hint_text_stating_below_the_field_Building_name_number_and_street(a1:String) {
    click on NewKeeperChooseYourAddressPage.manualAddress
    pageSource.contains(a1) shouldBe true withClue trackingId
  }

  @When("^the user has selected the submit control$")
  def the_user_has_selected_the_submit_control() {
    click on NewKeeperEnterAddressManuallyPage.next
  }

  // Populates a syntactically valid manual address ready for submission.
  @Given("^the address is not blank and has a valid format$")
  def the_address_is_not_blank_and_has_a_valid_format() {
    goToEnterKeeperAddressManuallyPage()
    click on NewKeeperChooseYourAddressPage.manualAddress
    NewKeeperEnterAddressManuallyPage.addressBuildingNameOrNumber.value = "1 first lane"
    NewKeeperEnterAddressManuallyPage.addressPostTown.value = "hghjg"
    NewKeeperEnterAddressManuallyPage.addressPostcode.value = "sa11aa"
  }

  // Intentionally empty: absence of an error is verified implicitly by the
  // subsequent navigation assertion.
  @Then("^there is no address error message is displayed \\"(.*?)\\"$")
  def there_is_no_address_error_message_is_displayed(f:String) {
  }

  // A successful submit lands on the Date of Sale page, implying details were kept.
  @Then("^the trader details are retained$")
  def the_trader_details_are_retained() {
    pageTitle shouldEqual DateOfSalePage.title withClue trackingId
  }

  // Enters an address line that is too short to pass validation.
  @Given("^the data in Line one of the address has less than (\\\\d+) characters$")
  def the_data_in_Line_one_of_the_address_has_less_than_characters(d:Int) {
    goToEnterKeeperAddressManuallyPage()
    click on NewKeeperChooseYourAddressPage.manualAddress
    NewKeeperEnterAddressManuallyPage.addressBuildingNameOrNumber.value = "1G"
    NewKeeperEnterAddressManuallyPage.addressPostTown.value = "hghjg"
  }

  @Then("^an error message is displayed \\"(.*?)\\"$")
  def an_error_message_is_displayed(errMsgForAddress:String) {
    NewKeeperEnterAddressManuallyPage.errorTextForFields(errMsgForAddress) shouldBe true withClue trackingId
  }

  // Enters a town/city value that is too short to pass validation.
  @Given("^the town or city is null OR the town or city has less than (\\\\d+) characters$")
  def the_town_or_city_is_null_OR_the_town_or_city_has_less_than_characters(h:Int) {
    goToEnterKeeperAddressManuallyPage()
    click on NewKeeperChooseYourAddressPage.manualAddress
    NewKeeperEnterAddressManuallyPage.addressPostTown.value = "df"
  }

  @Then("^there is a error message displayed \\"(.*?)\\"$")
  def there_is_a_error_message_displayed(errMsgForTown:String) {
    NewKeeperEnterAddressManuallyPage.errorTextForFields(errMsgForTown) shouldBe true withClue trackingId
  }

  @Given("^the user has entered a postcode on either the private or business keeper page$")
  def the_user_has_entered_a_postcode_on_either_the_private_or_business_keeper_page() {
    goToEnterKeeperAddressManuallyPage()
    click on NewKeeperChooseYourAddressPage.manualAddress
    NewKeeperEnterAddressManuallyPage.addressPostTown.value = "df"
  }

  // Fills in the full manual address form as a precondition for navigation checks.
  @Given("^the user is on the manual address page$")
  def the_user_is_on_the_manual_address_page() {
    goToEnterKeeperAddressManuallyPage()
    click on NewKeeperChooseYourAddressPage.manualAddress
    NewKeeperEnterAddressManuallyPage.addressBuildingNameOrNumber.value = "1Gffhf"
    NewKeeperEnterAddressManuallyPage.addressPostTown.value = "hghjg"
    NewKeeperEnterAddressManuallyPage.addressPostcode.value = "sa11aa"
  }

  @Then("^the user is taken to the Date of Sale page$")
  def the_user_is_taken_to_the_date_of_sale_page() {
    pageTitle shouldEqual DateOfSalePage.title withClue trackingId
  }

  @When("^the user has selected the Back control$")
  def the_user_has_selected_the_Back_control() {
    goToEnterKeeperAddressManuallyPage()
    click on NewKeeperChooseYourAddressPage.manualAddress
    click on NewKeeperEnterAddressManuallyPage.back
  }

  @Then("^the user is taken to the previous Address not found page$")
  def the_user_is_taken_to_the_previous_Address_not_found_page() {
    pageTitle shouldEqual NewKeeperChooseYourAddressPage.title withClue trackingId
    pageSource should include("No address found for that postcode") withClue trackingId
  }
}
| dvla/vehicles-change-keeper-online | acceptance-tests/src/test/scala/gov/uk/dvla/vehicles/keeper/stepdefs/KeeperToKeeperAddressFieldsSteps.scala | Scala | mit | 6,678 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.codegen.calls
import org.apache.flink.table.planner.codegen.CodeGenUtils.{BINARY_STRING, qualifyMethod}
import org.apache.flink.table.planner.codegen.GenerateUtils.{generateCallIfArgsNotNull, generateCallIfArgsNullable}
import org.apache.flink.table.planner.codegen.{CodeGeneratorContext, GeneratedExpression}
import org.apache.flink.table.types.logical.LogicalType
import java.lang.reflect.Method
import java.util.TimeZone
/**
 * Generates a code-generated call to the given Java [[Method]].
 *
 * @param method       the target method to invoke in generated code
 * @param argsNullable when true, the call is generated so that null arguments
 *                     are passed through instead of short-circuiting to null
 * @param wrapTryCatch when true, the generated call is wrapped in try/catch
 */
class MethodCallGen(method: Method, argsNullable: Boolean = false, wrapTryCatch: Boolean = false)
  extends CallGenerator {

  override def generate(
      ctx: CodeGeneratorContext,
      operands: Seq[GeneratedExpression],
      returnType: LogicalType): GeneratedExpression = {
    // Both branches delegate the actual invocation snippet to convertResult;
    // they differ only in the null-handling wrapper around the arguments.
    if (argsNullable) {
      generateCallIfArgsNullable(
        ctx, returnType, operands, !method.getReturnType.isPrimitive, wrapTryCatch) {
        originalTerms => convertResult(ctx, originalTerms)
      }
    } else {
      generateCallIfArgsNotNull(
        ctx, returnType, operands, !method.getReturnType.isPrimitive, wrapTryCatch) {
        originalTerms => convertResult(ctx, originalTerms)
      }
    }
  }

  // Builds the Java source snippet that invokes `method` with the given
  // argument terms, adapting between StringData and java.lang.String and
  // appending the session time zone when the method expects one.
  private def convertResult(ctx: CodeGeneratorContext, originalTerms: Seq[String]): String = {
    val terms = originalTerms.zip(method.getParameterTypes).map { case (term, clazz) =>
      // convert the StringData parameter to String if the method parameter accept String
      if (clazz == classOf[String]) {
        s"$term.toString()"
      } else {
        term
      }
    }

    // generate method invoke code and adapt when it's a time zone related function
    val call = if (terms.length + 1 == method.getParameterCount &&
      method.getParameterTypes()(terms.length) == classOf[TimeZone]) {
      // insert the zoneID parameters for timestamp functions
      val timeZone = ctx.addReusableSessionTimeZone()
      s"""
         |${qualifyMethod(method)}(${terms.mkString(", ")}, $timeZone)
       """.stripMargin
    } else {
      s"""
         |${qualifyMethod(method)}(${terms.mkString(", ")})
       """.stripMargin
    }

    // convert String to StringData if the return type is String
    if (method.getReturnType == classOf[String]) {
      s"$BINARY_STRING.fromString($call)"
    } else {
      call
    }
  }
}
| xccui/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/codegen/calls/MethodCallGen.scala | Scala | apache-2.0 | 3,116 |
/*
* Copyright 2016 rdbc contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rdbc.tck
import akka.actor.ActorSystem
import akka.stream.Materializer
import akka.stream.scaladsl.{Sink, Source}
import io.rdbc.sapi._
import io.rdbc.sapi.exceptions.InvalidQueryException
import io.rdbc.tck.util.Subscribers
import scala.concurrent.Future
/**
 * TCK suite verifying that the driver reports an InvalidQueryException for
 * syntactically invalid SQL, across all statement kinds and execution modes.
 */
trait SyntaxErrorSpec
  extends RdbcSpec
    with TableSpec {

  // A column type used only to create a throwaway table; concrete TCK
  // implementations supply a type valid for their database.
  protected def arbitraryDataType: String

  protected implicit def system: ActorSystem
  protected implicit def materializer: Materializer

  // Each stmtTest registers one sub-suite per statement kind; the SQL is
  // deliberately malformed ("should_be_from" etc.).
  "Error should be returned when query is invalid when" - {
    stmtTest("Select", (c, t) => c.statement(sql"select * should_be_from #$t"))
    stmtTest("Insert", (c, t) => c.statement(sql"insert should_be_into #$t values (1)"))
    stmtTest("Returning insert", (c, t) =>
      c.statement(sql"insert should_be_into #$t values (1)", StatementOptions.ReturnGenKeys)
    )
    stmtTest("Delete", (c, t) => c.statement(sql"delete should_be_from #$t"))
    stmtTest("Update", (c, t) => c.statement(sql"update #$t should_be_set col = null"))
    stmtTest("DDL", (c, t) => c.statement(sql"alter should_be_table #$t drop column col"))
  }

  "Streaming arguments should" - {
    "fail with an InvalidQueryException" - {
      "when statement is incorrect syntactically" in { c =>
        withTable(c, s"col $arbitraryDataType") { tbl =>
          val stmt = c.statement(s"insert should_be_into #$tbl values (:x)")
          // Stream two argument rows into the malformed statement.
          val src = Source(Vector(Vector(1), Vector(2))).runWith(Sink.asPublisher(fanout = false))
          assertInvalidQueryThrown {
            stmt.streamArgsByIdx(src).get
          }
        }
      }
    }
  }

  // Registers, for the given malformed statement builder, one test per
  // execution mode (execute, executeForSet, stream, ...).
  private def stmtTest(stmtType: String, stmt: (Connection, String) => ExecutableStatement): Unit = {
    s"executing a $stmtType for" - {
      executedFor("nothing", _.execute())
      executedFor("set", _.executeForSet())
      executedFor("value", _.executeForValue(_.int(1)))
      executedFor("first row", _.executeForFirstRow())
      executedFor("generated key", _.executeForKey[String])
      executedFor("stream", stmt => {
        val rs = stmt.stream()
        val subscriber = Subscribers.eager()
        rs.subscribe(subscriber)
        subscriber.rows
      })

      def executedFor[A](executorName: String, executor: ExecutableStatement => Future[A]): Unit = {
        executorName in { c =>
          withTable(c, s"col $arbitraryDataType") { tbl =>
            assertInvalidQueryThrown {
              executor(stmt(c, tbl)).get
            }
          }
        }
      }
    }
  }

  private def assertInvalidQueryThrown(body: => Any): Unit = {
    assertThrows[InvalidQueryException](body)
  }
}
| rdbc-io/rdbc | rdbc-tck/src/main/scala/io/rdbc/tck/SyntaxErrorSpec.scala | Scala | apache-2.0 | 3,222 |
package cn.commercial.model
import cn.changhong.orm.Tables.Tables.{AccessTokenRow, UserRow}
import cn.changhong.finagle.http.MyAuth2Server._
import cn.changhong.orm.Tables.Tables._
import scala.slick.
/**
* Created by yangguo on 14-11-3.
*/
object AuthDao{
  // NOTE(review): this looks like unfinished work-in-progress. The signature
  // declares Option[AccessTokenRow], but the body yields the result of
  // Usertt.filter(...) (a Slick query, not an Option), and the password,
  // clientId and accessToken parameters are never used. Also, the dangling
  // `import scala.slick.` above this object is incomplete. Confirm the
  // intended lookup logic before relying on this method.
  def login(username:String,password:String,clientId:String,accessToken:Option[String]): Option[AccessTokenRow]={
    DB.withTransaction{ implicit session=>
      Usertt.filter(u=>u.username === username)
    }
  }
}
| guoyang2011/myfinagle | Server/src/main/scala/cn/commercial/model/Dao.scala | Scala | apache-2.0 | 476 |
/*
* Copyright 2015 Foundational Development
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package pro.foundev.benchmarks.spark_throughput.launchers
import org.apache.spark.SparkContext
import org.apache.spark.storage.StorageLevel
import pro.foundev.benchmarks.spark_throughput.Result
class CacheBenchmarkLauncher(sc: SparkContext, tableSuffix: String, st: StorageLevel )
extends BenchmarkLauncher(sc, tableSuffix) {
/**
* Spark code benchmark
* @return should be result of benchmark run
*/
override def all(): Seq[Result] = {
val cachedRDD = cassandraRDD.map(x=>x.getLong(1)).persist(st)
timer.profile(()=>cachedRDD.max())
val firstResult = new Result("firstRun " + st.description, timer.getMillis(), 0, tableSuffix)
timer.profile(()=>cachedRDD.max())
val secondResult = new Result("secondRun " + st.description, timer.getMillis(), 0, tableSuffix)
cachedRDD.unpersist(true)
Seq(firstResult, secondResult)
}
/**
* Spark Sql code benchmark
* @return should be result of benchmark run
*/
override def sqlAll(): Seq[Result] = ???
}
| rssvihla/datastax_work | spark_commons/benchmarks/spark_throughput/src/main/scala/pro/foundev/benchmarks/spark_throughput/launchers/CacheBenchmarkLauncher.scala | Scala | apache-2.0 | 1,609 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming.continuous
import scala.collection.mutable
import org.apache.spark.SparkEnv
import org.apache.spark.internal.Logging
import org.apache.spark.rpc.{RpcCallContext, RpcEndpointRef, RpcEnv, ThreadSafeRpcEndpoint}
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.sources.v2.reader.streaming.{ContinuousReadSupport, PartitionOffset}
import org.apache.spark.sql.sources.v2.writer.WriterCommitMessage
import org.apache.spark.sql.sources.v2.writer.streaming.StreamingWriteSupport
import org.apache.spark.util.RpcUtils
private[continuous] sealed trait EpochCoordinatorMessage extends Serializable

// Driver epoch trigger message
/**
 * Atomically increment the current epoch and get the new value.
 */
private[sql] case object IncrementAndGetEpoch extends EpochCoordinatorMessage

/**
 * The RpcEndpoint stop() will wait to clear out the message queue before terminating the
 * object. This can lead to a race condition where the query restarts at epoch n, a new
 * EpochCoordinator starts at epoch n, and then the old epoch coordinator commits epoch n + 1.
 * The framework doesn't provide a handle to wait on the message queue, so we use a synchronous
 * message to stop any writes to the ContinuousExecution object.
 */
private[sql] case object StopContinuousExecutionWrites extends EpochCoordinatorMessage

// Init messages
/**
 * Set the reader and writer partition counts. Tasks may not be started until the coordinator
 * has acknowledged these messages.
 */
private[sql] case class SetReaderPartitions(numPartitions: Int) extends EpochCoordinatorMessage

// Marked private[sql] for consistency with SetReaderPartitions and the other protocol
// messages: these are internal coordination messages and must not leak outside the
// sql package (previously this one alone was public).
private[sql] case class SetWriterPartitions(numPartitions: Int) extends EpochCoordinatorMessage

// Partition task messages
/**
 * Get the current epoch.
 */
private[sql] case object GetCurrentEpoch extends EpochCoordinatorMessage

/**
 * Commit a partition at the specified epoch with the given message.
 */
private[sql] case class CommitPartitionEpoch(
    partitionId: Int,
    epoch: Long,
    message: WriterCommitMessage) extends EpochCoordinatorMessage

/**
 * Report that a partition is ending the specified epoch at the specified offset.
 */
private[sql] case class ReportPartitionOffset(
    partitionId: Int,
    epoch: Long,
    offset: PartitionOffset) extends EpochCoordinatorMessage
/** Helper object used to create reference to [[EpochCoordinator]]. */
/** Helper object used to create reference to [[EpochCoordinator]]. */
private[sql] object EpochCoordinatorRef extends Logging {

  // Endpoint names are namespaced by coordinator id so multiple queries can coexist.
  private def endpointName(id: String) = s"EpochCoordinator-$id"

  /**
   * Create a reference to a new [[EpochCoordinator]] registered in this JVM's RpcEnv.
   */
  def create(
      writeSupport: StreamingWriteSupport,
      readSupport: ContinuousReadSupport,
      query: ContinuousExecution,
      epochCoordinatorId: String,
      startEpoch: Long,
      session: SparkSession,
      env: SparkEnv): RpcEndpointRef = synchronized {
    val endpoint = new EpochCoordinator(
      writeSupport, readSupport, query, startEpoch, session, env.rpcEnv)
    val endpointRef = env.rpcEnv.setupEndpoint(endpointName(epochCoordinatorId), endpoint)
    logInfo("Registered EpochCoordinator endpoint")
    endpointRef
  }

  /** Look up the already-registered coordinator endpoint on the driver. */
  def get(id: String, env: SparkEnv): RpcEndpointRef = synchronized {
    val ref = RpcUtils.makeDriverRef(endpointName(id), env.conf, env.rpcEnv)
    logDebug("Retrieved existing EpochCoordinator endpoint")
    ref
  }
}
/**
* Handles three major epoch coordination tasks for continuous processing:
*
* * Maintains a local epoch counter (the "driver epoch"), incremented by IncrementAndGetEpoch
* and pollable from executors by GetCurrentEpoch. Note that this epoch is *not* immediately
* reflected anywhere in ContinuousExecution.
* * Collates ReportPartitionOffset messages, and forwards to ContinuousExecution when all
* readers have ended a given epoch.
* * Collates CommitPartitionEpoch messages, and forwards to ContinuousExecution when all readers
* have both committed and reported an end offset for a given epoch.
*/
private[continuous] class EpochCoordinator(
    writeSupport: StreamingWriteSupport,
    readSupport: ContinuousReadSupport,
    query: ContinuousExecution,
    startEpoch: Long,
    session: SparkSession,
    override val rpcEnv: RpcEnv)
  extends ThreadSafeRpcEndpoint with Logging {

  // Set once StopContinuousExecutionWrites arrives; afterwards all incoming
  // one-way messages are dropped (see the first case in receive).
  private var queryWritesStopped: Boolean = false

  // Expected partition counts, set via SetReaderPartitions / SetWriterPartitions.
  // Per the message docs, tasks may not start before these are acknowledged, so
  // commit resolution only ever reads the final values.
  private var numReaderPartitions: Int = _
  private var numWriterPartitions: Int = _

  // Driver-local epoch counter; advanced only by IncrementAndGetEpoch.
  private var currentDriverEpoch = startEpoch

  // (epoch, partition) -> message
  private val partitionCommits =
    mutable.Map[(Long, Int), WriterCommitMessage]()
  // (epoch, partition) -> offset
  private val partitionOffsets =
    mutable.Map[(Long, Int), PartitionOffset]()

  // Highest epoch committed so far; epochs must be committed in strictly
  // increasing order, hence the waiting set below.
  private var lastCommittedEpoch = startEpoch - 1
  // Remembers epochs that have to wait for previous epochs to be committed first.
  private val epochsWaitingToBeCommitted = mutable.HashSet.empty[Long]

  /** Commit `epoch` once all writer partitions have committed and all reader
   *  partitions have reported an end offset for it, preserving commit order. */
  private def resolveCommitsAtEpoch(epoch: Long) = {
    val thisEpochCommits = findPartitionCommitsForEpoch(epoch)
    val nextEpochOffsets =
      partitionOffsets.collect { case ((e, _), o) if e == epoch => o }
    if (thisEpochCommits.size == numWriterPartitions &&
      nextEpochOffsets.size == numReaderPartitions) {
      // Check that last committed epoch is the previous one for sequencing of committed epochs.
      // If not, add the epoch being currently processed to epochs waiting to be committed,
      // otherwise commit it.
      if (lastCommittedEpoch != epoch - 1) {
        logDebug(s"Epoch $epoch has received commits from all partitions " +
          s"and is waiting for epoch ${epoch - 1} to be committed first.")
        epochsWaitingToBeCommitted.add(epoch)
      } else {
        commitEpoch(epoch, thisEpochCommits)
        lastCommittedEpoch = epoch

        // Commit subsequent epochs that are waiting to be committed.
        var nextEpoch = lastCommittedEpoch + 1
        while (epochsWaitingToBeCommitted.contains(nextEpoch)) {
          val nextEpochCommits = findPartitionCommitsForEpoch(nextEpoch)
          commitEpoch(nextEpoch, nextEpochCommits)

          epochsWaitingToBeCommitted.remove(nextEpoch)
          lastCommittedEpoch = nextEpoch
          nextEpoch += 1
        }

        // Cleanup state from before last committed epoch,
        // now that we know all partitions are forever past it.
        for (k <- partitionCommits.keys.filter { case (e, _) => e < lastCommittedEpoch }) {
          partitionCommits.remove(k)
        }
        for (k <- partitionOffsets.keys.filter { case (e, _) => e < lastCommittedEpoch }) {
          partitionOffsets.remove(k)
        }
      }
    }
  }

  /**
   * Collect per-partition commits for an epoch.
   */
  private def findPartitionCommitsForEpoch(epoch: Long): Iterable[WriterCommitMessage] = {
    partitionCommits.collect { case ((e, _), msg) if e == epoch => msg }
  }

  /**
   * Commit epoch to the offset log.
   */
  private def commitEpoch(epoch: Long, messages: Iterable[WriterCommitMessage]): Unit = {
    logDebug(s"Epoch $epoch has received commits from all partitions " +
      s"and is ready to be committed. Committing epoch $epoch.")
    // Sequencing is important here. We must commit to the writer before recording the commit
    // in the query, or we will end up dropping the commit if we restart in the middle.
    writeSupport.commit(epoch, messages.toArray)
    query.commit(epoch)
  }

  override def receive: PartialFunction[Any, Unit] = {
    // If we just drop these messages, we won't do any writes to the query. The lame duck tasks
    // won't shed errors or anything.
    case _ if queryWritesStopped => ()

    case CommitPartitionEpoch(partitionId, epoch, message) =>
      logDebug(s"Got commit from partition $partitionId at epoch $epoch: $message")
      // Ignore duplicate commits from speculative/retried tasks: first one wins.
      if (!partitionCommits.isDefinedAt((epoch, partitionId))) {
        partitionCommits.put((epoch, partitionId), message)
        resolveCommitsAtEpoch(epoch)
      }

    case ReportPartitionOffset(partitionId, epoch, offset) =>
      partitionOffsets.put((epoch, partitionId), offset)
      val thisEpochOffsets =
        partitionOffsets.collect { case ((e, _), o) if e == epoch => o }
      if (thisEpochOffsets.size == numReaderPartitions) {
        logDebug(s"Epoch $epoch has offsets reported from all partitions: $thisEpochOffsets")
        query.addOffset(epoch, readSupport, thisEpochOffsets.toSeq)
        resolveCommitsAtEpoch(epoch)
      }
  }

  override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
    case GetCurrentEpoch =>
      val result = currentDriverEpoch
      logDebug(s"Epoch $result")
      context.reply(result)

    case IncrementAndGetEpoch =>
      currentDriverEpoch += 1
      context.reply(currentDriverEpoch)

    case SetReaderPartitions(numPartitions) =>
      numReaderPartitions = numPartitions
      context.reply(())

    case SetWriterPartitions(numPartitions) =>
      numWriterPartitions = numPartitions
      context.reply(())

    case StopContinuousExecutionWrites =>
      queryWritesStopped = true
      context.reply(())
  }
}
| lvdongr/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/continuous/EpochCoordinator.scala | Scala | apache-2.0 | 9,896 |
package dotty.tools
package dotc
package typer
import core._
import ast._
import Trees._, Constants._, StdNames._, Scopes._, Denotations._
import Contexts._, Symbols._, Types._, SymDenotations._, Names._, NameOps._, Flags._, Decorators._
import ast.desugar, ast.desugar._
import ProtoTypes._
import util.Positions._
import util.{Attachment, SourcePosition, DotClass}
import collection.mutable
import annotation.tailrec
import ErrorReporting._
import tpd.ListOfTreeDecorator
import config.Printers._
import language.implicitConversions
trait NamerContextOps { this: Context =>

  /** Enter symbol into current class, if current class is owner of current context,
   *  or into current scope, if not. Should always be called instead of scope.enter
   *  in order to make sure that updates to class members are reflected in
   *  finger prints.
   */
  def enter(sym: Symbol): Symbol = {
    ctx.owner match {
      case cls: ClassSymbol => cls.enter(sym)
      case _ => this.scope.openForMutations.enter(sym)
    }
    sym
  }

  /** The denotation with the given name in current context */
  def denotNamed(name: Name): Denotation =
    if (owner.isClass)
      if (outer.owner == owner) { // inner class scope; check whether we are referring to self
        if (scope.size == 1) {
          val elem = scope.lastEntry
          if (elem.name == name) return elem.sym.denot // return self
        }
        // The only thing that may live in this scope is the self symbol.
        assert(scope.size <= 1, scope)
        owner.thisType.member(name)
      }
      else // we are in the outermost context belonging to a class; self is invisible here. See inClassContext.
        owner.findMember(name, owner.thisType, EmptyFlags)
    else
      scope.denotsNamed(name).toDenot(NoPrefix)

  /** Either the current scope, or, if the current context owner is a class,
   *  the declarations of the current class.
   */
  def effectiveScope: Scope =
    if (owner != null && owner.isClass) owner.asClass.unforcedDecls
    else scope

  /** The symbol (stored in some typer's symTree) of an enclosing context definition */
  def symOfContextTree(tree: untpd.Tree) = {
    // Walk outwards through contexts; each distinct Typer has its own SymOfTree
    // attachment key, so we retry the lookup once per enclosing typer.
    def go(ctx: Context): Symbol = {
      ctx.typeAssigner match {
        case typer: Typer =>
          tree.getAttachment(typer.SymOfTree) match {
            case Some(sym) => sym
            case None =>
              // Skip all contexts sharing this typer before retrying with the next one.
              var cx = ctx.outer
              while (cx.typeAssigner eq typer) cx = cx.outer
              go(cx)
          }
        case _ => NoSymbol
      }
    }
    go(this)
  }

  /** Context where `sym` is defined, assuming we are in a nested context. */
  def defContext(sym: Symbol) =
    outersIterator
      .dropWhile(_.owner != sym)
      .dropWhile(_.owner == sym)
      .next
}
/** This class creates symbols from definitions and imports and gives them
* lazy types.
*
* Timeline:
*
* During enter, trees are expanded as necessary, populating the expandedTree map.
* Symbols are created, and the symOfTree map is set up.
*
* Symbol completion causes some trees to be already typechecked and typedTree
* entries are created to associate the typed trees with the untyped expanded originals.
*
* During typer, original trees are first expanded using expandedTree. For each
* expanded member definition or import we extract and remove the corresponding symbol
* from the symOfTree map and complete it. We then consult the typedTree map to see
* whether a typed tree exists already. If yes, the typed tree is returned as result.
* Otherwise, we proceed with regular type checking.
*
* The scheme is designed to allow sharing of nodes, as long as each duplicate appears
* in a different method.
*/
class Namer { typer: Typer =>

  import untpd._

  // Attachment keys linking untyped trees to their typed versions, their
  // desugared expansions, and their created symbols respectively.
  val TypedAhead = new Attachment.Key[tpd.Tree]
  val ExpandedTree = new Attachment.Key[Tree]
  val SymOfTree = new Attachment.Key[Symbol]

  /** A partial map from unexpanded member and pattern defs and to their expansions.
   *  Populated during enterSyms, emptied during typer.
   */
  //lazy val expandedTree = new mutable.AnyRefMap[DefTree, Tree]
  /*{
    override def default(tree: DefTree) = tree // can't have defaults on AnyRefMaps :-(
  }*/

  /** A map from expanded MemberDef, PatDef or Import trees to their symbols.
   *  Populated during enterSyms, emptied at the point a typed tree
   *  with the same symbol is created (this can be when the symbol is completed
   *  or at the latest when the tree is typechecked.
   */
  //lazy val symOfTree = new mutable.AnyRefMap[Tree, Symbol]

  /** A map from expanded trees to their typed versions.
   *  Populated when trees are typechecked during completion (using method typedAhead).
   */
  // lazy val typedTree = new mutable.AnyRefMap[Tree, tpd.Tree]

  /** A map from method symbols to nested typers.
   *  Populated when methods are completed. Emptied when they are typechecked.
   *  The nested typer contains new versions of the four maps above including this
   *  one, so that trees that are shared between different DefDefs can be independently
   *  used as indices. It also contains a scope that contains nested parameters.
   */
  lazy val nestedTyper = new mutable.AnyRefMap[Symbol, Typer]

  /** The scope of the typer.
   *  For nested typers this is a place parameters are entered during completion
   *  and where they survive until typechecking. A context with this typer also
   *  has this scope.
   */
  val scope = newScope
/** The symbol of the given expanded tree. */
def symbolOfTree(tree: Tree)(implicit ctx: Context): Symbol = {
val xtree = expanded(tree)
xtree.getAttachment(TypedAhead) match {
case Some(ttree) => ttree.symbol
case none => xtree.attachment(SymOfTree)
}
}
  /** The enclosing class with given name; error if none exists */
  def enclosingClassNamed(name: TypeName, pos: Position)(implicit ctx: Context): Symbol = {
    // An empty name means "no qualifier given" (e.g. a plain `private`), not an error.
    if (name.isEmpty) NoSymbol
    else {
      val cls = ctx.owner.enclosingClassNamed(name)
      if (!cls.exists) ctx.error(s"no enclosing class or object is named $name", pos)
      // NoSymbol is still returned after the error so callers can proceed.
      cls
    }
  }
/** Find moduleClass/sourceModule in effective scope */
private def findModuleBuddy(name: Name)(implicit ctx: Context) = {
val scope = ctx.effectiveScope
val it = scope.lookupAll(name).filter(_ is Module)
assert(it.hasNext, s"no companion $name in $scope")
it.next
}
  /** If this tree is a member def or an import, create a symbol of it
   *  and store in symOfTree map.
   */
  def createSymbol(tree: Tree)(implicit ctx: Context): Symbol = {

    def privateWithinClass(mods: Modifiers) =
      enclosingClassNamed(mods.privateWithin, mods.pos)

    // Attaches the new symbol to the tree (SymOfTree) and forwards any
    // recorded references so they point at the original symbol.
    def record(sym: Symbol): Symbol = {
      val refs = tree.attachmentOrElse(References, Nil)
      if (refs.nonEmpty) {
        tree.removeAttachment(References)
        refs foreach (_.pushAttachment(OriginalSymbol, sym))
      }
      tree.pushAttachment(SymOfTree, sym)
      sym
    }

    /** Add moduleClass/sourceModule to completer if it is for a module val or class */
    def adjustIfModule(completer: LazyType, tree: MemberDef) =
      if (tree.mods is Module) {
        val name = tree.name.encode
        if (name.isTermName)
          completer withModuleClass (_ => findModuleBuddy(name.moduleClassName))
        else
          completer withSourceModule (_ => findModuleBuddy(name.sourceModuleName))
      }
      else completer

    typr.println(i"creating symbol for $tree in ${ctx.mode}")

    // Reports double definitions: any clash at package level across the run,
    // or a clash in the current (non-class term) scope.
    def checkNoConflict(name: Name): Unit = {
      def preExisting = ctx.effectiveScope.lookup(name)
      if (ctx.owner is PackageClass) {
        if (preExisting.isDefinedInCurrentRun)
          ctx.error(s"${preExisting.showLocated} is compiled twice, runid = ${ctx.runId}", tree.pos)
      }
      else if ((!ctx.owner.isClass || name.isTypeName) && preExisting.exists) {
        ctx.error(i"$name is already defined as $preExisting", tree.pos)
      }
    }

    val inSuperCall = if (ctx.mode is Mode.InSuperCall) InSuperCall else EmptyFlags
    tree match {
      case tree: TypeDef if tree.isClassDef =>
        val name = tree.name.encode.asTypeName
        checkNoConflict(name)
        val cls = record(ctx.newClassSymbol(
          ctx.owner, name, tree.mods.flags | inSuperCall,
          cls => adjustIfModule(new ClassCompleter(cls, tree)(ctx), tree),
          privateWithinClass(tree.mods), tree.pos, ctx.source.file))
        // init() indexes the class's parameters eagerly (see ClassCompleter.init).
        cls.completer.asInstanceOf[ClassCompleter].init()
        cls
      case tree: MemberDef =>
        val name = tree.name.encode
        checkNoConflict(name)
        val isDeferred = lacksDefinition(tree)
        val deferred = if (isDeferred) Deferred else EmptyFlags
        val method = if (tree.isInstanceOf[DefDef]) Method else EmptyFlags
        val inSuperCall1 = if (tree.mods is ParamOrAccessor) EmptyFlags else inSuperCall
          // suppress inSuperCall for constructor parameters
        val higherKinded = tree match {
          case tree: TypeDef if tree.tparams.nonEmpty && isDeferred => HigherKinded
          case _ => EmptyFlags
        }
        // to complete a constructor, move one context further out -- this
        // is the context enclosing the class. Note that the context in which a
        // constructor is recorded and the context in which it is completed are
        // different: The former must have the class as owner (because the
        // constructor is owned by the class), the latter must not (because
        // constructor parameters are interpreted as if they are outside the class).
        // Don't do this for Java constructors because they need to see the import
        // of the companion object, and it is not necessary for them because they
        // have no implementation.
        val cctx = if (tree.name == nme.CONSTRUCTOR && !(tree.mods is JavaDefined)) ctx.outer else ctx
        record(ctx.newSymbol(
          ctx.owner, name, tree.mods.flags | deferred | method | higherKinded | inSuperCall1,
          adjustIfModule(new Completer(tree)(cctx), tree),
          privateWithinClass(tree.mods), tree.pos))
      case tree: Import =>
        record(ctx.newSymbol(
          ctx.owner, nme.IMPORT, Synthetic, new Completer(tree), NoSymbol, tree.pos))
      case _ =>
        NoSymbol
    }
  }
/** If `sym` exists, enter it in effective scope. Check that
* package members are not entered twice in the same run.
*/
def enterSymbol(sym: Symbol)(implicit ctx: Context) = {
if (sym.exists) {
typr.println(s"entered: $sym in ${ctx.owner} and ${ctx.effectiveScope}")
ctx.enter(sym)
}
sym
}
  /** Create package if it does not yet exist. */
  private def createPackageSymbol(pid: RefTree)(implicit ctx: Context): Symbol = {
    // Recurse over the dotted path; an unqualified name hangs off the root
    // (or the current owner when we are not in the empty package).
    val pkgOwner = pid match {
      case Ident(_) => if (ctx.owner eq defn.EmptyPackageClass) defn.RootClass else ctx.owner
      case Select(qual: RefTree, _) => createPackageSymbol(qual).moduleClass
    }
    val existing = pkgOwner.info.decls.lookup(pid.name)
    // Reuse the symbol only if it is a package declared by this same owner.
    if ((existing is Package) && (pkgOwner eq existing.owner)) existing
    else ctx.newCompletePackageSymbol(pkgOwner, pid.name.asTermName).entered
  }
/** Expand tree and store in `expandedTree` */
def expand(tree: Tree)(implicit ctx: Context): Unit = tree match {
case mdef: DefTree =>
val expanded = desugar.defTree(mdef)
typr.println(i"Expansion: $mdef expands to $expanded")
if (expanded ne mdef) mdef.pushAttachment(ExpandedTree, expanded)
case _ =>
}
/** The expanded version of this tree, or tree itself if not expanded */
def expanded(tree: Tree)(implicit ctx: Context): Tree = tree match {
case ddef: DefTree => ddef.attachmentOrElse(ExpandedTree, ddef)
case _ => tree
}
  /** A new context that summarizes an import statement.
   *  The ImportInfo makes the imported names visible to lookups in that context.
   */
  def importContext(sym: Symbol, selectors: List[Tree])(implicit ctx: Context) =
    ctx.fresh.setImportInfo(new ImportInfo(sym, selectors))
  /** A new context for the interior of a class.
   *  The fresh scope holds at most the self symbol (when an explicit, named
   *  self declaration exists); see also denotNamed, which relies on this.
   */
  def inClassContext(selfInfo: DotClass /* Should be Type | Symbol*/)(implicit ctx: Context): Context = {
    val localCtx: Context = ctx.fresh.setNewScope
    selfInfo match {
      case sym: Symbol if sym.exists && sym.name != nme.WILDCARD =>
        localCtx.scope.openForMutations.enter(sym)
      case _ =>
    }
    localCtx
  }
  /** For all class definitions `stat` in `xstats`: If the companion class is not also defined
   *  in `xstats`, invalidate it by setting its info to NoType.
   */
  def invalidateCompanions(pkg: Symbol, xstats: List[untpd.Tree])(implicit ctx: Context): Unit = {
    val definedNames = xstats collect { case stat: NameTree => stat.name }
    // Drop the stale denotation of a previously-compiled companion that no
    // longer appears in this compilation unit.
    def invalidate(name: TypeName) =
      if (!(definedNames contains name)) {
        val member = pkg.info.decl(name).asSymDenotation
        if (member.isClass && !(member is Package)) member.info = NoType
      }
    xstats foreach {
      case stat: TypeDef if stat.isClassDef =>
        invalidate(stat.name.moduleClassName)
      case _ =>
    }
  }
  /** Expand tree and create top-level symbols for statement and enter them into symbol table.
   *  Returns the context to use for subsequent statements (imports produce a new one).
   */
  def index(stat: Tree)(implicit ctx: Context): Context = {
    expand(stat)
    indexExpanded(stat)
  }
  /** Create top-level symbols for all statements in the expansion of this statement and
   *  enter them into symbol table
   */
  def indexExpanded(stat: Tree)(implicit ctx: Context): Context = expanded(stat) match {
    case pcl: PackageDef =>
      val pkg = createPackageSymbol(pcl.pid)
      index(pcl.stats)(ctx.fresh.setOwner(pkg.moduleClass))
      invalidateCompanions(pkg, Trees.flatten(pcl.stats map expanded))
      ctx
    case imp: Import =>
      // Imports change the context seen by the following statements.
      importContext(createSymbol(imp), imp.selectors)
    case mdef: DefTree =>
      enterSymbol(createSymbol(mdef))
      ctx
    case stats: Thicket =>
      // A single untyped statement may expand to several definitions.
      for (tree <- stats.toList) enterSymbol(createSymbol(tree))
      ctx
    case _ =>
      ctx
  }
  /** Create top-level symbols for statements and enter them into symbol table */
  def index(stats: List[Tree])(implicit ctx: Context): Context = {

    /** Merge the definitions of a synthetic companion generated by a case class
     *  and the real companion, if both exist.
     */
    def mergeCompanionDefs() = {
      val classDef = mutable.Map[TypeName, TypeDef]()
      for (cdef @ TypeDef(name, _) <- stats)
        if (cdef.isClassDef) classDef(name) = cdef
      for (mdef @ ModuleDef(name, _) <- stats)
        classDef get name.toTypeName match {
          case Some(cdef) =>
            // The module expansion is expected to be (module val :: module class);
            // the class expansion may carry a synthetic companion (cls :: mval :: mcls :: rest).
            val Thicket(vdef :: (mcls @ TypeDef(_, impl: Template)) :: Nil) = mdef.attachment(ExpandedTree)
            cdef.attachmentOrElse(ExpandedTree, cdef) match {
              case Thicket(cls :: mval :: TypeDef(_, compimpl: Template) :: crest) =>
                // Fold the synthetic companion's body into the real module class
                // and drop the synthetic companion from the class expansion.
                val mcls1 = cpy.TypeDef(mcls)(
                  rhs = cpy.Template(impl)(body = compimpl.body ++ impl.body))
                mdef.putAttachment(ExpandedTree, Thicket(vdef :: mcls1 :: Nil))
                cdef.putAttachment(ExpandedTree, Thicket(cls :: crest))
              case _ =>
            }
          case none =>
        }
    }

    stats foreach expand
    mergeCompanionDefs()
    (ctx /: stats) ((ctx, stat) => indexExpanded(stat)(ctx))
  }
  /** The completer of a symbol defined by a member def or import (except ClassSymbols) */
  class Completer(val original: Tree)(implicit ctx: Context) extends LazyType {

    protected def localContext(owner: Symbol) = ctx.fresh.setOwner(owner).setTree(original)

    // Computes the type of the symbol from its defining tree; dispatches on
    // the kind of definition.
    private def typeSig(sym: Symbol): Type = original match {
      case original: ValDef =>
        if (sym is Module) moduleValSig(sym)
        else valOrDefDefSig(original, sym, Nil, identity)(localContext(sym).setNewScope)
      case original: DefDef =>
        // Each method gets its own nested typer so shared trees can be typed
        // independently (see the nestedTyper doc above).
        val typer1 = new Typer
        nestedTyper(sym) = typer1
        typer1.defDefSig(original, sym)(localContext(sym).setTyper(typer1))
      case original: TypeDef =>
        assert(!original.isClassDef)
        typeDefSig(original, sym)(localContext(sym).setNewScope)
      case imp: Import =>
        try {
          val expr1 = typedAheadExpr(imp.expr, AnySelectionProto)
          ImportType(expr1)
        } catch {
          case ex: CyclicReference =>
            typr.println(s"error while completing ${imp.expr}")
            throw ex
        }
    }

    final override def complete(denot: SymDenotation)(implicit ctx: Context) = {
      // Diagnostic only: warn when completion happens under a typer state other
      // than the one the completer was created in.
      if (completions != noPrinter && ctx.typerState != this.ctx.typerState) {
        completions.println(completions.getClass.toString)
        def levels(c: Context): Int =
          if (c.typerState eq this.ctx.typerState) 0
          else if (c.typerState == null) -1
          else if (c.outer.typerState == c.typerState) levels(c.outer)
          else levels(c.outer) + 1
        completions.println(s"!!!completing ${denot.symbol.showLocated} in buried typerState, gap = ${levels(ctx)}")
      }
      completeInCreationContext(denot)
    }

    // Note: runs under the creation-time context `this.ctx`, not the caller's.
    def completeInCreationContext(denot: SymDenotation): Unit =
      denot.info = typeSig(denot.symbol)
  }
  class ClassCompleter(cls: ClassSymbol, original: TypeDef)(ictx: Context) extends Completer(original)(ictx) {
    withDecls(newScope)

    // InSuperCall is suppressed inside the class body itself.
    protected implicit val ctx: Context = localContext(cls).setMode(ictx.mode &~ Mode.InSuperCall)

    val TypeDef(name, impl @ Template(constr, parents, self, body)) = original

    // Leading type params and parameter accessors come first in the template
    // body; split them off from the remaining statements.
    val (params, rest) = body span {
      case td: TypeDef => td.mods is Param
      case td: ValDef => td.mods is ParamAccessor
      case _ => false
    }

    // Called eagerly from createSymbol so parameters are indexed up front.
    def init() = index(params)

    /** The type signature of a ClassDef with given symbol */
    override def completeInCreationContext(denot: SymDenotation): Unit = {

      /** The type of a parent constructor. Types constructor arguments
       *  only if parent type contains uninstantiated type parameters.
       */
      def parentType(parent: untpd.Tree)(implicit ctx: Context): Type =
        if (parent.isType) {
          typedAheadType(parent).tpe
        } else {
          val (core, targs) = stripApply(parent) match {
            case TypeApply(core, targs) => (core, targs)
            case core => (core, Nil)
          }
          val Select(New(tpt), nme.CONSTRUCTOR) = core
          val targs1 = targs map (typedAheadType(_))
          val ptype = typedAheadType(tpt).tpe appliedTo targs1.tpes
          // Only type the full constructor call when type params remain open.
          if (ptype.typeParams.isEmpty) ptype
          else typedAheadExpr(parent).tpe
        }

      def checkedParentType(parent: untpd.Tree): Type = {
        val ptype = parentType(parent)(ctx.superCallContext)
        if (cls.isRefinementClass) ptype
        else checkClassTypeWithStablePrefix(ptype, parent.pos, traitReq = parent ne parents.head)
      }

      val selfInfo =
        if (self.isEmpty) NoType
        else if (cls is Module) cls.owner.thisType select sourceModule
        else createSymbol(self)
      // pre-set info, so that parent types can refer to type params
      denot.info = ClassInfo(cls.owner.thisType, cls, Nil, decls, selfInfo)
      // Ensure constructor is completed so that any parameter accessors
      // which have type trees deriving from its parameters can be
      // completed in turn. Note that parent types access such parameter
      // accessors, that's why the constructor needs to be completed before
      // the parent types are elaborated.
      index(constr)
      symbolOfTree(constr).ensureCompleted()
      val parentTypes = ensureFirstIsClass(parents map checkedParentType)
      val parentRefs = ctx.normalizeToClassRefs(parentTypes, cls, decls)
      typr.println(s"completing $denot, parents = $parents, parentTypes = $parentTypes, parentRefs = $parentRefs")
      index(rest)(inClassContext(selfInfo))
      denot.info = ClassInfo(cls.owner.thisType, cls, parentRefs, decls, selfInfo)
      // Traits whose members need no initialization get the NoInits flag; if in
      // addition all members are pure-interface material, PureInterface as well.
      if (cls is Trait) {
        if (body forall isNoInitMember) {
          cls.setFlag(NoInits)
          if (body forall isPureInterfaceMember)
            cls.setFlag(PureInterface)
        }
      }
    }
  }
/** Typecheck tree during completion, and remember result in typedtree map */
private def typedAheadImpl(tree: Tree, pt: Type)(implicit ctx: Context): tpd.Tree = {
val xtree = expanded(tree)
xtree.getAttachment(TypedAhead) match {
case Some(ttree) => ttree
case none =>
val ttree = typer.typed(tree, pt)
xtree.pushAttachment(TypedAhead, ttree)
ttree
}
}
  /** Type-check `tree` ahead of time as a type (Mode.Type, pattern mode cleared). */
  def typedAheadType(tree: Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree =
    typedAheadImpl(tree, pt)(ctx retractMode Mode.PatternOrType addMode Mode.Type)

  /** Type-check `tree` ahead of time as an expression (pattern/type modes cleared). */
  def typedAheadExpr(tree: Tree, pt: Type = WildcardType)(implicit ctx: Context): tpd.Tree =
    typedAheadImpl(tree, pt)(ctx retractMode Mode.PatternOrType)
/** Enter and typecheck parameter list */
def completeParams(params: List[MemberDef])(implicit ctx: Context) = {
index(params)
for (param <- params) typedAheadExpr(param)
}
  /** The signature of a module valdef.
   *  This will compute the corresponding module class TypeRef immediately
   *  without going through the defined type of the ValDef. This is necessary
   *  to avoid cyclic references involving imports and module val defs.
   */
  def moduleValSig(sym: Symbol)(implicit ctx: Context): Type = {
    val clsName = sym.name.moduleClassName
    // Disambiguate in case several denotations share the name.
    val cls = ctx.denotNamed(clsName) suchThat (_ is ModuleClass)
    ctx.owner.thisType select (clsName, cls)
  }
  /** The type signature of a ValDef or DefDef
   *  @param mdef       The definition
   *  @param sym        Its symbol
   *  @param typeParams The symbols of the definition's type parameters, if any
   *  @param paramFn    A wrapping function that produces the type of the
   *                    defined symbol, given its final return type
   */
  def valOrDefDefSig(mdef: ValOrDefDef, sym: Symbol, typeParams: List[Symbol], paramFn: Type => Type)(implicit ctx: Context): Type = {

    // Computed only when the definition has no explicit result type.
    def inferredType = {
      /** A type for this definition that might be inherited from elsewhere:
       *  If this is a setter parameter, the corresponding getter type.
       *  If this is a class member, the conjunction of all result types
       *  of overridden methods.
       *  NoType if neither case holds.
       */
      val inherited =
        if (sym.owner.isTerm) NoType
        else {
          // TODO: Look only at member of supertype instead?
          lazy val schema = paramFn(WildcardType)
          val site = sym.owner.thisType
          // Intersect the result types of all matching members in base classes.
          ((NoType: Type) /: sym.owner.info.baseClasses.tail) { (tp, cls) =>
            val iRawInfo =
              cls.info.nonPrivateDecl(sym.name).matchingDenotation(site, schema).info
            val iInstInfo = iRawInfo match {
              case iRawInfo: PolyType =>
                if (iRawInfo.paramNames.length == typeParams.length)
                  iRawInfo.instantiate(typeParams map (_.typeRef))
                else NoType
              case _ =>
                if (typeParams.isEmpty) iRawInfo
                else NoType
            }
            val iResType = iInstInfo.finalResultType.asSeenFrom(site, cls)
            if (iResType.exists)
              typr.println(s"using inherited type; raw: $iRawInfo, inst: $iInstInfo, inherited: $iResType")
            tp & iResType
          }
        }

      /** The proto-type to be used when inferring the result type from
       *  the right hand side. This is `WildcardType` except if the definition
       *  is a default getter. In that case, the proto-type is the type of
       *  the corresponding parameter where bound parameters are replaced by
       *  Wildcards.
       */
      def rhsProto = {
        val name = sym.asTerm.name
        val idx = name.defaultGetterIndex
        if (idx < 0) WildcardType
        else {
          val original = name.defaultGetterToMethod
          val meth: Denotation =
            if (original.isConstructorName && (sym.owner is ModuleClass))
              sym.owner.companionClass.info.decl(nme.CONSTRUCTOR)
            else
              ctx.defContext(sym).denotNamed(original)
          // Walk the flattened parameter lists to find the idx'th parameter type.
          def paramProto(paramss: List[List[Type]], idx: Int): Type = paramss match {
            case params :: paramss1 =>
              if (idx < params.length) wildApprox(params(idx))
              else paramProto(paramss1, idx - params.length)
            case nil =>
              WildcardType
          }
          val defaultAlts = meth.altsWith(_.hasDefaultParams)
          if (defaultAlts.length == 1)
            paramProto(defaultAlts.head.info.widen.paramTypess, idx)
          else
            WildcardType
        }
      }

      // println(s"final inherited for $sym: ${inherited.toString}") !!!
      // println(s"owner = ${sym.owner}, decls = ${sym.owner.info.decls.show}")
      val rhsCtx = ctx.fresh addMode Mode.InferringReturnType
      def rhsType = typedAheadExpr(mdef.rhs, rhsProto)(rhsCtx).tpe.widen.approximateUnion
      def lhsType = fullyDefinedType(rhsType, "right-hand side", mdef.pos)
      if (inherited.exists) inherited
      else {
        // Implicits must state their type explicitly; demote to non-implicit
        // after reporting so typing can proceed.
        if (sym is Implicit) {
          val resStr = if (mdef.isInstanceOf[DefDef]) "result " else ""
          ctx.error(d"${resStr}type of implicit definition needs to be given explicitly", mdef.pos)
          sym.resetFlag(Implicit)
        }
        lhsType orElse WildcardType
      }
    }

    // Only a completely missing type tree triggers inference; derived type
    // trees compute their own type during typedAheadType.
    val pt = mdef.tpt match {
      case _: untpd.DerivedTypeTree => WildcardType
      case TypeTree(untpd.EmptyTree) => inferredType
      case _ => WildcardType
    }
    paramFn(typedAheadType(mdef.tpt, pt).tpe)
  }
  /** The type signature of a DefDef with given symbol */
  def defDefSig(ddef: DefDef, sym: Symbol)(implicit ctx: Context) = {
    val DefDef(name, tparams, vparamss, _, _) = ddef
    completeParams(tparams)
    vparamss foreach completeParams
    val isConstructor = name == nme.CONSTRUCTOR
    def typeParams = tparams map symbolOfTree

    // Wraps the result type in (Poly/Method/Expr)Types matching the parameter lists.
    def wrapMethType(restpe: Type): Type = {
      var paramSymss = vparamss.nestedMap(symbolOfTree)
      // Make sure constructor has one non-implicit parameter list
      if (isConstructor &&
        (paramSymss.isEmpty || paramSymss.head.nonEmpty && (paramSymss.head.head is Implicit)))
        paramSymss = Nil :: paramSymss
      val restpe1 = // try to make anonymous functions non-dependent, so that they can be used in closures
        if (name == nme.ANON_FUN) avoid(restpe, paramSymss.flatten)
        else restpe
      // Fold parameter lists from the right into nested method types.
      val monotpe =
        (paramSymss :\ restpe1) { (params, restpe) =>
          val isJava = ddef.mods is JavaDefined
          val make =
            if (params.nonEmpty && (params.head is Implicit)) ImplicitMethodType
            else if(isJava) JavaMethodType
            else MethodType
          // Java's Object parameter types are seen as Any from Scala.
          if(isJava) params.foreach { symbol =>
            if(symbol.info.isDirectRef(defn.ObjectClass)) symbol.info = defn.AnyType
          }
          make.fromSymbols(params, restpe)
        }
      if (typeParams.nonEmpty) PolyType.fromSymbols(typeParams, monotpe)
      else if (vparamss.isEmpty) ExprType(monotpe)
      else monotpe
    }

    if (isConstructor) {
      // set result type tree to unit, but take the current class as result type of the symbol
      typedAheadType(ddef.tpt, defn.UnitType)
      wrapMethType(sym.owner.typeRef.appliedTo(typeParams map (_.typeRef)))
    }
    else valOrDefDefSig(ddef, sym, typeParams, wrapMethType)
  }
  /** The type signature of a TypeDef with given symbol `sym`.
   *
   *  Completes the type parameters, temporarily assigns `sym` empty bounds to
   *  break F-bounded cycles (see the long comment below), elaborates the
   *  right-hand side, and finally runs the cyclicity check on the result.
   */
  def typeDefSig(tdef: TypeDef, sym: Symbol)(implicit ctx: Context): Type = {
    completeParams(tdef.tparams)
    val tparamSyms = tdef.tparams map symbolOfTree
    val isDerived = tdef.rhs.isInstanceOf[untpd.DerivedTypeTree]
    val toParameterize = tparamSyms.nonEmpty && !isDerived
    val needsLambda = sym.allOverriddenSymbols.exists(_ is HigherKinded) && !isDerived
    // Lifts `tp` over the type parameters: lambda-abstract when overriding a
    // higher-kinded member, otherwise parameterize, otherwise leave as is.
    def abstracted(tp: Type): Type =
      if (needsLambda) tp.LambdaAbstract(tparamSyms)
      else if (toParameterize) tp.parameterizeWith(tparamSyms)
      else tp
    sym.info = abstracted(TypeBounds.empty)
    // Temporarily set info of defined type T to `>: Nothing <: Any`.
    // This is done to avoid cyclic reference errors for F-bounds.
    // This is subtle: `sym` has now an empty TypeBounds, but is not automatically
    // made an abstract type. If it had been made an abstract type, it would count as an
    // abstract type of its enclosing class, which might make that class an invalid
    // prefix. I verified this would lead to an error when compiling io.ClassPath.
    // A distilled version is in pos/prefix.scala.
    //
    // The scheme critically relies on an implementation detail of isRef, which
    // inspects a TypeRef's info, instead of simply dealiasing alias types.
    val rhsType = typedAheadType(tdef.rhs).tpe
    val unsafeInfo = rhsType match {
      case _: TypeBounds => abstracted(rhsType).asInstanceOf[TypeBounds]
      case _ => TypeAlias(abstracted(rhsType), if (sym is Local) sym.variance else 0)
    }
    // Reset info before the cyclicity check so the check sees an uncompleted symbol.
    sym.info = NoCompleter
    checkNonCyclic(sym, unsafeInfo, reportErrors = true)
  }
} | AlexSikia/dotty | src/dotty/tools/dotc/typer/Namer.scala | Scala | bsd-3-clause | 28,939 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
import scala.concurrent.{ExecutionContext, Promise, Future}
import org.scalatest._
import SharedHelpers.EventRecordingReporter
import org.scalatest.concurrent.SleepHelper
import org.scalatest.events.{InfoProvided, MarkupProvided}
import scala.util.Success
/**
 * Tests for `fixture.AsyncFeatureSpec` / `fixture.AsyncFeatureSpecLike`.
 *
 * Covers: parallel execution (with `ParallelTestExecution`) and default serial
 * execution, for both `Future`-returning and plain-result tests; the
 * single-main-thread guarantees of the default serial execution context; and
 * how info/note/alert/markup events fired from the spec body, a feature body,
 * a scenario body, or a `Future` returned by a scenario body are reported
 * (recorded inside the test result vs. sent immediately).
 *
 * NOTE: the `// SKIP-SCALATESTJS,NATIVE-*` and `// SCALATESTJS-ONLY` comments
 * are build directives processed by the ScalaTest build; they must be kept
 * verbatim.
 */
class AsyncFeatureSpecSpec2 extends org.scalatest.AsyncFunSpec {
  describe("AsyncFeatureSpec") {
    it("can be used for tests that return Future under parallel async test execution") {
      class ExampleSpec extends fixture.AsyncFeatureSpec with ParallelTestExecution {
        type FixtureParam = String
        // Every test receives the same trivial string fixture.
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        val a = 1
        Scenario("test 1") { fixture =>
          Future {
            assert(a == 1)
          }
        }
        Scenario("test 2") { fixture =>
          Future {
            assert(a == 2) // intentionally fails; checked via testFailedEventsReceived below
          }
        }
        Scenario("test 3") { fixture =>
          Future {
            pending
          }
        }
        Scenario("test 4") { fixture =>
          Future {
            cancel
          }
        }
        ignore("test 5") { fixture =>
          Future {
            cancel
          }
        }
        override def newInstance = new ExampleSpec
      }
      val rep = new EventRecordingReporter
      val spec = new ExampleSpec
      val status = spec.run(None, Args(reporter = rep))
      // Bridge Status completion into a Future so the assertions run only
      // after the whole inner suite has finished.
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        assert(repo.testStartingEventsReceived.length == 4)
        assert(repo.testSucceededEventsReceived.length == 1)
        assert(repo.testSucceededEventsReceived(0).testName == "Scenario: test 1")
        assert(repo.testFailedEventsReceived.length == 1)
        assert(repo.testFailedEventsReceived(0).testName == "Scenario: test 2")
        assert(repo.testPendingEventsReceived.length == 1)
        assert(repo.testPendingEventsReceived(0).testName == "Scenario: test 3")
        assert(repo.testCanceledEventsReceived.length == 1)
        assert(repo.testCanceledEventsReceived(0).testName == "Scenario: test 4")
        assert(repo.testIgnoredEventsReceived.length == 1)
        assert(repo.testIgnoredEventsReceived(0).testName == "Scenario: test 5")
      }
    }
    it("can be used for tests that did not return Future under parallel async test execution") {
      class ExampleSpec extends fixture.AsyncFeatureSpec with ParallelTestExecution {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        val a = 1
        Scenario("test 1") { fixture =>
          assert(a == 1)
        }
        Scenario("test 2") { fixture =>
          assert(a == 2) // intentionally fails
        }
        Scenario("test 3") { fixture =>
          pending
        }
        Scenario("test 4") { fixture =>
          cancel
        }
        ignore("test 5") { fixture =>
          cancel
        }
        override def newInstance = new ExampleSpec
      }
      val rep = new EventRecordingReporter
      val spec = new ExampleSpec
      val status = spec.run(None, Args(reporter = rep))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        assert(repo.testStartingEventsReceived.length == 4)
        assert(repo.testSucceededEventsReceived.length == 1)
        assert(repo.testSucceededEventsReceived(0).testName == "Scenario: test 1")
        assert(repo.testFailedEventsReceived.length == 1)
        assert(repo.testFailedEventsReceived(0).testName == "Scenario: test 2")
        assert(repo.testPendingEventsReceived.length == 1)
        assert(repo.testPendingEventsReceived(0).testName == "Scenario: test 3")
        assert(repo.testCanceledEventsReceived.length == 1)
        assert(repo.testCanceledEventsReceived(0).testName == "Scenario: test 4")
        assert(repo.testIgnoredEventsReceived.length == 1)
        assert(repo.testIgnoredEventsReceived(0).testName == "Scenario: test 5")
      }
    }
    it("should run tests that return Future in serial by default") {
      // `count` observes execution order: each test asserts the value left by
      // the previous one, so the suite only passes if tests run serially.
      @volatile var count = 0
      class ExampleSpec extends fixture.AsyncFeatureSpec {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Scenario("test 1") { fixture =>
          Future {
            SleepHelper.sleep(30)
            assert(count == 0)
            count = 1
            Succeeded
          }
        }
        Scenario("test 2") { fixture =>
          Future {
            assert(count == 1)
            SleepHelper.sleep(50)
            count = 2
            Succeeded
          }
        }
        Scenario("test 3") { fixture =>
          Future {
            assert(count == 2)
          }
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        assert(repo.testStartingEventsReceived.length == 3)
        assert(repo.testSucceededEventsReceived.length == 3)
      }
    }
    it("should run tests that does not return Future in serial by default") {
      @volatile var count = 0
      class ExampleSpec extends fixture.AsyncFeatureSpec {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Scenario("test 1") { fixture =>
          SleepHelper.sleep(30)
          assert(count == 0)
          count = 1
          Succeeded
        }
        Scenario("test 2") { fixture =>
          assert(count == 1)
          SleepHelper.sleep(50)
          count = 2
          Succeeded
        }
        Scenario("test 3") { fixture =>
          assert(count == 2)
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        assert(repo.testStartingEventsReceived.length == 3)
        assert(repo.testSucceededEventsReceived.length == 3)
      }
    }
    // SKIP-SCALATESTJS,NATIVE-START
    it("should run tests and its future in same main thread when use SerialExecutionContext") {
      var mainThread = Thread.currentThread
      var test1Thread: Option[Thread] = None
      var test2Thread: Option[Thread] = None
      var onCompleteThread: Option[Thread] = None
      class ExampleSpec extends fixture.AsyncFeatureSpec {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Scenario("test 1") { fixture =>
          Future {
            test1Thread = Some(Thread.currentThread)
            succeed
          }
        }
        Scenario("test 2") { fixture =>
          Future {
            test2Thread = Some(Thread.currentThread)
            succeed
          }
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      status.whenCompleted { s =>
        onCompleteThread = Some(Thread.currentThread)
      }
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        // Every test body and the completion callback must have executed on
        // the thread that called run() (the SerialExecutionContext contract).
        assert(test1Thread.isDefined)
        assert(test1Thread.get == mainThread)
        assert(test2Thread.isDefined)
        assert(test2Thread.get == mainThread)
        assert(onCompleteThread.isDefined)
        assert(onCompleteThread.get == mainThread)
      }
    }
    it("should run tests and its true async future in the same thread when use SerialExecutionContext") {
      var mainThread = Thread.currentThread
      @volatile var test1Thread: Option[Thread] = None
      @volatile var test2Thread: Option[Thread] = None
      var onCompleteThread: Option[Thread] = None
      class ExampleSpec extends fixture.AsyncFeatureSpec {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Scenario("test 1") { fixture =>
          // Completes the promise from a timer thread, but the map below must
          // still run on the main thread.
          val promise = Promise[Assertion]
          val timer = new java.util.Timer
          timer.schedule(
            new java.util.TimerTask {
              def run(): Unit = {
                promise.complete(Success(succeed))
              }
            },
            1000
          )
          promise.future.map { s =>
            test1Thread = Some(Thread.currentThread)
            s
          }
        }
        Scenario("test 2") { fixture =>
          val promise = Promise[Assertion]
          val timer = new java.util.Timer
          timer.schedule(
            new java.util.TimerTask {
              def run(): Unit = {
                promise.complete(Success(succeed))
              }
            },
            500
          )
          promise.future.map { s =>
            test2Thread = Some(Thread.currentThread)
            s
          }
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      status.whenCompleted { s =>
        onCompleteThread = Some(Thread.currentThread)
      }
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        assert(test1Thread.isDefined)
        assert(test1Thread.get == mainThread)
        assert(test2Thread.isDefined)
        assert(test2Thread.get == mainThread)
        assert(onCompleteThread.isDefined)
        assert(onCompleteThread.get == mainThread)
      }
    }
    it("should not run out of stack space with nested futures when using SerialExecutionContext") {
      class ExampleSpec extends fixture.AsyncFeatureSpec {
        // Note we get a StackOverflowError with the following execution
        // context.
        // override implicit def executionContext: ExecutionContext = new ExecutionContext { def execute(runnable: Runnable) = runnable.run; def reportFailure(cause: Throwable) = () }
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        // Deeply nested flatMap chain; passes only if the execution context
        // runs continuations without growing the call stack.
        def sum(xs: List[Int]): Future[Int] =
          xs match {
            case Nil => Future.successful(0)
            case x :: xs => Future(x).flatMap(xx => sum(xs).map(xxx => xx + xxx))
          }
        Scenario("test 1") { fixture =>
          val fut: Future[Int] = sum((1 to 50000).toList)
          fut.map(total => assert(total == 1250025000))
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        assert(!rep.testSucceededEventsReceived.isEmpty)
      }
    }
    // SKIP-SCALATESTJS,NATIVE-END
    it("should run tests that returns Future and report their result in serial") {
      class ExampleSpec extends fixture.AsyncFeatureSpec {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Scenario("test 1") { fixture =>
          Future {
            SleepHelper.sleep(60)
            succeed
          }
        }
        Scenario("test 2") { fixture =>
          Future {
            SleepHelper.sleep(30)
            succeed
          }
        }
        Scenario("test 3") { fixture =>
          Future {
            succeed
          }
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        // Events must arrive in registration order despite differing sleeps.
        assert(rep.testStartingEventsReceived.length == 3)
        assert(rep.testStartingEventsReceived(0).testName == "Scenario: test 1")
        assert(rep.testStartingEventsReceived(1).testName == "Scenario: test 2")
        assert(rep.testStartingEventsReceived(2).testName == "Scenario: test 3")
        assert(rep.testSucceededEventsReceived.length == 3)
        assert(rep.testSucceededEventsReceived(0).testName == "Scenario: test 1")
        assert(rep.testSucceededEventsReceived(1).testName == "Scenario: test 2")
        assert(rep.testSucceededEventsReceived(2).testName == "Scenario: test 3")
      }
    }
    it("should run tests that does not return Future and report their result in serial") {
      class ExampleSpec extends fixture.AsyncFeatureSpec {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Scenario("test 1") { fixture =>
          SleepHelper.sleep(60)
          succeed
        }
        Scenario("test 2") { fixture =>
          SleepHelper.sleep(30)
          succeed
        }
        Scenario("test 3") { fixture =>
          succeed
        }
      }
      val rep = new EventRecordingReporter
      val suite = new ExampleSpec
      val status = suite.run(None, Args(reporter = rep))
      // SKIP-SCALATESTJS,NATIVE-START
      status.waitUntilCompleted()
      // SKIP-SCALATESTJS,NATIVE-END
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(rep) }
      promise.future.map { repo =>
        assert(rep.testStartingEventsReceived.length == 3)
        assert(rep.testStartingEventsReceived(0).testName == "Scenario: test 1")
        assert(rep.testStartingEventsReceived(1).testName == "Scenario: test 2")
        assert(rep.testStartingEventsReceived(2).testName == "Scenario: test 3")
        assert(rep.testSucceededEventsReceived.length == 3)
        assert(rep.testSucceededEventsReceived(0).testName == "Scenario: test 1")
        assert(rep.testSucceededEventsReceived(1).testName == "Scenario: test 2")
        assert(rep.testSucceededEventsReceived(2).testName == "Scenario: test 3")
      }
    }
    // The next four tests check where InfoProvided events surface: fired from
    // the spec or feature body they are reported directly; fired from inside a
    // scenario (or its Future) they are recorded on the test's result instead.
    it("should send an InfoProvided event for an info in main spec body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        info(
          "hi there"
        )
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val infoList = reporter.infoProvidedEventsReceived
        assert(infoList.size == 1)
        assert(infoList(0).message == "hi there")
      }
    }
    it("should send an InfoProvided event for an info in feature body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          info(
            "hi there"
          )
          Scenario("test 1") { fixture => succeed }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val infoList = reporter.infoProvidedEventsReceived
        assert(infoList.size == 1)
        assert(infoList(0).message == "hi there")
      }
    }
    it("should send an InfoProvided event for an info in scenario body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          Scenario("test 1") { fixture =>
            info("hi there")
            succeed
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        // info() inside a test is recorded on the TestSucceeded event, not
        // sent as a standalone InfoProvided.
        val infoList = reporter.infoProvidedEventsReceived
        assert(infoList.size == 0)
        val testSucceededList = reporter.testSucceededEventsReceived
        assert(testSucceededList.size == 1)
        assert(testSucceededList(0).recordedEvents.size == 1)
        val recordedEvent = testSucceededList(0).recordedEvents(0)
        assert(recordedEvent.isInstanceOf[InfoProvided])
        val infoProvided = recordedEvent.asInstanceOf[InfoProvided]
        assert(infoProvided.message == "hi there")
      }
    }
    it("should send an InfoProvided event for an info in Future returned by scenario body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          Scenario("test 1") { fixture =>
            Future {
              info("hi there")
              succeed
            }
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val infoList = reporter.infoProvidedEventsReceived
        assert(infoList.size == 0)
        val testSucceededList = reporter.testSucceededEventsReceived
        assert(testSucceededList.size == 1)
        assert(testSucceededList(0).recordedEvents.size == 1)
        val recordedEvent = testSucceededList(0).recordedEvents(0)
        assert(recordedEvent.isInstanceOf[InfoProvided])
        val infoProvided = recordedEvent.asInstanceOf[InfoProvided]
        assert(infoProvided.message == "hi there")
      }
    }
    // note() events, unlike info(), are always sent immediately — even from
    // inside a scenario body or its Future.
    it("should send a NoteProvided event for a note in main spec body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        note(
          "hi there"
        )
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val noteList = reporter.noteProvidedEventsReceived
        assert(noteList.size == 1)
        assert(noteList(0).message == "hi there")
      }
    }
    it("should send a NoteProvided event for a note in feature body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          note(
            "hi there"
          )
          Scenario("test 1") { fixture => succeed }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val noteList = reporter.noteProvidedEventsReceived
        assert(noteList.size == 1)
        assert(noteList(0).message == "hi there")
      }
    }
    it("should send a NoteProvided event for a note in scenario body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          Scenario("test 1") { fixture =>
            note("hi there")
            succeed
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val noteList = reporter.noteProvidedEventsReceived
        assert(noteList.size == 1)
        assert(noteList(0).message == "hi there")
      }
    }
    it("should send a NoteProvided event for a note in Future returned by scenario body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          Scenario("test 1") { fixture =>
            Future {
              note("hi there")
              succeed
            }
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val noteList = reporter.noteProvidedEventsReceived
        assert(noteList.size == 1)
        assert(noteList(0).message == "hi there")
      }
    }
    // alert() behaves like note(): always sent immediately.
    it("should send an AlertProvided event for an alert in main spec body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        alert(
          "hi there"
        )
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val alertList = reporter.alertProvidedEventsReceived
        assert(alertList.size == 1)
        assert(alertList(0).message == "hi there")
      }
    }
    it("should send an AlertProvided event for an alert in feature body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          alert(
            "hi there"
          )
          Scenario("test 1") { fixture => succeed }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val alertList = reporter.alertProvidedEventsReceived
        assert(alertList.size == 1)
        assert(alertList(0).message == "hi there")
      }
    }
    it("should send an AlertProvided event for an alert in scenario body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          Scenario("test 1") { fixture =>
            alert("hi there")
            succeed
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val alertList = reporter.alertProvidedEventsReceived
        assert(alertList.size == 1)
        assert(alertList(0).message == "hi there")
      }
    }
    it("should send an AlertProvided event for an alert in Future returned by scenario body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          Scenario("test 1") { fixture =>
            Future {
              alert("hi there")
              succeed
            }
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val alertList = reporter.alertProvidedEventsReceived
        assert(alertList.size == 1)
        assert(alertList(0).message == "hi there")
      }
    }
    // markup() behaves like info(): recorded on the test result when fired
    // from inside a scenario, reported directly otherwise.
    it("should send a MarkupProvided event for a markup in main spec body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        markup(
          "hi there"
        )
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val markupList = reporter.markupProvidedEventsReceived
        assert(markupList.size == 1)
        assert(markupList(0).text == "hi there")
      }
    }
    it("should send a MarkupProvided event for a markup in feature body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          markup(
            "hi there"
          )
          Scenario("test 1") { fixture => succeed }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val markupList = reporter.markupProvidedEventsReceived
        assert(markupList.size == 1)
        assert(markupList(0).text == "hi there")
      }
    }
    it("should send a MarkupProvided event for a markup in scenario body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          Scenario("test 1") { fixture =>
            markup("hi there")
            succeed
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val markupList = reporter.markupProvidedEventsReceived
        assert(markupList.size == 0)
        val testSucceededList = reporter.testSucceededEventsReceived
        assert(testSucceededList.size == 1)
        assert(testSucceededList(0).recordedEvents.size == 1)
        val recordedEvent = testSucceededList(0).recordedEvents(0)
        assert(recordedEvent.isInstanceOf[MarkupProvided])
        val markupProvided = recordedEvent.asInstanceOf[MarkupProvided]
        assert(markupProvided.text == "hi there")
      }
    }
    it("should send a MarkupProvided event for a markup in Future returned by scenario body") {
      class MySuite extends fixture.AsyncFeatureSpecLike {
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        Feature("test feature") {
          Scenario("test 1") { fixture =>
            Future {
              markup("hi there")
              succeed
            }
          }
        }
      }
      val suite = new MySuite
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { repo =>
        val markupList = reporter.markupProvidedEventsReceived
        assert(markupList.size == 0)
        val testSucceededList = reporter.testSucceededEventsReceived
        assert(testSucceededList.size == 1)
        assert(testSucceededList(0).recordedEvents.size == 1)
        val recordedEvent = testSucceededList(0).recordedEvents(0)
        assert(recordedEvent.isInstanceOf[MarkupProvided])
        val markupProvided = recordedEvent.asInstanceOf[MarkupProvided]
        assert(markupProvided.text == "hi there")
      }
    }
    it("should allow other execution context to be used") {
      class TestSpec extends fixture.AsyncFeatureSpec {
        // SKIP-SCALATESTJS,NATIVE-START
        override implicit val executionContext = scala.concurrent.ExecutionContext.Implicits.global
        // SKIP-SCALATESTJS,NATIVE-END
        // SCALATESTJS-ONLY override implicit val executionContext = scala.scalajs.concurrent.JSExecutionContext.runNow
        type FixtureParam = String
        def withFixture(test: OneArgAsyncTest): FutureOutcome =
          test("testing")
        val a = 1
        Feature("feature 1") {
          Scenario("scenario A") { fixture =>
            Future { assert(a == 1) }
          }
        }
        Feature("feature 2") {
          Scenario("scenario B") { fixture =>
            Future { assert(a == 1) }
          }
        }
        Feature("group3") {
          Scenario("test C") { fixture =>
            Future { assert(a == 1) }
          }
        }
      }
      val suite = new TestSpec
      val reporter = new EventRecordingReporter
      val status = suite.run(None, Args(reporter))
      val promise = Promise[EventRecordingReporter]
      status whenCompleted { _ => promise.success(reporter) }
      promise.future.map { r =>
        assert(reporter.scopeOpenedEventsReceived.length == 3)
        assert(reporter.scopeClosedEventsReceived.length == 3)
        assert(reporter.testStartingEventsReceived.length == 3)
        assert(reporter.testSucceededEventsReceived.length == 3)
      }
    }
  }
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/fixture/AsyncFeatureSpecSpec2.scala | Scala | apache-2.0 | 31,759 |
package scala.meta.contrib.implicits
import scala.meta._
import scala.meta.contrib._
import scala.meta.contrib.equality.{Structurally, Syntactically}
/** Enrichments for sets of scala.meta trees.
 *
 *  Provides views of a `Set[A <: Tree]` whose elements compare by structural
 *  or by syntactic equality instead of reference equality.
 */
trait SetExtensions {
  implicit class SetEnrichments[A <: Tree](set: Set[A]) {

    /** A set whose elements compare trees structurally. */
    def structurally: Set[Structurally[A]] =
      set.map(tree => Structurally(tree))

    /** A set whose elements compare trees by their exact syntax. */
    def syntactically: Set[Syntactically[A]] =
      set.map(tree => Syntactically(tree))
  }
}

/** Importable instance of [[SetExtensions]]. */
object SetExtensions extends SetExtensions
| scalameta/scalameta | scalameta/contrib/shared/src/main/scala/scala/meta/contrib/implicits/SetExtensions.scala | Scala | bsd-3-clause | 438 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.netty
import scala.util.Random
import org.mockito.Mockito.mock
import org.scalatest._
import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
import org.apache.spark.internal.config._
import org.apache.spark.network.BlockDataManager
/**
 * Tests port-binding behaviour of [[NettyBlockTransferService]]: binding to
 * random ports, to a specific port, and the retry-with-increment behaviour
 * when a requested port is already taken.
 */
// NOTE(review): `ShouldMatchers` is the deprecated alias of `Matchers` in
// newer ScalaTest versions — confirm against the ScalaTest version in use.
class NettyBlockTransferServiceSuite
  extends SparkFunSuite
  with BeforeAndAfterEach
  with ShouldMatchers {

  // Services created by individual tests; closed and nulled out in afterEach.
  private var service0: NettyBlockTransferService = _
  private var service1: NettyBlockTransferService = _

  override def afterEach() {
    try {
      // Release any ports bound during the test.
      if (service0 != null) {
        service0.close()
        service0 = null
      }
      if (service1 != null) {
        service1.close()
        service1 = null
      }
    } finally {
      super.afterEach()
    }
  }

  test("can bind to a random port") {
    service0 = createService(port = 0) // port 0 asks the OS for an ephemeral port
    service0.port should not be 0
  }

  test("can bind to two random ports") {
    service0 = createService(port = 0)
    service1 = createService(port = 0)
    service0.port should not be service1.port
  }

  test("can bind to a specific port") {
    val port = 17634 + Random.nextInt(10000)
    logInfo("random port for test: " + port)
    service0 = createService(port)
    verifyServicePort(expectedPort = port, actualPort = service0.port)
  }

  test("can bind to a specific port twice and the second increments") {
    val port = 17634 + Random.nextInt(10000)
    logInfo("random port for test: " + port)
    service0 = createService(port)
    verifyServicePort(expectedPort = port, actualPort = service0.port)
    service1 = createService(service0.port)
    // `service0.port` is occupied, so `service1.port` should not be `service0.port`
    verifyServicePort(expectedPort = service0.port + 1, actualPort = service1.port)
  }

  /** Checks that `actualPort` is within the allowed retry range above `expectedPort`. */
  private def verifyServicePort(expectedPort: Int, actualPort: Int): Unit = {
    actualPort should be >= expectedPort
    // avoid testing equality in case of simultaneous tests
    // the default value for `spark.port.maxRetries` is 100 under test
    actualPort should be <= (expectedPort + 100)
  }

  /** Creates and initializes a transfer service bound to `port` (0 = random). */
  private def createService(port: Int): NettyBlockTransferService = {
    val conf = new SparkConf()
      .set("spark.app.id", s"test-${getClass.getName}")
    val securityManager = new SecurityManager(conf)
    val blockDataManager = mock(classOf[BlockDataManager])
    val service = new NettyBlockTransferService(conf, securityManager, "localhost", "localhost",
      port, 1)
    service.init(blockDataManager)
    service
  }
}
| spark0001/spark2.1.1 | core/src/test/scala/org/apache/spark/network/netty/NettyBlockTransferServiceSuite.scala | Scala | apache-2.0 | 3,320 |
// Positive compiler test: inline match combined with GADT-style typing.
// (Part of the Dotty test suite — the exact shape of this code is the test.)
object `inline-match-gadt` {
  class Exactly[T]
  // Erased value: exists only at the type level; never evaluated at runtime.
  erased def exactType[T]: Exactly[T] = compiletime.erasedValue

  // The inline match is resolved at compile time against the static type of
  // `exactType[T]`. In the first case T is known to be Int, so the literal
  // 23 typechecks at type T; otherwise the argument is returned unchanged.
  inline def foo[T](t: T): T =
    inline exactType[T] match {
      case _: Exactly[Int] => 23
      case _ => t
    }
}
| dotty-staging/dotty | tests/pos-custom-args/inline-match-gadt.scala | Scala | apache-2.0 | 235 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.process
import java.util.Date
import org.geotools.data.DataUtilities
import org.geotools.data.simple.SimpleFeatureCollection
import org.geotools.feature.simple.{SimpleFeatureBuilder, SimpleFeatureTypeBuilder}
import org.geotools.geometry.jts.{JTS, JTSFactoryFinder}
import org.geotools.process.factory.{DescribeParameter, DescribeProcess, DescribeResult}
import org.geotools.process.vector.VectorProcess
import org.geotools.referencing.crs.DefaultGeographicCRS
import org.joda.time.DateTime
import org.joda.time.DateTime.Property
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.`type`.GeometryType
import org.opengis.feature.simple.SimpleFeature
@DescribeProcess(title = "Point2PointProcess", description = "Aggregates a collection of points into a collection of line segments")
class Point2PointProcess extends VectorProcess {

  // Output schema template: a single default-geometry LineString attribute in WGS84.
  // The grouping attribute from the input schema is appended per request in execute().
  private val baseType = SimpleFeatureTypes.createType("geomesa", "point2point", "*ls:LineString:srid=4326")

  private val gf = JTSFactoryFinder.getGeometryFactory

  /**
   * Turns each group of input points into line segments connecting consecutive points.
   *
   * Features are grouped by `groupingField`, sorted by the Date-valued `sortField`,
   * optionally re-partitioned per calendar day, then paired off with sliding(2) so
   * every adjacent pair of points yields one two-point LineString feature carrying
   * the grouping value.
   *
   * @param data                 input feature collection of points
   * @param groupingField        attribute name used to partition the inputs
   * @param sortField            Date attribute that orders points within a group
   * @param minPoints            minimum number of points a group must have
   * @param breakOnDay           if true, never connect points from different days
   * @param filterSingularPoints if true, drop zero-length segments
   */
  @DescribeResult(name = "result", description = "Aggregated feature collection")
  def execute(
      @DescribeParameter(name = "data", description = "Input feature collection")
      data: SimpleFeatureCollection,
      @DescribeParameter(name = "groupingField", description = "Field on which to group")
      groupingField: String,
      @DescribeParameter(name = "sortField", description = "Field on which to sort")
      sortField: String,
      @DescribeParameter(name = "minimumNumberOfPoints", description = "Minimum number of points")
      minPoints: Int,
      @DescribeParameter(name = "breakOnDay", description = "Break connections on day marks")
      breakOnDay: Boolean,
      @DescribeParameter(name = "filterSingularPoints", description = "Filter out segments that fall on the same point", defaultValue = "true")
      filterSingularPoints: Boolean
  ): SimpleFeatureCollection = {

    import org.locationtech.geomesa.utils.geotools.Conversions._
    import scala.collection.JavaConversions._

    val queryType = data.getSchema
    // Build the output type: base LineString schema plus the grouping attribute
    // copied verbatim from the input schema.
    val sftBuilder = new SimpleFeatureTypeBuilder()
    sftBuilder.init(baseType)
    val groupingFieldIndex = data.getSchema.indexOf(groupingField)
    sftBuilder.add(queryType.getAttributeDescriptors.get(groupingFieldIndex))

    val sft = sftBuilder.buildFeatureType()
    val builder = new SimpleFeatureBuilder(sft)

    val sortFieldIndex = data.getSchema.indexOf(sortField)

    val lineFeatures =
      data.features().toList
        .groupBy(_.get(groupingFieldIndex).asInstanceOf[String])
        // NOTE(review): strict '>' drops groups with exactly minPoints points even
        // though the parameter is described as a minimum — confirm this is intended.
        .filter { case (_, coll) => coll.size > minPoints }
        .flatMap { case (group, coll) =>

          val globalSorted = coll.sortBy(_.get(sortFieldIndex).asInstanceOf[java.util.Date])

          val groups =
            if (!breakOnDay) Array(globalSorted)
            else
              globalSorted
                .groupBy { f => getDayOfYear(sortFieldIndex, f) }
                .filter { case (_, g) => g.size >= 2 } // need at least two points in a day to create a line segment
                .map { case (_, g) => g }.toArray

          val results = groups.flatMap { sorted =>
            // Pair consecutive points; each pair becomes one two-point segment.
            sorted.sliding(2).zipWithIndex.map { case (ptLst, idx) =>

              import org.locationtech.geomesa.utils.geotools.Conversions.RichSimpleFeature

              val pts = ptLst.map(_.point.getCoordinate)
              // Great-circle length of the segment; only used by the
              // singular-point filter below.
              val length = JTS.orthodromicDistance(pts.head, pts.last, DefaultGeographicCRS.WGS84)
              val group = ptLst.head.getAttribute(groupingFieldIndex)
              val sf = builder.buildFeature(s"$group-$idx", Array[AnyRef](gf.createLineString(pts.toArray), group))
              (length, sf)
            }
          }

          // Zero-length segments arise when consecutive records share a location.
          if (filterSingularPoints) results.filter { case (length, _) => length > 0.0 }.map { case (_, sf) => sf }
          else results.map { case (_, sf) => sf }
        }

    DataUtilities.collection(lineFeatures.toArray)
  }

  // NOTE(review): returns a joda DateTime.Property rather than an Int day number,
  // so the groupBy above relies on Property equality — confirm it is value-based.
  // Also note that the same day-of-year in different years maps to the same key.
  def getDayOfYear(sortFieldIndex: Int, f: SimpleFeature): Property =
    new DateTime(f.getAttribute(sortFieldIndex).asInstanceOf[Date]).dayOfYear()
}
| setumaven/geomesa | geomesa-process/src/main/scala/org/locationtech/geomesa/process/Point2PointProcess.scala | Scala | apache-2.0 | 4,697 |
package spark.deploy.worker
import spark.util.IntParam
import spark.util.MemoryParam
import spark.Utils
import java.lang.management.ManagementFactory
/**
 * Command-line parser for the worker.
 *
 * Defaults are taken from the environment (SPARK_WORKER_*) and the local machine,
 * then overridden by command-line flags; the single positional argument is the
 * master URL (spark://hostname:port), which is required.
 */
class WorkerArguments(args: Array[String]) {
  var ip = Utils.localIpAddress()
  var port = 0
  var webUiPort = 8081
  var cores = inferDefaultCores()
  var memory = inferDefaultMemory()
  var master: String = null

  // Check for settings in environment variables
  // (read before parse(), so explicit command-line flags take precedence)
  if (System.getenv("SPARK_WORKER_PORT") != null) {
    port = System.getenv("SPARK_WORKER_PORT").toInt
  }
  if (System.getenv("SPARK_WORKER_CORES") != null) {
    cores = System.getenv("SPARK_WORKER_CORES").toInt
  }
  if (System.getenv("SPARK_WORKER_MEMORY") != null) {
    memory = Utils.memoryStringToMb(System.getenv("SPARK_WORKER_MEMORY"))
  }
  if (System.getenv("SPARK_WORKER_WEBUI_PORT") != null) {
    webUiPort = System.getenv("SPARK_WORKER_WEBUI_PORT").toInt
  }

  parse(args.toList)

  /**
   * Recursively consumes the argument list, mutating the fields above.
   * Exits the JVM via printUsageAndExit on bad input or --help.
   */
  def parse(args: List[String]): Unit = args match {
    case ("--ip" | "-i") :: value :: tail =>
      ip = value
      parse(tail)
    case ("--port" | "-p") :: IntParam(value) :: tail =>
      port = value
      parse(tail)
    case ("--cores" | "-c") :: IntParam(value) :: tail =>
      cores = value
      parse(tail)
    case ("--memory" | "-m") :: MemoryParam(value) :: tail =>
      memory = value
      parse(tail)
    case "--webui-port" :: IntParam(value) :: tail =>
      webUiPort = value
      parse(tail)
    case ("--help" | "-h") :: tail =>
      printUsageAndExit(0)
    case value :: tail =>
      if (master != null) {  // Two positional arguments were given
        printUsageAndExit(1)
      }
      master = value
      parse(tail)
    case Nil =>
      if (master == null) {  // No positional argument was given
        printUsageAndExit(1)
      }
    // NOTE(review): unreachable — `value :: tail` and `Nil` already cover every List.
    case _ =>
      printUsageAndExit(1)
  }

  /**
   * Print usage and exit JVM with the given exit code.
   */
  def printUsageAndExit(exitCode: Int) {
    System.err.println(
      "Usage: spark-worker [options] <master>\\n" +
      "\\n" +
      "Master must be a URL of the form spark://hostname:port\\n" +
      "\\n" +
      "Options:\\n" +
      "  -c CORES, --cores CORES  Number of cores to use\\n" +
      "  -m MEM, --memory MEM     Amount of memory to use (e.g. 1000M, 2G)\\n" +
      "  -i IP, --ip IP           IP address or DNS name to listen on\\n" +
      "  -p PORT, --port PORT     Port to listen on (default: random)\\n" +
      "  --webui-port PORT        Port for web UI (default: 8081)")
    System.exit(exitCode)
  }

  // Default worker parallelism: one slot per available processor.
  def inferDefaultCores(): Int = {
    Runtime.getRuntime.availableProcessors()
  }

  // Default worker memory: total physical RAM minus 1 GB for the OS,
  // never less than 512 MB. Uses the com.sun MXBean, so this is HotSpot-specific.
  def inferDefaultMemory(): Int = {
    val bean = ManagementFactory.getOperatingSystemMXBean
      .asInstanceOf[com.sun.management.OperatingSystemMXBean]
    val totalMb = (bean.getTotalPhysicalMemorySize / 1024 / 1024).toInt
    // Leave out 1 GB for the operating system, but don't return a negative memory size
    math.max(totalMb - 1024, 512)
  }
} | ankurdave/arthur | core/src/main/scala/spark/deploy/worker/WorkerArguments.scala | Scala | bsd-3-clause | 3,050 |
package ooyala.common.akka
import akka.actor.{ActorSystem, ActorRef}
import akka.pattern.gracefulStop
import scala.concurrent.Await
/**
 * Helpers for tearing down actors and actor systems in tests, blocking until
 * shutdown has completed so that later tests start from a clean slate.
 */
object AkkaTestUtils {
  import scala.concurrent.duration._

  // Deliberately a var: some suites need a longer grace period, and changing
  // the API (e.g. an extra parameter) would break existing overload resolution.
  var timeout = 10 seconds

  /** Gracefully stops `actor` (silently ignores null) and waits for termination. */
  def shutdownAndWait(actor: ActorRef) {
    Option(actor).foreach { ref =>
      val stopped = gracefulStop(ref, timeout)
      // Await slightly longer than the stop deadline so a stop that just makes
      // the deadline is never reported as an Await timeout.
      Await.result(stopped, timeout + (1 seconds))
    }
  }

  /** Shuts down `system` (silently ignores null) and waits for termination. */
  def shutdownAndWait(system: ActorSystem) {
    Option(system).foreach { sys =>
      sys.shutdown()
      sys.awaitTermination(timeout)
    }
  }
}
| nachiketa-shukla/spark-jobserver | akka-app/src/ooyala.common.akka/AkkaTestUtils.scala | Scala | apache-2.0 | 785 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.