code
stringlengths 5
1M
| repo_name
stringlengths 5
109
| path
stringlengths 6
208
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 5
1M
|
---|---|---|---|---|---|
/*******************************************************************************
Copyright (c) 2012-2013, S-Core, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
***************************************************************************** */
package kr.ac.kaist.jsaf.analysis.cfg
import _root_.java.util.{List => JList}
import kr.ac.kaist.jsaf.exceptions.StaticError
import kr.ac.kaist.jsaf.nodes._
import kr.ac.kaist.jsaf.nodes_util.{NodeUtil => NU, IRFactory}
import kr.ac.kaist.jsaf.scala_src.nodes._
import kr.ac.kaist.jsaf.scala_src.useful.ErrorLog
import kr.ac.kaist.jsaf.scala_src.useful.Lists._
import kr.ac.kaist.jsaf.useful.HasAt
import scala.collection.immutable.HashMap
import scala.collection.mutable.{HashSet => MHashSet}
import kr.ac.kaist.jsaf.analysis.typing.Config
import kr.ac.kaist.jsaf.analysis.asserts.{ASSERTHelper => AH}
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.models.{ModelManager, DOMHelper}
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
class CFGBuilder (ir: IRRoot) {
/* Error handling
* The signal function collects errors during the disambiguation phase.
* To collect multiple errors,
* we should return a dummy value after signaling an error.
*/
// Accumulator for disambiguation-phase errors; signal() records and continues.
val errors: ErrorLog = new ErrorLog
def signal(msg:String, hasAt:HasAt) = errors.signal(msg, hasAt)
// Overload with swapped argument order; delegates to the same error log.
def signal(hasAt:HasAt, msg:String) = errors.signal(msg, hasAt)
def signal(error: StaticError) = errors.signal(error)
def getErrors(): JList[StaticError] = toJavaList(errors.errors)
// Unique names of variables captured by some inner function, computed once
// over the whole IR up front; consulted by id2cfgId to pick the variable kind.
val captured = new CapturedVariableCollector().collect(ir)
// Unique names bound by a catch clause; filled in while translating IRTry.
val isCatchVar = MHashSet[String]()
/* root rule : IRRoot -> CFG */
def build(): CFG = {
ir match {
case SIRRoot(info, fds, vds, stmts) =>
val cfg = new CFG()
// Top-level scope: hoisted function and var names become its locals.
val local_vars = namesOfFunDecls(fds) ++ namesOfVars(vds)
val fid_global = cfg.newFunction("", Nil, local_vars, "top-level", info)
cfg.setGlobalFId(fid_global)
val node_start = cfg.newBlock(fid_global)
cfg.addEdge((fid_global, LEntry), node_start)
// Function declarations are translated first (hoisting), then statements.
val ns1 = translateFunDecls(fds,cfg,List(node_start),fid_global)
val lmap: Map[String,Set[Node]]= HashMap("#return" -> Set(), "#throw" -> Set(), "#throw_end" -> Set(), "#after_catch" -> Set())
val (ns2, lmap1) = translateStmts(stmts, cfg, ns1, lmap, fid_global)
cfg.addEdge(ns2,(fid_global,LExit))
// "#throw" nodes reach the exceptional exit via exception edges; the already
// re-routed "#throw_end"/"#after_catch" nodes use ordinary edges.
cfg.addExcEdge(lmap1("#throw").toList,(fid_global,LExitExc))
cfg.addEdge(lmap1("#throw_end").toList,(fid_global,LExitExc))
cfg.addEdge(lmap1("#after_catch").toList,(fid_global,LExitExc))
// add top function
if (Config.libMode)
addTopFunction(cfg)
cfg.setUserFuncCount()
cfg
}
}
/* fdvars rule : IRFunDecl list -> LocalVars
* collects variable names from sequence of IRFunDecl, function "name" ... */
private def namesOfFunDecls(fds: List[IRFunDecl]): List[CFGId] = {
  // map preserves order and avoids the quadratic List-append of
  // foldLeft(...)(vars ++ List(...)) used previously.
  fds.map(fd => id2cfgId(fd.getFtn.getName))
}
/* vd* rule : IRVar list -> LocalVars
* collects variable names from sequence of IRVarStmt, var "name" */
private def namesOfVars(vds: List[IRVarStmt]): List[CFGId] = {
  // map preserves order and avoids the quadratic List-append of
  // foldLeft(...)(vars ++ List(...)) used previously.
  vds.map(vd => id2cfgId(vd.getLhs))
}
/* arg* rule : IRStmt list -> ArgVars
* collects variable names from sequence of IRLoad, "name" := arguments[n] */
private def namesOfArgs(loads: List[IRStmt]): List[CFGId] = {
  // Hoisting guarantees every element is an IRExprStmt of the form
  // "name" := arguments[n]. Cast per element: casting the whole list
  // (loads.asInstanceOf[List[IRExprStmt]]) is unchecked due to erasure
  // and would defer a failure to an arbitrary later use site.
  loads.map(load => id2cfgId(load.asInstanceOf[IRExprStmt].getLhs))
}
/* fd* rule : IRFunDecl list x CFG x Node list x FunctionId -> Node list */
private def translateFunDecls(fds: List[IRFunDecl], cfg: CFG, nodes: List[Node], fid: FunctionId): List[Node] = {
  // Thread the running tail-node list through each declaration in order.
  fds.foldLeft(nodes)((tailNodes, decl) => translateFunDecl(decl, cfg, tailNodes, fid))
}
// Collect event functions ids and store them in the temporary event map
private def getEventFunId(name: String, fid: FunctionId): Unit = {
  // Map the special IR handler names onto their event-map keys; any other
  // name is not an event handler and is ignored. Table-driven form replaces
  // five copy-pasted if/else branches.
  val eventKey = name match {
    case "__LOADEvent__"     => Some("#LOAD")
    case "__UNLOADEvent__"   => Some("#UNLOAD")
    case "__KEYBOARDEvent__" => Some("#KEYBOARD")
    case "__MOUSEEvent__"    => Some("#MOUSE")
    case "__OTHEREvent__"    => Some("#OTHER")
    case _                   => None
  }
  // Add this function id to the existing set for the event kind.
  eventKey.foreach { key =>
    DOMHelper.temp_eventMap += (key -> (DOMHelper.temp_eventMap(key) + fid))
  }
}
/* fd rule : IRFunDecl x CFG x Node list x FunctionId -> Node list */
private def translateFunDecl(fd: IRFunDecl, cfg: CFG, nodes: List[Node], fid: FunctionId): List[Node] = {
fd match {
case SIRFunDecl(irinfo, SIRFunctional(_,name,params,args,fds,vds,body)) =>
// Declared locals exclude names already bound as arguments.
val arg_vars = namesOfArgs(args)
val local_vars = (namesOfFunDecls(fds) ++ namesOfVars(vds)).filterNot(arg_vars.contains)
// params(1) appears to be the arguments-object id — TODO confirm IR calling convention.
val fid_new = cfg.newFunction(id2cfgId(params(1)).toString, arg_vars, local_vars, name.getOriginalName, irinfo)
// collect event function ids
//if (Config.domMode)
//  getEventFunId(name.getOriginalName, fid_new)
val node_start = cfg.newBlock(fid_new)
cfg.addEdge((fid_new, LEntry), node_start)
val lmap: Map[String,Set[Node]]= HashMap("#return" -> Set(), "#throw" -> Set(), "#throw_end" -> Set(), "#after_catch" -> Set())
// Hoisted inner declarations first, then the body, as in build().
val ns1 = translateFunDecls(fds,cfg,List(node_start),fid_new)
val (ns2,lmap1) = translateStmts(body, cfg, ns1, lmap, fid_new)
cfg.addEdge(ns2,(fid_new,LExit))
cfg.addEdge(lmap1("#return").toList,(fid_new,LExit))
cfg.addExcEdge(lmap1("#throw").toList,(fid_new,LExitExc))
cfg.addEdge(lmap1("#throw_end").toList,(fid_new,LExitExc))
cfg.addEdge(lmap1("#after_catch").toList,(fid_new,LExitExc))
// The closure allocation instruction is emitted in the ENCLOSING function.
val node_tail = getTail(cfg,nodes,fid)
cfg.addInst(node_tail,
CFGFunExpr(cfg.newInstId, irinfo, id2cfgId(name), None, fid_new,
newProgramAddr(), newProgramAddr(), None))
List(node_tail)
}
}
/* stmt* rule : IRStmt list x CFG x Node list x LabelMap x FunctionId -> Node list x LabelMap */
private def translateStmts(stmts: List[IRStmt], cfg: CFG, nodes: List[Node], lmap: Map[String, Set[Node]], fid: FunctionId) = {
// Thread both the tail nodes and the label map left-to-right through the statements.
stmts.foldLeft((nodes,lmap)){case ((tails,lmap1),stmt) => translateStmt(stmt,cfg,tails,lmap1,fid)}
}
/* stmt rule : IRStmt x CFG x Node list x LabelMap x FunctionId -> Node list x LabelMap */
private def translateStmt(stmt: IRStmt, cfg: CFG, nodes: List[Node], lmap: Map[String, Set[Node]],
fid: FunctionId): (List[Node], Map[String, Set[Node]]) = {
// Returns the tail nodes reachable after this statement plus the label map
// updated with any nodes that jump to "#return"/"#throw"/break labels.
// Comments marked "PEI" flag potentially-exception-throwing instructions.
stmt match {
case SIRNoOp(irinfo, desc) =>
val n = getTail(cfg, nodes, fid)
val noop = CFGNoOp(cfg.newInstId, irinfo, desc)
cfg.addInst(n, noop)
cfg.addFileNoOp(irinfo.getSpan().getEnd().getFileName(), noop)
(List(n), lmap)
case SIRStmtUnit(irinfo, stmts) =>
translateStmts(stmts, cfg, nodes, lmap, fid)
case SIRSeq(irinfo, stmts) =>
translateStmts(stmts, cfg, nodes, lmap, fid)
case vd:IRVarStmt =>
signal("IRVarStmt should have been hoisted.", vd)
(nodes, lmap)
case fd:IRFunDecl =>
signal("IRFunDecl should have been hoisted.", fd)
(nodes, lmap)
/* function expression: build the new function's CFG, then allocate the
 * closure at the current tail of the ENCLOSING function. */
case SIRFunExpr(irinfo, lhs, SIRFunctional(_,name,params,args,fds,vds,body)) =>
val arg_vars = namesOfArgs(args)
val local_vars = (namesOfFunDecls(fds) ++ namesOfVars(vds)).filterNot(arg_vars.contains)
val fid_new = cfg.newFunction(id2cfgId(params(1)).toString, arg_vars, local_vars, name.getOriginalName, irinfo)
val node_start = cfg.newBlock(fid_new)
cfg.addEdge((fid_new, LEntry), node_start)
val lmap_new: Map[String,Set[Node]]= HashMap("#return" -> Set(), "#throw" -> Set(), "#throw_end" -> Set(), "#after_catch" -> Set())
val ns1 = translateFunDecls(fds,cfg,List(node_start),fid_new)
val (ns2,lmap1) = translateStmts(body, cfg, ns1, lmap_new, fid_new)
cfg.addEdge(ns2,(fid_new,LExit))
cfg.addEdge(lmap1("#return").toList,(fid_new,LExit))
cfg.addExcEdge(lmap1("#throw").toList,(fid_new,LExitExc))
cfg.addEdge(lmap1("#throw_end").toList,(fid_new,LExitExc))
cfg.addEdge(lmap1("#after_catch").toList,(fid_new,LExitExc))
val node_tail = getTail(cfg,nodes,fid)
val nameCFGId = id2cfgId(name)
// A captured self-reference name needs its own binding (extra address).
if (nameCFGId.getVarKind == CapturedVar) {
cfg.addInst(node_tail,
CFGFunExpr(cfg.newInstId, irinfo, id2cfgId(lhs), Some(nameCFGId), fid_new,
newProgramAddr(), newProgramAddr(), Some(newProgramAddr())))
} else {
cfg.addInst(node_tail,
CFGFunExpr(cfg.newInstId, irinfo, id2cfgId(lhs), None, fid_new,
newProgramAddr(), newProgramAddr(), None))
}
(List(node_tail), lmap)
/* PEI : when proto is not object*/
case SIRObject(irinfo, lhs, members, proto) =>
val node_tail = getTail(cfg,nodes,fid)
proto match {
case None =>
cfg.addInst(node_tail,
CFGAlloc(cfg.newInstId, irinfo, id2cfgId(lhs), None, newProgramAddr()))
case Some(p) =>
cfg.addInst(node_tail,
CFGAlloc(cfg.newInstId, irinfo,
id2cfgId(lhs), Some(id2cfgExpr(p)), newProgramAddr()))
}
members.foreach((m) => translateMember(m, cfg, node_tail, lhs))
(List(node_tail), lmap.updated("#throw", lmap("#throw") + node_tail))
/* try statement: three supported shapes — catch only, finally only, both.
 * The finally body is duplicated per abnormal-exit label so each exit path
 * runs the finalizer before resuming its jump. */
case SIRTry(irinfo, body, name, catchblock, finblock) =>
(name, catchblock, finblock) match {
case (Some(x), Some(catb), None) =>
isCatchVar.add(x.getUniqueName)
/* try block */
val node1 = cfg.newBlock(fid)
cfg.addEdge(nodes, node1)
/* catch block */
val node2 = cfg.newBlock(fid)
cfg.addInst(node2, CFGCatch(cfg.newInstId, irinfo, id2cfgId(x)))
/* initial label map */
val lmap_try: Map[String,Set[Node]]= HashMap("#return" -> Set(), "#throw" -> Set(), "#throw_end" -> Set(), "#after_catch" -> Set())
/* try body */
val (ns1, lmap1) = translateStmt(body, cfg, List(node1), lmap_try, fid)
cfg.addExcEdge(lmap1("#throw").toList, node2)
cfg.addEdge(lmap1("#throw_end").toList, node2)
cfg.addEdge(lmap1("#after_catch").toList, node2)
/* catch body */
val (ns2, lmap2) = translateStmt(catb, cfg, List(node2), lmap1.updated("#throw", Set()).updated("#throw_end", Set()).updated("#after_catch", Set()), fid)
// Merge labels collected inside try/catch back into the outer map.
val lmap3 = lmap2.foldLeft(lmap)((m, kv) => {
if (m.contains(kv._1))
m.updated(kv._1, m(kv._1)++ kv._2)
else
m.updated(kv._1, kv._2)})
/* tail nodes */
val ns_tail =
if (ns1.size == 1 && lmap1("#throw").contains(ns1.head)) {
/* new tail node */
val node_new = cfg.newBlock(fid)
cfg.addEdge(ns1, node_new)
cfg.addEdge(ns2, node_new)
List(node_new)
}
else
ns1 ++ ns2
(ns_tail, lmap3)
case (None, None, Some(finb)) =>
/* try block */
val node1 = cfg.newBlock(fid)
cfg.addEdge(nodes, node1)
/* finally block */
val node2 = cfg.newBlock(fid)
/* initial label map */
val lmap_try: Map[String,Set[Node]]= HashMap("#return" -> Set(), "#throw" -> Set(), "#throw_end" -> Set(), "#after_catch" -> Set())
/* build try block */
val (ns1, lmap1) = translateStmt(body, cfg, List(node1), lmap_try, fid)
/* build finally block */
val (ns2, lmap2) = translateStmt(finb, cfg, List(node2), lmap, fid)
/* edge : try -> finally */
cfg.addEdge(ns1, node2)
// For each abnormal exit out of the try body, emit a duplicate of the
// finally block and re-route the exit through it.
val lmap3 = (lmap1-"#after_catch").foldLeft(lmap2)((map, kv) => {
if (!(kv._2.isEmpty)){
val node_dup = cfg.newBlock(fid)
val (ns, lm) = translateStmt(finb, cfg, List(node_dup), map, fid)
if (kv._1 == "#throw"){
cfg.addEdge(lmap1("#after_catch").toList, node_dup)
cfg.addExcEdge(kv._2.toList, node_dup)
lm.updated("#throw_end", lm("#throw_end") ++ ns)
}
else {
cfg.addEdge(kv._2.toList, node_dup)
lm.updated(kv._1, lm(kv._1) ++ ns)
}
}
else map})
(ns2, lmap3)
case (Some(x), Some(catb), Some(finb)) =>
isCatchVar.add(x.getUniqueName)
/* try block */
val node1 = cfg.newBlock(fid)
cfg.addEdge(nodes, node1)
/* catch block */
val node2 = cfg.newBlock(fid)
cfg.addInst(node2, CFGCatch(cfg.newInstId, irinfo, id2cfgId(x)))
/* finally block */
val node3 = cfg.newBlock(fid)
/* initial label map */
val lmap_try: Map[String,Set[Node]]= HashMap("#return" -> Set(), "#throw" -> Set(), "#throw_end" -> Set(), "#after_catch" -> Set())
/* build try block */
val (ns1, lmap1) = translateStmt(body, cfg, List(node1), lmap_try, fid)
/* exc edge : try -> catch */
cfg.addExcEdge(lmap1("#throw").toList, node2)
cfg.addEdge(lmap1("#throw_end").toList, node2)
cfg.addEdge(lmap1("#after_catch").toList, node2)
/* build catch block */
val (ns2, lmap2) = translateStmt(catb, cfg, List(node2), lmap1.updated("#throw", Set()).updated("#throw_end", Set()).updated("#after_catch", Set()), fid)
/* build finally block */
val (ns3, lmap3) = translateStmt(finb, cfg, List(node3), lmap, fid)
/* edge : try+catch -> finally */
cfg.addEdge(ns1 ++ ns2, node3)
// Same finalizer-duplication scheme as the finally-only case, but over
// abnormal exits from the CATCH body.
val lmap4 = (lmap2-"#after_catch").foldLeft(lmap3)((map, kv) => {
if (!(kv._2.isEmpty)){
val node_dup = cfg.newBlock(fid)
val (ns, lm) = translateStmt(finb, cfg, List(node_dup), map, fid)
if (kv._1 == "#throw"){
cfg.addEdge(lmap2("#after_catch").toList, node_dup)
cfg.addExcEdge(kv._2.toList, node_dup)
lm.updated("#throw_end", lm("#throw_end") ++ ns)
}
else {
cfg.addEdge(kv._2.toList, node_dup)
lm.updated(kv._1, lm(kv._1) ++ ns)
}
}
else map})
(ns3, lmap4)
case _ =>
signal("Wrong IRTryStmt.", stmt)
(nodes, lmap)
}
// case SIRArgs(irinfo, lhs, elements) =>
//   translateStmt(SIRArray(irinfo, lhs, elements), cfg, nodes, lmap, fid)
/* PEI : element assign */
case SIRArgs(irinfo, lhs, elements) =>
val node_tail = getTail(cfg, nodes, fid)
cfg.addInst(node_tail,
CFGAllocArg(cfg.newInstId, irinfo,
id2cfgId(lhs), elements.length, newProgramAddr()))
// Holes (None) advance the index without emitting a store.
val _ = elements.foldLeft(0){case (k, e) =>
e match {
case None => k+1
case Some(e1) => translateElement(irinfo, e1, cfg, node_tail, lhs, k)
}}
(List(node_tail), lmap.updated("#throw", lmap("#throw") + node_tail))
/* PEI : element assign */
case SIRArray(irinfo, lhs, elements) =>
val node_tail = getTail(cfg, nodes, fid)
cfg.addInst(node_tail,
CFGAllocArray(cfg.newInstId, irinfo,
id2cfgId(lhs), elements.length, newProgramAddr()))
val _ = elements.foldLeft(0){case (k, e) =>
e match {
case None => k+1
case Some(e1) => translateElement(irinfo, e1, cfg, node_tail, lhs, k)
}}
(List(node_tail), lmap.updated("#throw", lmap("#throw") + node_tail))
/* PEI : element assign */
case SIRArrayNumber(irinfo, lhs, elements) =>
val node_tail = getTail(cfg, nodes, fid)
cfg.addInst(node_tail,
CFGAllocArray(cfg.newInstId, irinfo,
id2cfgId(lhs), elements.length, newProgramAddr()))
val _ = elements.foldLeft(0){case (k, e) =>
translateDoubleElement(irinfo, e, cfg, node_tail, lhs, k)
}
(List(node_tail), lmap.updated("#throw", lmap("#throw") + node_tail))
/* break: register the current tails under the target label; control does
 * not fall through, so the statement has no tail nodes (Nil). */
case SIRBreak(irinfo, label) =>
val ns = lmap.get(label.getUniqueName) match {
case None => nodes.toSet
case Some(n) => n ++ nodes.toSet
}
(Nil, lmap.updated(label.getUniqueName, ns))
/* PEI : fun == "<>toObject" */
case SIRInternalCall(irinfo, lhs, fun@(SIRTmpId(_, originalName, uniqueName, _)), arg1, arg2) =>
val n1 = getTail(cfg, nodes, fid)
// Only these two internal calls allocate (need an address) and may throw.
val (addr,lm) = if (uniqueName.equals("<>Global<>toObject") || uniqueName.equals("<>Global<>iteratorInit")) (Some(newProgramAddr()), lmap.updated("#throw", lmap("#throw")+n1)) else (None,lmap)
val argslist = arg2 match {
case None => List(ir2cfgExpr(arg1))
case Some(arg) => List(ir2cfgExpr(arg1), id2cfgExpr(arg))
}
cfg.addInst(n1,
CFGInternalCall(cfg.newInstId, irinfo,
id2cfgId(lhs), id2cfgId(fun), argslist, addr))
(List(n1), lm)
/* PEI : call, after-call */
case SIRCall(irinfo, lhs, fun, thisB, args) =>
val n1 = getTail(cfg, nodes, fid)
val addr = newProgramAddr()
cfg.addInst(n1,
CFGCall(cfg.newInstId, irinfo,
id2cfgExpr(fun), id2cfgExpr(thisB), id2cfgExpr(args), addr))
// Call sites get dedicated after-call and after-catch successor blocks.
val n2 = cfg.newAfterCallBlock(fid, id2cfgId(lhs))
// after-catch
val n3 = cfg.newAfterCatchBlock(fid)
cfg.addCall(n1, n2, n3)
(List(n2), lmap.updated("#throw", lmap("#throw")+n1).updated("#after_catch", lmap("#after_catch")+n3))
/* PEI : construct, after-call */
case SIRNew(irinfo, lhs, cons, args) if (args.length == 2) =>
val n1 = getTail(cfg, nodes, fid)
val addr = newProgramAddr()
cfg.addInst(n1,
CFGConstruct(cfg.newInstId, irinfo,
id2cfgExpr(cons), id2cfgExpr(args(0)), id2cfgExpr(args(1)), addr))
val n2 = cfg.newAfterCallBlock(fid, id2cfgId(lhs))
// after-catch
val n3 = cfg.newAfterCatchBlock(fid)
cfg.addCall(n1, n2, n3)
(List(n2), lmap.updated("#throw", lmap("#throw")+n1).updated("#after_catch", lmap("#after_catch")+n3))
case c@SIRNew(irinfo, lhs, fun, args) =>
signal("IRNew should have two elements in args.", c)
(Nil, lmap)
/* PEI : id lookup */
case SIRDelete(irinfo, lhs, id) =>
val n = getTail(cfg, nodes, fid)
cfg.addInst(n, CFGDelete(cfg.newInstId, irinfo, id2cfgId(lhs), id2cfgExpr(id)))
(List(n), lmap.updated("#throw", lmap("#throw") + n))
/* PEI : id lookup */
case SIRDeleteProp(irinfo, lhs, obj, index) =>
val n = getTail(cfg, nodes, fid)
cfg.addInst(n,
CFGDeleteProp(cfg.newInstId, irinfo,
id2cfgId(lhs), id2cfgExpr(obj), ir2cfgExpr(index)))
(List(n), lmap.updated("#throw", lmap("#throw") + n))
/* PEI : expr == IRId */
case SIRExprStmt(irinfo, lhs, expr, _) =>
val n = getTail(cfg, nodes, fid)
cfg.addInst(n, CFGExprStmt(cfg.newInstId, irinfo, id2cfgId(lhs), ir2cfgExpr(expr)))
/* expr match {
case _:IRId => (List(n), lmap.updated("#throw", lmap("#throw") + n))
case _ => (List(n), lmap)
} */
/* XXX: temporal code for exception. */
(List(n), lmap.updated("#throw", lmap("#throw") + n))
case SIRIf(irinfo, cond, trueblock, falseblock) =>
/* true block */
val n1 = cfg.newBlock(fid)
cfg.addEdge(nodes, n1)
/* false block */
val n2 = cfg.newBlock(fid)
cfg.addEdge(nodes, n2)
/* Insert assert instructions */
val condinfo = cond.getInfo
cfg.addInst(n1, CFGAssert(cfg.newInstId, condinfo, ir2cfgExpr(cond), true))
// The false branch asserts the negation: either the transposed comparison
// operator, or a generic logical-not wrapper.
cond match {
case SIRBin(_, first, op, second) if AH.isAssertOperator(op) =>
cfg.addInst(n2,
CFGAssert(cfg.newInstId, condinfo,
CFGBin(condinfo,
ir2cfgExpr(first), AH.transIROp(op), ir2cfgExpr(second)), false))
case _ =>
cfg.addInst(n2,
CFGAssert(cfg.newInstId, condinfo,
CFGUn(condinfo, IRFactory.makeOp("!"), ir2cfgExpr(cond)), false))
}
/* true block */
val (ns1, lmap1) = translateStmt(trueblock, cfg, List(n1), lmap, fid)
/* false block */
falseblock match {
case None =>
(ns1++List(n2), lmap1.updated("#throw", lmap1("#throw") + n1 + n2))
case Some(stmt) =>
val (ns2, lmap2) = translateStmt(stmt, cfg, List(n2), lmap1, fid)
(ns1++ns2, lmap2.updated("#throw", lmap2("#throw") + n1 + n2))
}
/* labeled statement: collect breaks to the label into a join node, then
 * drop the label from the map so it cannot leak outward. */
case SIRLabelStmt(irinfo, label, stmt) =>
val n = cfg.newBlock(fid)
val (ns1, lmap1) = translateStmt(stmt, cfg, nodes, lmap.updated(label.getUniqueName, Set()), fid)
cfg.addEdge(ns1, n)
cfg.addEdge(lmap1(label.getUniqueName).toList, n)
val lmap2 = lmap1 - label.getUniqueName
(List(n), lmap2)
/* PEI : expr lookup */
case SIRReturn(irinfo, expr) =>
val n = getTail(cfg, nodes, fid)
expr match {
case None => cfg.addInst(n, CFGReturn(cfg.newInstId, irinfo, None))
case Some(x) => cfg.addInst(n, CFGReturn(cfg.newInstId, irinfo, Some(ir2cfgExpr(x))))
}
(Nil, lmap.updated("#return", lmap("#return") + n).updated("#throw", lmap("#throw") + n))
/* PEI : id lookup */
case SIRStore(irinfo, obj, index, rhs) =>
val n = getTail(cfg, nodes, fid)
cfg.addInst(n,
CFGStore(cfg.newInstId, irinfo,
id2cfgExpr(obj), ir2cfgExpr(index), ir2cfgExpr(rhs)))
(List(n), lmap.updated("#throw", lmap("#throw") + n))
case SIRThrow(irinfo, expr) =>
val n = getTail(cfg, nodes, fid)
cfg.addInst(n, CFGThrow(cfg.newInstId, irinfo, ir2cfgExpr(expr)))
(Nil, lmap.updated("#throw", lmap("#throw") + n))
case SIRWhile(irinfo, cond, body) =>
// Checks whether this while loop is originated from for-in or not.
// TODO Need to find more graceful way.
val bForin = body match {
case SIRSeq(_, stmts) if stmts.size > 0 => stmts(0) match {
case SIRInternalCall(_, _, fun@(SIRTmpId(_, _, "<>Global<>iteratorNext", _)), _, _) => true
case _ => false
}
case _ => false
}
val unrollingCount =
if (bForin) Config.defaultForinUnrollingCount
else Config.defaultUnrollingCount
// Zero unrolling: classic head/body/out loop shape with a back edge.
if(unrollingCount == 0) {
/* tail node */
val n1 = getTail(cfg, nodes, fid)
/* while loop head */
val n_head = cfg.newBlock(fid)
/* loop body */
val n2 = cfg.newBlock(fid)
/* loop out */
val n3 = cfg.newBlock(fid)
/* Insert assert instruction */
val condinfo = cond.getInfo
cfg.addInst(n2, CFGAssert(cfg.newInstId, condinfo, ir2cfgExpr(cond), true))
cond match {
case SIRBin(_, first, op, second) if AH.isAssertOperator(op) =>
cfg.addInst(n3,
CFGAssert(cfg.newInstId, condinfo,
CFGBin(condinfo,
ir2cfgExpr(first), AH.transIROp(op), ir2cfgExpr(second)), false))
case _ =>
cfg.addInst(n3,
CFGAssert(cfg.newInstId, condinfo,
CFGUn(condinfo, IRFactory.makeOp("!"), ir2cfgExpr(cond)), false))
}
/* add edge from tail to loop head */
cfg.addEdge(n1, n_head)
/* add edge from loop head to loop body */
cfg.addEdge(n_head, n2)
/* add edge from loop head to out*/
cfg.addEdge(n_head, n3)
/* build loop body */
val (ns1, lmap1) = translateStmt(body, cfg, List(n2), lmap, fid)
/* add edge from tails of loop body to loop head */
cfg.addEdge(ns1, n_head)
(List(n3), lmap1.updated("#throw", lmap1("#throw") + n2 + n3))
}
else {
// Unrolled form: the body is duplicated unrollingCount times before the
// residual head/body/out loop; newBranchBlocks builds one iteration and
// folds its label map into updatedlmap as a side effect.
var updatedlmap = lmap
def newBranchBlocks(headNode: Node): (BlockNode, BlockNode, List[Node]) = {
val trueNode = cfg.newBlock(fid) // loop body
val falseNode = cfg.newBlock(fid) // loop out
/* Insert assert instruction */
val condinfo = cond.getInfo
cfg.addInst(trueNode, CFGAssert(cfg.newInstId, condinfo, ir2cfgExpr(cond), true))
cond match {
case SIRBin(_, first, op, second) if AH.isAssertOperator(op) =>
cfg.addInst(falseNode,
CFGAssert(cfg.newInstId, condinfo,
CFGBin(condinfo,
ir2cfgExpr(first), AH.transIROp(op), ir2cfgExpr(second)), false))
case _ =>
cfg.addInst(falseNode,
CFGAssert(cfg.newInstId, condinfo,
CFGUn(condinfo, IRFactory.makeOp("!"), ir2cfgExpr(cond)), false))
}
/* build loop body */
val (leafNodes, newlmap) = translateStmt(body, cfg, List(trueNode), updatedlmap, fid)
updatedlmap = newlmap.updated("#throw", newlmap("#throw") + trueNode + falseNode)
/* add edge from loop head to loop body */
cfg.addEdge(headNode, trueNode)
/* add edge from loop head to out*/
cfg.addEdge(headNode, falseNode)
(trueNode, falseNode, leafNodes)
}
/* while loop head */
val headNode = cfg.newBlock(fid)
/* (loop body, loop out, loop body's leaf nodes) */
var (lastBodyNode, lastOutNode, lastLeafNodes) = newBranchBlocks(headNode)
/* add edge from tails of loop body to loop head */
cfg.addEdge(lastLeafNodes, headNode)
/* tail node */
var tailNode: Node = getTail(cfg, nodes, fid)
/* unrolling */
for(i <- 0 until unrollingCount) {
/* (loop body, loop out, loop body's leaf nodes) */
val (bodyNode, outNode, leafNodes) = newBranchBlocks(tailNode)
/* add edge from unrolling out to last out*/
cfg.addEdge(outNode, lastOutNode)
if(leafNodes.length > 1) {
tailNode = cfg.newBlock(fid)
cfg.addEdge(leafNodes, tailNode)
}
else tailNode = leafNodes.head
}
/* add edge from unrolled tail to loop head */
cfg.addEdge(tailNode, headNode)
(List(lastOutNode), updatedlmap)
}
case _ => {
System.err.println("* Warning: following IR statement is ignored: "+ stmt)
(nodes, lmap)
}
}
/* statements */
//case SIREval(irinfo, lhs, _, arg) => (Nil, label_map)
//case SIRWith(irinfo, expr, stmt) => (Nil, label_map)
//case SIRGetProp(irinfo, fun) => (Nil, label_map)
//case SIRSetProp(irinfo, fun) => (Nil, label_map)
}
/* mem rule : IRField x CFG x Node x IRId -> Unit */
private def translateMember(mem: IRMember, cfg: CFG, node: BlockNode, lhs: IRId): Unit = {
  mem match {
    case SIRField(irinfo, prop, expr) =>
      // Emit lhs[prop] := expr into the object-literal's allocation block.
      val lhs_expr = CFGVarRef(irinfo, id2cfgId(lhs))
      val index_expr = CFGString(prop.getUniqueName)
      cfg.addInst(node, CFGStore(cfg.newInstId, irinfo, lhs_expr, index_expr, ir2cfgExpr(expr)))
    case getOrSet =>
      // Getters/setters in object literals are not modeled; record the error.
      signal("IRGetProp, IRSetProp is not supported.", getOrSet)
      // Was `Unit` — that expression is the Unit COMPANION OBJECT, not the
      // unit value; use the unit literal instead.
      ()
  }
}
/* elem rule : IRSpanInfo x IRExpr x CFG x Node x IRId x Int-> Int */
private def translateElement(irinfo: IRSpanInfo, elem: IRExpr, cfg: CFG, node: BlockNode, lhs: IRId, index: Int): Int = {
  // Emit lhs[index] := elem and return the slot for the next element.
  val target = CFGVarRef(irinfo, id2cfgId(lhs))
  val slot = CFGString(index.toString)
  cfg.addInst(node, CFGStore(cfg.newInstId, irinfo, target, slot, ir2cfgExpr(elem)))
  index + 1
}
private def translateDoubleElement(irinfo: IRSpanInfo, elem: Double, cfg: CFG, node: BlockNode, lhs: IRId, index: Int): Int = {
  // Emit lhs[index] := elem for a numeric literal; the CFGNumber keeps both
  // the textual form and the double value.
  val target = CFGVarRef(irinfo, id2cfgId(lhs))
  val value = CFGNumber(elem.toString, javaToScalaDouble(elem))
  cfg.addInst(node, CFGStore(cfg.newInstId, irinfo, target, CFGString(index.toString), value))
  index + 1
}
// Unbox java.lang boxed numerics into Scala primitives. Implicit defs should
// declare an explicit result type (required by Scala 3, warned since 2.13).
implicit def javaToScalaDouble(d: java.lang.Double): Double = d.doubleValue
implicit def javaToScalaLong(l: java.lang.Long): Long = l.longValue
private def isInternalCall(fname: String): Boolean = NU.isGlobalName(fname)
// Translate a pure IR expression into the corresponding CFG expression.
private def ir2cfgExpr(expr: IRExpr): CFGExpr =
expr match {
/* PEI : id lookup */
case SIRLoad(info, obj, index) =>
CFGLoad(info, id2cfgExpr(obj), ir2cfgExpr(index))
/* PEI : op \\in {instanceof, in}, id lookup */
case SIRBin(info, first, op, second) =>
CFGBin(info, ir2cfgExpr(first), op, ir2cfgExpr(second))
/* PEI : id lookup */
case SIRUn(info, op, expr) =>
CFGUn(info, op, ir2cfgExpr(expr))
case id:IRId => CFGVarRef(id.getInfo, id2cfgId(id))
case SIRThis(info) => CFGThis(info)
case SIRNumber(_, text, num) => CFGNumber(text, javaToScalaDouble(num))
case SIRString(_, str) => CFGString(str)
case SIRBool(_, bool) => CFGBool(bool)
case _:IRNull => CFGNull()
}
// Wrap an IR identifier as a CFG variable reference.
private def id2cfgExpr(id: IRId): CFGExpr = CFGVarRef(id.getInfo, id2cfgId(id))
private def idList2cfgIdList(id: List[IRId]): List[CFGId] = id.map(id2cfgId)
// Memo table: IR unique name -> disambiguated CFG name (shared by id2cfgId).
private var nameEnv: Map[String, String] = HashMap()
// Counter used to mint fresh suffixes for internal (compiler-generated) names.
private var uniqueNameCounter = 0
/**
 * Translates an IR identifier into a CFGId, disambiguating internal names.
 *
 * The disambiguated name is looked up in (or freshly added to) nameEnv;
 * the CFGId variable kind is derived from whether the id is global,
 * captured, and/or bound by a catch clause. The previous version duplicated
 * the whole CFGId-construction match in both the cache-hit and cache-miss
 * branches; it is now shared.
 */
private def id2cfgId(id: IRId): CFGId = {
  val text = id.getUniqueName
  val name = nameEnv.get(text) match {
    case Some(s) =>
      // previously mapped name
      s
    case None =>
      // User-visible and global names keep their text; internal names get a
      // fresh numeric suffix in place of the significant-bits tail.
      val fresh =
        if (!NU.isInternal(text) || NU.isGlobalName(text)) text
        else {
          uniqueNameCounter += 1
          text.dropRight(NU.significantBits) + uniqueNameCounter.toString
        }
      nameEnv += (text -> fresh)
      fresh
  }
  id match {
    case id: IRUserId =>
      if (id.isGlobal)
        CFGUserId(id.getInfo, name, GlobalVar, id.getOriginalName, id.isWith)
      else if (captured(text)) {
        if (isCatchVar(text))
          CFGUserId(id.getInfo, name, CapturedCatchVar, id.getOriginalName, id.isWith)
        else CFGUserId(id.getInfo, name, CapturedVar, id.getOriginalName, id.isWith)
      }
      else CFGUserId(id.getInfo, name, PureLocalVar, id.getOriginalName, id.isWith)
    case id: IRTmpId =>
      if (id.isGlobal) CFGTempId(name, GlobalVar)
      else CFGTempId(name, PureLocalVar)
  }
}
/* getTail : CFG x Node list x FunctionId -> BlockNode */
private def getTail(cfg: CFG, nodes: List[Node], fid: FunctionId): BlockNode = {
  // Produce a single block to append instructions to, given the current tails.
  nodes match {
    // no predecessors: start a fresh block
    case Nil => cfg.newBlock(fid)
    // exactly one predecessor: reuse it as-is
    case single :: Nil => single.asInstanceOf[BlockNode]
    // several predecessors: join them into one new block
    case many =>
      val joined = cfg.newBlock(fid)
      many.foreach(cfg.addEdge(_, joined))
      joined
  }
}
// Adds the synthetic "top" function used in library mode: a single block
// that returns the <>TopVal<> global, wired to entry/exit/exceptional-exit.
def addTopFunction(cfg: CFG): Unit = {
val dummy_info = IRFactory.makeInfo(IRFactory.dummySpan("TopFunction"))
cfg.addTopFunction("", Nil, Nil, "top-function", dummy_info)
val node = cfg.newBlock(FIdTop)
cfg.addInst(node, CFGReturn(cfg.newInstId, dummy_info, Option(CFGVarRef(dummy_info, CFGTempId("<>TopVal<>", GlobalVar)))))
cfg.addEdge((FIdTop, LEntry), node)
cfg.addEdge(node, (FIdTop, LExit))
cfg.addExcEdge(node, (FIdTop, LExitExc))
}
}
|
daejunpark/jsaf
|
src/kr/ac/kaist/jsaf/analysis/cfg/CFGBuilder.scala
|
Scala
|
bsd-3-clause
| 33,416 |
package gitbucket.core.controller
import gitbucket.core.helper.xml
import gitbucket.core.model.Account
import gitbucket.core.service._
import gitbucket.core.util.Implicits._
import gitbucket.core.util.SyntaxSugars._
import gitbucket.core.util.{Keys, LDAPUtil, ReferrerAuthenticator, UsersAuthenticator}
import io.github.gitbucket.scalatra.forms._
import org.scalatra.Ok
// Concrete controller: wires the route definitions in IndexControllerBase to
// the service/authenticator implementations via mixin composition.
class IndexController extends IndexControllerBase
with RepositoryService with ActivityService with AccountService with RepositorySearchService with IssuesService
with UsersAuthenticator with ReferrerAuthenticator
trait IndexControllerBase extends ControllerBase {
self: RepositoryService with ActivityService with AccountService with RepositorySearchService
with UsersAuthenticator with ReferrerAuthenticator =>
// Credentials posted by the sign-in page.
case class SignInForm(userName: String, password: String)
// Form mapping: both fields are trimmed and required.
val signinForm = mapping(
"userName" -> trim(label("Username", text(required))),
"password" -> trim(label("Password", text(required)))
)(SignInForm.apply)
// val searchForm = mapping(
// "query" -> trim(text(required)),
// "owner" -> trim(text(required)),
// "repository" -> trim(text(required))
// )(SearchForm.apply)
//
// case class SearchForm(query: String, owner: String, repository: String)
// Dashboard: logged-in users see activity from themselves and their groups
// plus their own repositories; anonymous users see global activity and all
// visible repositories.
get("/"){
context.loginAccount.map { account =>
val visibleOwnerSet: Set[String] = Set(account.userName) ++ getGroupsByUserName(account.userName)
gitbucket.core.html.index(getRecentActivitiesByOwners(visibleOwnerSet), Nil, getUserRepositories(account.userName, withoutPhysicalInfo = true))
}.getOrElse {
gitbucket.core.html.index(getRecentActivities(), getVisibleRepositories(None, withoutPhysicalInfo = true), Nil)
}
}
// Sign-in page. A post-login redirect target is remembered in flash, but only
// when it is a relative path ("/..."), which blocks open-redirect targets.
get("/signin"){
val redirect = params.get("redirect")
if(redirect.isDefined && redirect.get.startsWith("/")){
flash += Keys.Flash.Redirect -> redirect.get
}
gitbucket.core.html.signin(flash.get("userName"), flash.get("password"), flash.get("error"))
}
// Authenticate the posted credentials; on failure, round-trip the form values
// via flash so the page can be re-filled.
post("/signin", signinForm){ form =>
authenticate(context.settings, form.userName, form.password) match {
case Some(account) => signin(account)
case None => {
flash += "userName" -> form.userName
// NOTE(review): the raw password is stored in flash (server-side session)
// to refill the form; this keeps plaintext credentials around longer than
// necessary — confirm whether the signin template still requires it.
flash += "password" -> form.password
flash += "error" -> "Sorry, your Username and/or Password is incorrect. Please try again."
redirect("/signin")
}
}
}
// Sign out: drop the whole HTTP session (login account, flash, sidebar state)
// and return to the dashboard.
get("/signout"){
  // invalidate is side-effecting, so call it with explicit parentheses.
  session.invalidate()
  redirect("/")
}
// Atom feed of recent global activity.
get("/activities.atom"){
contentType = "application/atom+xml; type=feed"
xml.feed(getRecentActivities())
}
// Persist the sidebar collapsed/expanded preference in the session.
// Any value other than "true" clears the attribute (expanded).
get("/sidebar-collapse"){
if(params("collapse") == "true"){
session.setAttribute("sidebar-collapse", "true")
} else {
session.setAttribute("sidebar-collapse", null)
}
Ok()
}
/**
 * Set account information into HttpSession and redirect.
 *
 * Accounts created via LDAP with a placeholder mail address are sent to the
 * profile-edit page first. Otherwise the flash-stored redirect target (set by
 * GET /signin) is honored, falling back to the dashboard.
 */
private def signin(account: Account) = {
session.setAttribute(Keys.Session.LoginAccount, account)
updateLastLoginDate(account.userName)
if(LDAPUtil.isDummyMailAddress(account)) {
// NOTE(review): this relies on redirect halting further processing
// (Scalatra behavior) so the code below does not also run — confirm.
redirect("/" + account.userName + "/_edit")
}
flash.get(Keys.Flash.Redirect).asInstanceOf[Option[String]].map { redirectUrl =>
// A target equal to the context path itself means "home".
if(redirectUrl.stripSuffix("/") == request.getContextPath){
redirect("/")
} else {
redirect(redirectUrl)
}
}.getOrElse {
redirect("/")
}
}
/**
* JSON API for collaborator completion.
*/
get("/_user/proposals")(usersOnly {
contentType = formats("json")
// "user"/"group" flags select which account kinds to propose.
// NOTE(review): params(...).toBoolean throws on a missing or malformed
// parameter — presumably only called from trusted UI code; confirm.
val user = params("user").toBoolean
val group = params("group").toBoolean
org.json4s.jackson.Serialization.write(
Map("options" -> (
getAllUsers(false)
.withFilter { t => (user, group) match {
case (true, true) => true
case (true, false) => !t.isGroupAccount
case (false, true) => t.isGroupAccount
case (false, false) => false
}}.map { t => t.userName }
))
)
})
/**
* JSON API for checking user or group existence.
* Returns a single string which is any of "group", "user" or "".
*/
post("/_user/existence")(usersOnly {
// "group" / "user" when the account exists, empty string otherwise.
getAccountByUserName(params("userName")).map { account =>
if(account.isGroupAccount) "group" else "user"
} getOrElse ""
})
// TODO Move to RepositoryViwerController?
// Repository-scoped search across issues, wiki pages, or code (the default).
get("/:owner/:repository/search")(referrersOnly { repository =>
  defining(params.getOrElse("q", "").trim, params.getOrElse("type", "code")){ case (query, target) =>
    // Parse the requested page number, falling back to 1 for garbage or
    // non-positive values.
    val page = try {
      val i = params.getOrElse("page", "1").toInt
      if(i <= 0) 1 else i
    } catch {
      case e: NumberFormatException => 1
    }
    // Dispatch on the (case-insensitive) search target; an empty query
    // always yields an empty result list.
    target.toLowerCase match {
      case "issue" => gitbucket.core.search.html.issues(
        if(query.nonEmpty) searchIssues(repository.owner, repository.name, query) else Nil,
        query, page, repository)
      case "wiki" => gitbucket.core.search.html.wiki(
        if(query.nonEmpty) searchWikiPages(repository.owner, repository.name, query) else Nil,
        query, page, repository)
      case _ => gitbucket.core.search.html.code(
        if(query.nonEmpty) searchFiles(repository.owner, repository.name, query) else Nil,
        query, page, repository)
    }
  }
})
// Global repository search: filters the repositories visible to the current
// user by a case-insensitive substring match on repository name or owner.
get("/search"){
  val query = params.getOrElse("query", "").trim.toLowerCase
  val visibleRepositories = getVisibleRepositories(context.loginAccount, repositoryUserName = None, withoutPhysicalInfo = true)
  val repositories = visibleRepositories.filter { repository =>
    repository.name.toLowerCase.indexOf(query) >= 0 || repository.owner.toLowerCase.indexOf(query) >= 0
  }
  // Signed-in users additionally get their own repositories passed to the
  // template; anonymous users get the full visible list instead.
  context.loginAccount.map { account =>
    gitbucket.core.search.html.repositories(query, repositories, Nil, getUserRepositories(account.userName, withoutPhysicalInfo = true))
  }.getOrElse {
    gitbucket.core.search.html.repositories(query, repositories, visibleRepositories, Nil)
  }
}
}
|
shiena/gitbucket
|
src/main/scala/gitbucket/core/controller/IndexController.scala
|
Scala
|
apache-2.0
| 6,038 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.views.helpers
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.play.views.html.helpers.{dateFieldsInline, dateFieldsFreeYearInline}
import play.twirl.api.Html
import play.api.data.Form
import play.api.data.Forms.{of => fieldOf, mapping}
import org.jsoup.Jsoup
import play.api.test.Helpers._
import play.api.data.format.Formats._
/** Verifies that the date-field view helpers render month options using long month names. */
class DateFieldsSpec extends WordSpec with Matchers {
  val months = Seq("January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December")
  case class DummyFormData(day: Int, month: Int, year: Int)
  def dummyForm = Form(
    mapping(
      "dummyField.day" -> fieldOf[Int],
      "dummyField.month" -> fieldOf[Int],
      "dummyField.year" -> fieldOf[Int]
    )(DummyFormData.apply)(DummyFormData.unapply))

  // Renders the helper output, parses the markup, and checks that every
  // month option (ids dummyField.month-1 .. -12) shows the long month name.
  private def assertLongMonthNames(rendered: Html): Unit = {
    val doc = Jsoup.parse(contentAsString(rendered))
    for ((month, i) <- months.zipWithIndex) {
      doc.getElementById(s"dummyField.month-${i+1}").text shouldBe month
    }
  }

  "The Date Fields with a freeform year input box" should {
    "Display months using long nouns" in {
      assertLongMonthNames(dateFieldsFreeYearInline(dummyForm, "dummyField", Html("label")))
    }
  }
  "The Date Fields with a limited year input box" should {
    "Display months using long nouns" in {
      assertLongMonthNames(dateFieldsInline(dummyForm, "dummyField", Html("label"), 1 to 2, None))
    }
  }
}
|
benaveryee/play-ui
|
src/test/scala/uk/gov/hmrc/play/views/helpers/DateFieldsSpec.scala
|
Scala
|
apache-2.0
| 2,198 |
package org.vitrivr.adampro.data.index.structures.va.marks
import breeze.linalg.{max, min}
import org.vitrivr.adampro.data.datatypes.vector.Vector
import org.vitrivr.adampro.data.datatypes.vector.Vector._
import org.vitrivr.adampro.data.index.IndexingTaskTuple
import org.vitrivr.adampro.data.index.structures.va.VAIndex.Marks
import org.vitrivr.adampro.utils.Logging
import scala.collection.mutable.ListBuffer
/**
* adamtwo
*
* Ivan Giangreco
* September 2015
*
* equifrequent marks generator: all cells have about the same number of points; for this we use training data, build a histogram
* and try to fit approximately the same number of points in each cell; the space along each dimension is split into cells with
* equi-frequent points
*/
private[va] object EquifrequentMarksGenerator extends MarksGenerator with Serializable with Logging {
  val SAMPLING_FREQUENCY = 10000 //number of buckets for the histogram (not the number of marks!)

  /**
   * Computes equifrequent marks per dimension: a histogram with
   * SAMPLING_FREQUENCY buckets is built from the training samples, and mark
   * positions are chosen so that roughly the same number of points falls
   * between consecutive marks.
   *
   * @param samples training samples
   * @param maxMarks maximal number of marks
   * @return one sequence of marks per dimension
   */
  private[va] def getMarks(samples: Seq[IndexingTaskTuple], maxMarks: Seq[Int]): Marks = {
    log.trace("get equifrequent marks for VA-File")
    // NOTE(review): sampleSize is computed but never used below.
    val sampleSize = samples.length
    // Component-wise minimum/maximum over all training vectors.
    val min = getMin(samples.map(_.ap_indexable))
    val max = getMax(samples.map(_.ap_indexable))
    val dimensionality = min.length
    // One histogram accumulator per dimension.
    val dimData = (0 until dimensionality).map(dim => Distribution(min(dim), max(dim), SAMPLING_FREQUENCY))
    samples.foreach { sample =>
      var i = 0
      while (i < dimensionality) {
        dimData(i).add(sample.ap_indexable(i))
        i += 1
      }
    }
    (0 until dimensionality).map({ dim =>
      if (maxMarks(dim) > 2) {
        val hist = dimData(dim).histogram
        // marks(0) is never assigned in the loop below (j starts at 1) and
        // keeps its array default value.
        // NOTE(review): presumably the first mark should be min(dim) —
        // confirm against the index's usage of these marks.
        val marks = new Array[VectorBase](maxMarks(dim) - 1)
        var k = 0  // current histogram bucket
        var sum = 0  // number of points consumed so far
        for (j <- 1 until (maxMarks(dim) - 1)) {
          // Target number of points for the next cell: remaining points
          // divided by remaining cells.
          var n = (hist.sum - sum) / (maxMarks(dim) - 1 - j)
          // Advance buckets until the cell holds about n points; the
          // stopping rule alternates with the parity of j (overshoot on odd
          // j, stop before exceeding on even j).
          while ((j % 2 == 1 && k < hist.length && n > 0) || (j % 2 == 0 && k < hist.length && n > hist(k))) {
            sum += hist(k)
            n -= hist(k)
            k += 1
          }
          // Convert the bucket index back to a coordinate in [min, max].
          marks(j) = min(dim) + k.toFloat * (max(dim) - min(dim)) / SAMPLING_FREQUENCY.toFloat
        }
        marks.toSeq ++ Seq(max(dim))
      } else {
        // Too few marks requested: degenerate to the dimension's range.
        Seq(min(dim), max(dim))
      }
    })
  }

  /**
   * Component-wise minimum over all vectors.
   *
   * @param data non-empty sequence of vectors of equal dimensionality
   * @return vector of per-component minima
   */
  private def getMin(data: Seq[MathVector]): MathVector = {
    val dimensionality = data.head.size
    val base: MathVector = Vector.conv_draw2vec(Seq.fill(dimensionality)(Vector.maxValue))
    data.foldLeft(base)((baseV, newV) => min(baseV, newV))
  }

  /**
   * Component-wise maximum over all vectors.
   *
   * @param data non-empty sequence of vectors of equal dimensionality
   * @return vector of per-component maxima
   */
  private def getMax(data: Seq[MathVector]): MathVector = {
    val dimensionality = data.head.size
    val base: MathVector = Vector.conv_draw2vec(Seq.fill(dimensionality)(Vector.minValue))
    data.foldLeft(base)((baseV, newV) => max(baseV, newV))
  }

  /**
   * Accumulates scalar observations for one dimension and renders them as a
   * fixed-width histogram over [min, max].
   *
   * @param min lower bound of the dimension
   * @param max upper bound of the dimension
   * @param sampling_frequency number of histogram buckets
   */
  private case class Distribution(min: VectorBase, max: VectorBase, sampling_frequency: Int) {
    // Raw observations; the histogram is computed lazily from these.
    val data = new ListBuffer[VectorBase]()

    /**
     * Records one observation.
     *
     * @param item observed value
     */
    def add(item: VectorBase): Unit = data += item

    /**
     * Bucket counts over [min, max]; values outside the range are clamped
     * into the first/last bucket.
     *
     * @return counts for buckets 0 until sampling_frequency
     */
    def histogram: IndexedSeq[Int] = {
      val counts = data
        .map(x => {
          // Map the value to a bucket index, clamping to [0, freq-1].
          var j = (((x - min) / (max - min)) * sampling_frequency).floor.toInt
          if (j < 0) {
            j = 0
          }
          if (j >= sampling_frequency) {
            j = sampling_frequency - 1
          }
          j
        })
        .groupBy(x => x).map { case (key, value) => (key, value.size) }
      (0 until sampling_frequency).map(counts.getOrElse(_, 0))
    }
  }
}
|
dbisUnibas/ADAMpro
|
src/main/scala/org/vitrivr/adampro/data/index/structures/va/marks/EquifrequentMarksGenerator.scala
|
Scala
|
mit
| 3,862 |
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.akkahttpjackson
import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model.ContentTypes.`application/json`
import akka.http.scaladsl.model._
import akka.http.scaladsl.unmarshalling.Unmarshaller.UnsupportedContentTypeException
import akka.http.scaladsl.unmarshalling.{ Unmarshal, Unmarshaller }
import akka.stream.ActorMaterializer
import org.scalatest.{ AsyncWordSpec, BeforeAndAfterAll, Matchers }
import scala.collection.immutable.Seq
import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
object JacksonSupportSpec {
  // Test fixture: construction fails (IllegalArgumentException via require)
  // unless bar == "bar"; the error-message test below depends on this.
  final case class Foo(bar: String) {
    require(bar == "bar", "bar must be 'bar'!")
  }
}
// Exercises JacksonSupport's (un)marshallers against Akka HTTP entities.
class JacksonSupportSpec extends AsyncWordSpec with Matchers with BeforeAndAfterAll {
  import JacksonSupport._
  import JacksonSupportSpec._

  // Akka HTTP marshalling requires an actor system and a materializer.
  private implicit val system = ActorSystem()
  private implicit val mat = ActorMaterializer()

  "JacksonSupport" should {
    import system.dispatcher

    // Round trip: case class -> JSON entity -> case class.
    "should enable marshalling and unmarshalling of case classes" in {
      val foo = Foo("bar")
      Marshal(foo)
        .to[RequestEntity]
        .flatMap(Unmarshal(_).to[Foo])
        .map(_ shouldBe foo)
    }

    // A require() failure inside the case class constructor must surface in
    // the unmarshalling error message.
    "provide proper error messages for requirement errors" in {
      val entity =
        HttpEntity(MediaTypes.`application/json`, """{ "bar": "baz" }""")
      Unmarshal(entity)
        .to[Foo]
        .failed
        .map(
          _.getMessage should include("requirement failed: bar must be 'bar'!")
        )
    }

    // An empty body must fail with NoContentException rather than a parse error.
    "fail with NoContentException when unmarshalling empty entities" in {
      val entity = HttpEntity.empty(`application/json`)
      Unmarshal(entity)
        .to[Foo]
        .failed
        .map(_ shouldBe Unmarshaller.NoContentException)
    }

    // Default unmarshaller only accepts application/json (the bare HttpEntity
    // constructor here yields text/plain).
    "fail with UnsupportedContentTypeException when Content-Type is not `application/json`" in {
      val entity = HttpEntity("""{ "bar": "bar" }""")
      Unmarshal(entity)
        .to[Foo]
        .failed
        .map(_ shouldBe UnsupportedContentTypeException(`application/json`))
    }

    // Overriding unmarshallerContentTypes widens the accepted content types.
    "allow unmarshalling with passed in Content-Types" in {
      val foo = Foo("bar")
      val `application/json-home` =
        MediaType.applicationWithFixedCharset("json-home", HttpCharsets.`UTF-8`, "json-home")
      object CustomJacksonSupport extends JacksonSupport {
        override def unmarshallerContentTypes: Seq[ContentTypeRange] =
          Seq(`application/json`, `application/json-home`)
      }
      // Shadows the default unmarshallers imported at the top of the class.
      import CustomJacksonSupport._
      val entity =
        HttpEntity(`application/json-home`, """{ "bar": "bar" }""")
      Unmarshal(entity)
        .to[Foo]
        .map(_ shouldBe foo)
    }
  }

  // Shut the actor system down after the suite; Await at the test edge is
  // acceptable here.
  override protected def afterAll() = {
    Await.ready(system.terminate(), 42.seconds)
    super.afterAll()
  }
}
|
el-dom/akka-http-json
|
akka-http-jackson/src/test/scala/de/heikoseeberger/akkahttpjackson/JacksonSupportSpec.scala
|
Scala
|
apache-2.0
| 3,434 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.io._
import java.nio.ByteBuffer
import java.util.UUID
import java.util.concurrent.Semaphore
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import org.mockito.ArgumentMatchers.{any, eq => meq}
import org.mockito.Mockito.{mock, times, verify, when}
import org.mockito.invocation.InvocationOnMock
import org.scalatest.PrivateMethodTester
import org.apache.spark.{SparkFunSuite, TaskContext}
import org.apache.spark.network._
import org.apache.spark.network.buffer.{FileSegmentManagedBuffer, ManagedBuffer}
import org.apache.spark.network.shuffle.{BlockFetchingListener, DownloadFileManager}
import org.apache.spark.network.util.LimitedInputStream
import org.apache.spark.shuffle.FetchFailedException
import org.apache.spark.util.Utils
class ShuffleBlockFetcherIteratorSuite extends SparkFunSuite with PrivateMethodTester {
// Wrapper for Mockito's doReturn: passing an explicit empty vararg sequence
// selects the (Object, Object...) overload unambiguously from Scala.
private def doReturn(value: Any) = org.mockito.Mockito.doReturn(value, Seq.empty: _*)
// Some of the tests are quite tricky because we are testing the cleanup behavior
// in the presence of faults.
/**
 * Creates a mock [[BlockTransferService]] that returns data from the given map.
 * Blocks present in `data` are reported as successful fetches; missing blocks
 * are reported as failures with a [[BlockNotFoundException]].
 */
private def createMockTransfer(data: Map[BlockId, ManagedBuffer]): BlockTransferService = {
  val transfer = mock(classOf[BlockTransferService])
  when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any())).thenAnswer(
    (invocation: InvocationOnMock) => {
      // Argument 3 holds the requested block-id strings, argument 4 the
      // listener to notify.
      val blocks = invocation.getArguments()(3).asInstanceOf[Array[String]]
      val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
      for (blockId <- blocks) {
        if (data.contains(BlockId(blockId))) {
          listener.onBlockFetchSuccess(blockId, data(BlockId(blockId)))
        } else {
          listener.onBlockFetchFailure(blockId, new BlockNotFoundException(blockId))
        }
      }
    })
  transfer
}
// Create a mock managed buffer for testing.
// Its input stream pretends to always deliver one byte per read call, and
// the buffer reports the given size.
def createMockManagedBuffer(size: Int = 1): ManagedBuffer = {
  val mockManagedBuffer = mock(classOf[ManagedBuffer])
  val in = mock(classOf[InputStream])
  when(in.read(any())).thenReturn(1)
  when(in.read(any(), any(), any())).thenReturn(1)
  when(mockManagedBuffer.createInputStream()).thenReturn(in)
  when(mockManagedBuffer.size()).thenReturn(size)
  mockManagedBuffer
}
// Happy path: three blocks served locally and two fetched remotely; also
// verifies that closing a wrapped stream releases the underlying buffer
// exactly once (close is idempotent).
test("successful 3 local reads + 2 remote reads") {
  val blockManager = mock(classOf[BlockManager])
  val localBmId = BlockManagerId("test-client", "test-client", 1)
  doReturn(localBmId).when(blockManager).blockManagerId
  // Make sure blockManager.getBlockData would return the blocks
  val localBlocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer())
  localBlocks.foreach { case (blockId, buf) =>
    doReturn(buf).when(blockManager).getBlockData(meq(blockId))
  }
  // Make sure remote blocks would return
  val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
  val remoteBlocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockId(0, 3, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 4, 0) -> createMockManagedBuffer())
  val transfer = createMockTransfer(remoteBlocks)
  val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (localBmId, localBlocks.keys.map(blockId => (blockId, 1L, 0)).toSeq),
    (remoteBmId, remoteBlocks.keys.map(blockId => (blockId, 1L, 1)).toSeq)
  ).toIterator
  val taskContext = TaskContext.empty()
  val metrics = taskContext.taskMetrics.createTempShuffleReadMetrics()
  val iterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    blocksByAddress,
    (_, in) => in,
    48 * 1024 * 1024,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    false,
    metrics,
    false)
  // 3 local blocks fetched in initialization
  verify(blockManager, times(3)).getBlockData(any())
  for (i <- 0 until 5) {
    assert(iterator.hasNext, s"iterator should have 5 elements but actually has $i elements")
    val (blockId, inputStream) = iterator.next()
    // Make sure we release buffers when a wrapped input stream is closed.
    val mockBuf = localBlocks.getOrElse(blockId, remoteBlocks(blockId))
    // Note: ShuffleBlockFetcherIterator wraps input streams in a BufferReleasingInputStream
    val wrappedInputStream = inputStream.asInstanceOf[BufferReleasingInputStream]
    verify(mockBuf, times(0)).release()
    // Reach into the wrapper's private delegate to observe close() calls.
    val delegateAccess = PrivateMethod[InputStream](Symbol("delegate"))
    verify(wrappedInputStream.invokePrivate(delegateAccess()), times(0)).close()
    wrappedInputStream.close()
    verify(mockBuf, times(1)).release()
    verify(wrappedInputStream.invokePrivate(delegateAccess()), times(1)).close()
    wrappedInputStream.close() // close should be idempotent
    verify(mockBuf, times(1)).release()
    verify(wrappedInputStream.invokePrivate(delegateAccess()), times(1)).close()
  }
  // 3 local blocks, and 2 remote blocks
  // (but from the same block manager so one call to fetchBlocks)
  verify(blockManager, times(3)).getBlockData(any())
  verify(transfer, times(1)).fetchBlocks(any(), any(), any(), any(), any(), any())
}
// Same scenario as the plain 3+2 read test, but with continuous shuffle
// blocks merged into one batch per block manager (last constructor flag,
// doBatchFetch, set to true), so only one getBlockData / fetchBlocks call
// is expected per side.
test("fetch continuous blocks in batch successful 3 local reads + 2 remote reads") {
  val blockManager = mock(classOf[BlockManager])
  val localBmId = BlockManagerId("test-client", "test-client", 1)
  doReturn(localBmId).when(blockManager).blockManagerId
  // Make sure blockManager.getBlockData would return the merged block
  val localBlocks = Seq[BlockId](
    ShuffleBlockId(0, 0, 0),
    ShuffleBlockId(0, 0, 1),
    ShuffleBlockId(0, 0, 2))
  val mergedLocalBlocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockBatchId(0, 0, 0, 3) -> createMockManagedBuffer())
  mergedLocalBlocks.foreach { case (blockId, buf) =>
    doReturn(buf).when(blockManager).getBlockData(meq(blockId))
  }
  // Make sure remote blocks would return the merged block
  val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
  val remoteBlocks = Seq[BlockId](
    ShuffleBlockId(0, 3, 0),
    ShuffleBlockId(0, 3, 1))
  val mergedRemoteBlocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockBatchId(0, 3, 0, 2) -> createMockManagedBuffer())
  val transfer = createMockTransfer(mergedRemoteBlocks)
  val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (localBmId, localBlocks.map(blockId => (blockId, 1L, 0))),
    (remoteBmId, remoteBlocks.map(blockId => (blockId, 1L, 1)))
  ).toIterator
  val taskContext = TaskContext.empty()
  val metrics = taskContext.taskMetrics.createTempShuffleReadMetrics()
  val iterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    blocksByAddress,
    (_, in) => in,
    48 * 1024 * 1024,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    false,
    metrics,
    true)
  // 3 local blocks batch fetched in initialization
  verify(blockManager, times(1)).getBlockData(any())
  for (i <- 0 until 2) {
    assert(iterator.hasNext, s"iterator should have 2 elements but actually has $i elements")
    val (blockId, inputStream) = iterator.next()
    // Make sure we release buffers when a wrapped input stream is closed.
    val mockBuf = mergedLocalBlocks.getOrElse(blockId, mergedRemoteBlocks(blockId))
    // Note: ShuffleBlockFetcherIterator wraps input streams in a BufferReleasingInputStream
    val wrappedInputStream = inputStream.asInstanceOf[BufferReleasingInputStream]
    verify(mockBuf, times(0)).release()
    // Use Symbol("delegate") instead of the deprecated symbol literal
    // 'delegate, consistent with the other tests in this suite.
    val delegateAccess = PrivateMethod[InputStream](Symbol("delegate"))
    verify(wrappedInputStream.invokePrivate(delegateAccess()), times(0)).close()
    wrappedInputStream.close()
    verify(mockBuf, times(1)).release()
    verify(wrappedInputStream.invokePrivate(delegateAccess()), times(1)).close()
    wrappedInputStream.close() // close should be idempotent
    verify(mockBuf, times(1)).release()
    verify(wrappedInputStream.invokePrivate(delegateAccess()), times(1)).close()
  }
  // 2 remote blocks batch fetched
  // (but from the same block manager so one call to fetchBlocks)
  verify(blockManager, times(1)).getBlockData(any())
  verify(transfer, times(1)).fetchBlocks(any(), any(), any(), any(), any(), any())
}
// Verifies cleanup when the task finishes before the iterator is exhausted:
// already-returned buffers are released, the in-flight one is released on
// task completion, and late-arriving blocks are neither retained nor released
// (the iterator is in "zombie" state).
test("release current unexhausted buffer in case the task completes early") {
  val blockManager = mock(classOf[BlockManager])
  val localBmId = BlockManagerId("test-client", "test-client", 1)
  doReturn(localBmId).when(blockManager).blockManagerId
  // Make sure remote blocks would return
  val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
  val blocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer())
  // Semaphore to coordinate event sequence in two different threads.
  val sem = new Semaphore(0)
  val transfer = mock(classOf[BlockTransferService])
  when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
    .thenAnswer((invocation: InvocationOnMock) => {
      val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
      Future {
        // Return the first two blocks, and wait till task completion before returning the 3rd one
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 1, 0).toString, blocks(ShuffleBlockId(0, 1, 0)))
        sem.acquire()
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 2, 0).toString, blocks(ShuffleBlockId(0, 2, 0)))
      }
    })
  val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (remoteBmId, blocks.keys.map(blockId => (blockId, 1L, 0)).toSeq)).toIterator
  val taskContext = TaskContext.empty()
  val iterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    blocksByAddress,
    (_, in) => in,
    48 * 1024 * 1024,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    false,
    taskContext.taskMetrics.createTempShuffleReadMetrics(),
    false)
  verify(blocks(ShuffleBlockId(0, 0, 0)), times(0)).release()
  iterator.next()._2.close() // close() first block's input stream
  verify(blocks(ShuffleBlockId(0, 0, 0)), times(1)).release()
  // Get the 2nd block but do not exhaust the iterator
  val subIter = iterator.next()._2
  // Complete the task; then the 2nd block buffer should be exhausted
  verify(blocks(ShuffleBlockId(0, 1, 0)), times(0)).release()
  taskContext.markTaskCompleted(None)
  verify(blocks(ShuffleBlockId(0, 1, 0)), times(1)).release()
  // The 3rd block should not be retained because the iterator is already in zombie state
  sem.release()
  verify(blocks(ShuffleBlockId(0, 2, 0)), times(0)).retain()
  verify(blocks(ShuffleBlockId(0, 2, 0)), times(0)).release()
}
// When a remote request partially fails, the successfully delivered block is
// still returned, and every subsequent next() throws FetchFailedException.
test("fail all blocks if any of the remote request fails") {
  val blockManager = mock(classOf[BlockManager])
  val localBmId = BlockManagerId("test-client", "test-client", 1)
  doReturn(localBmId).when(blockManager).blockManagerId
  // Make sure remote blocks would return
  val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
  val blocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer()
  )
  // Semaphore to coordinate event sequence in two different threads.
  val sem = new Semaphore(0)
  val transfer = mock(classOf[BlockTransferService])
  when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
    .thenAnswer((invocation: InvocationOnMock) => {
      val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
      Future {
        // Return the first block, and then fail.
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
        listener.onBlockFetchFailure(
          ShuffleBlockId(0, 1, 0).toString, new BlockNotFoundException("blah"))
        listener.onBlockFetchFailure(
          ShuffleBlockId(0, 2, 0).toString, new BlockNotFoundException("blah"))
        sem.release()
      }
    })
  val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (remoteBmId, blocks.keys.map(blockId => (blockId, 1L, 0)).toSeq))
    .toIterator
  val taskContext = TaskContext.empty()
  val iterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    blocksByAddress,
    (_, in) => in,
    48 * 1024 * 1024,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    false,
    taskContext.taskMetrics.createTempShuffleReadMetrics(),
    false)
  // Continue only after the mock calls onBlockFetchFailure
  sem.acquire()
  // The first block should be returned without an exception, and the last two should throw
  // FetchFailedExceptions (due to failure)
  iterator.next()
  intercept[FetchFailedException] { iterator.next() }
  intercept[FetchFailedException] { iterator.next() }
}
// Builds a mock ManagedBuffer of the given reported size whose input stream
// throws IOException once `corruptAt` bytes have been read.
private def mockCorruptBuffer(size: Long = 1L, corruptAt: Int = 0): ManagedBuffer = {
  val corruptStream = new CorruptStream(corruptAt)
  val corruptBuffer = mock(classOf[ManagedBuffer])
  when(corruptBuffer.size()).thenReturn(size)
  when(corruptBuffer.createInputStream()).thenReturn(corruptStream)
  corruptBuffer
}
/**
 * An InputStream that yields `corruptAt` readable bytes and then throws
 * IOException("corrupt") on every further read. The public `pos` and
 * `closed` fields are inspected by tests in this suite.
 *
 * NOTE(review): read() returns `pos`, which exceeds 255 after 255 reads and
 * so violates the InputStream single-byte contract (0..255 or -1); the
 * tests only depend on the IOException, so this behavior is kept as-is.
 *
 * The previous override of read(Array[Byte], Int, Int) merely called
 * super.read (InputStream's default implementation, which loops over
 * read()), so it was redundant and has been removed.
 */
private class CorruptStream(corruptAt: Long = 0L) extends InputStream {
  var pos = 0
  var closed = false
  override def read(): Int = {
    if (pos >= corruptAt) {
      throw new IOException("corrupt")
    } else {
      pos += 1
      pos
    }
  }
  override def close(): Unit = { closed = true }
}
// With corruption detection enabled, a corrupt remote block is retried once;
// if the retry is also corrupt, next() throws FetchFailedException.
test("retry corrupt blocks") {
  val blockManager = mock(classOf[BlockManager])
  val localBmId = BlockManagerId("test-client", "test-client", 1)
  doReturn(localBmId).when(blockManager).blockManagerId
  // Make sure remote blocks would return
  val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
  val blocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer()
  )
  // Semaphore to coordinate event sequence in two different threads.
  val sem = new Semaphore(0)
  // A FileSegmentManagedBuffer over a nonexistent file, to simulate a
  // corrupt block backed by a local file segment.
  val corruptLocalBuffer = new FileSegmentManagedBuffer(null, new File("a"), 0, 100)
  val transfer = mock(classOf[BlockTransferService])
  when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
    .thenAnswer((invocation: InvocationOnMock) => {
      val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
      Future {
        // Return the first block, and then fail.
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 1, 0).toString, mockCorruptBuffer())
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 2, 0).toString, corruptLocalBuffer)
        sem.release()
      }
    })
  val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (remoteBmId, blocks.keys.map(blockId => (blockId, 1L, 0)).toSeq)).toIterator
  val taskContext = TaskContext.empty()
  val iterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    blocksByAddress,
    (_, in) => new LimitedInputStream(in, 100),
    48 * 1024 * 1024,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    true,
    taskContext.taskMetrics.createTempShuffleReadMetrics(),
    false)
  // Continue only after the mock calls onBlockFetchFailure
  sem.acquire()
  // The first block should be returned without an exception
  val (id1, _) = iterator.next()
  assert(id1 === ShuffleBlockId(0, 0, 0))
  // Re-stub the transfer so the retry of the corrupt block is corrupt again.
  when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
    .thenAnswer((invocation: InvocationOnMock) => {
      val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
      Future {
        // Return the first block, and then fail.
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 1, 0).toString, mockCorruptBuffer())
        sem.release()
      }
    })
  // The next block is corrupt local block (the second one is corrupt and retried)
  intercept[FetchFailedException] { iterator.next() }
  sem.acquire()
  intercept[FetchFailedException] { iterator.next() }
}
// Blocks larger than maxBytesInFlight/3 are only partially buffered for the
// corruption check: corruption within the checked prefix fails immediately,
// while later corruption surfaces only when the stream is read past it.
test("big blocks are also checked for corruption") {
  val streamLength = 10000L
  val blockManager = mock(classOf[BlockManager])
  val localBlockManagerId = BlockManagerId("local-client", "local-client", 1)
  doReturn(localBlockManagerId).when(blockManager).blockManagerId
  // This stream will throw IOException when the first byte is read
  val corruptBuffer1 = mockCorruptBuffer(streamLength, 0)
  val blockManagerId1 = BlockManagerId("remote-client-1", "remote-client-1", 1)
  val shuffleBlockId1 = ShuffleBlockId(0, 1, 0)
  val blockLengths1 = Seq[Tuple3[BlockId, Long, Int]](
    (shuffleBlockId1, corruptBuffer1.size(), 1)
  )
  val streamNotCorruptTill = 8 * 1024
  // This stream will throw exception after streamNotCorruptTill bytes are read
  val corruptBuffer2 = mockCorruptBuffer(streamLength, streamNotCorruptTill)
  val blockManagerId2 = BlockManagerId("remote-client-2", "remote-client-2", 2)
  val shuffleBlockId2 = ShuffleBlockId(0, 2, 0)
  val blockLengths2 = Seq[Tuple3[BlockId, Long, Int]](
    (shuffleBlockId2, corruptBuffer2.size(), 2)
  )
  val transfer = createMockTransfer(
    Map(shuffleBlockId1 -> corruptBuffer1, shuffleBlockId2 -> corruptBuffer2))
  val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (blockManagerId1, blockLengths1),
    (blockManagerId2, blockLengths2)
  ).toIterator
  val taskContext = TaskContext.empty()
  // Deliberately small so both blocks count as "big" relative to the limit.
  val maxBytesInFlight = 3 * 1024
  val iterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    blocksByAddress,
    (_, in) => new LimitedInputStream(in, streamLength),
    maxBytesInFlight,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    true,
    taskContext.taskMetrics.createTempShuffleReadMetrics(),
    false)
  // We'll get back the block which has corruption after maxBytesInFlight/3 because the other
  // block will detect corruption on first fetch, and then get added to the queue again for
  // a retry
  val (id, st) = iterator.next()
  assert(id === shuffleBlockId2)
  // The other block will throw a FetchFailedException
  intercept[FetchFailedException] {
    iterator.next()
  }
  // Following will succeed as it reads part of the stream which is not corrupt. This will read
  // maxBytesInFlight/3 bytes from the portion copied into memory, and remaining from the
  // underlying stream
  new DataInputStream(st).readFully(
    new Array[Byte](streamNotCorruptTill), 0, streamNotCorruptTill)
  // Following will fail as it reads the remaining part of the stream which is corrupt
  intercept[FetchFailedException] { st.read() }
  // Buffers are mocked and they return the original input corrupt streams
  assert(corruptBuffer1.createInputStream().asInstanceOf[CorruptStream].closed)
  assert(corruptBuffer2.createInputStream().asInstanceOf[CorruptStream].closed)
}
// A big block partially buffered for the corruption check is exposed as a
// SequenceInputStream (checked prefix + remainder); verify the whole 10000
// bytes read back intact through that concatenated stream.
test("ensure big blocks available as a concatenated stream can be read") {
  val tmpDir = Utils.createTempDir()
  val tmpFile = new File(tmpDir, "someFile.txt")
  val os = new FileOutputStream(tmpFile)
  // 2500 ints * 4 bytes = 10000 bytes of known content.
  val buf = ByteBuffer.allocate(10000)
  for (i <- 1 to 2500) {
    buf.putInt(i)
  }
  os.write(buf.array())
  os.close()
  val managedBuffer = new FileSegmentManagedBuffer(null, tmpFile, 0, 10000)
  val blockManager = mock(classOf[BlockManager])
  val localBmId = BlockManagerId("test-client", "test-client", 1)
  doReturn(localBmId).when(blockManager).blockManagerId
  doReturn(managedBuffer).when(blockManager).getBlockData(ShuffleBlockId(0, 0, 0))
  val localBlockLengths = Seq[Tuple3[BlockId, Long, Int]](
    (ShuffleBlockId(0, 0, 0), 10000, 0)
  )
  val transfer = createMockTransfer(Map(ShuffleBlockId(0, 0, 0) -> managedBuffer))
  val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (localBmId, localBlockLengths)
  ).toIterator
  val taskContext = TaskContext.empty()
  val iterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    blocksByAddress,
    (_, in) => new LimitedInputStream(in, 10000),
    2048,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    true,
    taskContext.taskMetrics.createTempShuffleReadMetrics(),
    false)
  val (id, st) = iterator.next()
  // Check that the test setup is correct -- make sure we have a concatenated stream.
  assert (st.asInstanceOf[BufferReleasingInputStream].delegate.isInstanceOf[SequenceInputStream])
  val dst = new DataInputStream(st)
  for (i <- 1 to 2500) {
    assert(i === dst.readInt())
  }
  assert(dst.read() === -1)
  dst.close()
}
// With corruption detection disabled (detectCorrupt flag false), corrupt
// blocks are handed back to the caller without any retry or failure.
test("retry corrupt blocks (disabled)") {
  val blockManager = mock(classOf[BlockManager])
  val localBmId = BlockManagerId("test-client", "test-client", 1)
  doReturn(localBmId).when(blockManager).blockManagerId
  // Make sure remote blocks would return
  val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
  val blocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 2, 0) -> createMockManagedBuffer()
  )
  // Semaphore to coordinate event sequence in two different threads.
  val sem = new Semaphore(0)
  val transfer = mock(classOf[BlockTransferService])
  when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
    .thenAnswer((invocation: InvocationOnMock) => {
      val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
      Future {
        // Return the first block, and then fail.
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 0, 0).toString, blocks(ShuffleBlockId(0, 0, 0)))
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 1, 0).toString, mockCorruptBuffer())
        listener.onBlockFetchSuccess(
          ShuffleBlockId(0, 2, 0).toString, mockCorruptBuffer())
        sem.release()
      }
    })
  val blocksByAddress = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (remoteBmId, blocks.keys.map(blockId => (blockId, 1L, 0)).toSeq))
    .toIterator
  val taskContext = TaskContext.empty()
  val iterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    blocksByAddress,
    (_, in) => new LimitedInputStream(in, 100),
    48 * 1024 * 1024,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    false,
    taskContext.taskMetrics.createTempShuffleReadMetrics(),
    false)
  // Continue only after the mock calls onBlockFetchFailure
  sem.acquire()
  // The first block should be returned without an exception
  val (id1, _) = iterator.next()
  assert(id1 === ShuffleBlockId(0, 0, 0))
  val (id2, _) = iterator.next()
  assert(id2 === ShuffleBlockId(0, 1, 0))
  val (id3, _) = iterator.next()
  assert(id3 === ShuffleBlockId(0, 2, 0))
}
// Verifies the maxReqSizeShuffleToMem threshold: a request whose total size
// exceeds it must be streamed to disk (signalled by the iterator passing a
// DownloadFileManager to fetchBlocks); smaller requests stay in memory.
test("Blocks should be shuffled to disk when size of the request is above the" +
" threshold(maxReqSizeShuffleToMem).") {
val blockManager = mock(classOf[BlockManager])
val localBmId = BlockManagerId("test-client", "test-client", 1)
doReturn(localBmId).when(blockManager).blockManagerId
val diskBlockManager = mock(classOf[DiskBlockManager])
val tmpDir = Utils.createTempDir()
// Each createTempLocalBlock() call mints a fresh temp block id and file under tmpDir.
doReturn{
val blockId = TempLocalBlockId(UUID.randomUUID())
(blockId, new File(tmpDir, blockId.name))
}.when(diskBlockManager).createTempLocalBlock()
doReturn(diskBlockManager).when(blockManager).diskBlockManager
val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
val remoteBlocks = Map[BlockId, ManagedBuffer](
ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer())
val transfer = mock(classOf[BlockTransferService])
// Captures argument 5 of fetchBlocks; the assertions below rely on it being
// null when the iterator fetches to memory and non-null when it goes to disk.
var tempFileManager: DownloadFileManager = null
when(transfer.fetchBlocks(any(), any(), any(), any(), any(), any()))
.thenAnswer((invocation: InvocationOnMock) => {
val listener = invocation.getArguments()(4).asInstanceOf[BlockFetchingListener]
tempFileManager = invocation.getArguments()(5).asInstanceOf[DownloadFileManager]
Future {
listener.onBlockFetchSuccess(
ShuffleBlockId(0, 0, 0).toString, remoteBlocks(ShuffleBlockId(0, 0, 0)))
}
})
// Helper: constructing the iterator is enough to trigger fetchBlocks — the
// in-flight limits below are maxed out so all requests are issued eagerly.
def fetchShuffleBlock(
blocksByAddress: Iterator[(BlockManagerId, Seq[(BlockId, Long, Int)])]): Unit = {
// Set `maxBytesInFlight` and `maxReqsInFlight` to `Int.MaxValue`, so that during the
// construction of `ShuffleBlockFetcherIterator`, all requests to fetch remote shuffle blocks
// are issued. The `maxReqSizeShuffleToMem` is hard-coded as 200 here.
val taskContext = TaskContext.empty()
new ShuffleBlockFetcherIterator(
taskContext,
transfer,
blockManager,
blocksByAddress,
(_, in) => in,
maxBytesInFlight = Int.MaxValue,
maxReqsInFlight = Int.MaxValue,
maxBlocksInFlightPerAddress = Int.MaxValue,
maxReqSizeShuffleToMem = 200,
detectCorrupt = true,
false,
taskContext.taskMetrics.createTempShuffleReadMetrics(),
false)
}
val blocksByAddress1 = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
(remoteBmId, remoteBlocks.keys.map(blockId => (blockId, 100L, 0)).toSeq)).toIterator
fetchShuffleBlock(blocksByAddress1)
// `maxReqSizeShuffleToMem` is 200, which is greater than the block size 100, so don't fetch
// shuffle block to disk.
assert(tempFileManager == null)
val blocksByAddress2 = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
(remoteBmId, remoteBlocks.keys.map(blockId => (blockId, 300L, 0)).toSeq)).toIterator
fetchShuffleBlock(blocksByAddress2)
// `maxReqSizeShuffleToMem` is 200, which is smaller than the block size 300, so fetch
// shuffle block to disk.
assert(tempFileManager != null)
}
// A remote fetch that yields an empty (zero-length) buffer must surface as a
// FetchFailedException rather than being handed to the caller as data.
test("fail zero-size blocks") {
  // Local block manager is only needed so the iterator can tell local from remote.
  val blockManager = mock(classOf[BlockManager])
  doReturn(BlockManagerId("test-client", "test-client", 1)).when(blockManager).blockManagerId

  // Two remote shuffle blocks; the transfer service answers every request with a
  // zero-length buffer instead of the advertised contents.
  val remoteBmId = BlockManagerId("test-client-1", "test-client-1", 2)
  val remoteBlocks = Map[BlockId, ManagedBuffer](
    ShuffleBlockId(0, 0, 0) -> createMockManagedBuffer(),
    ShuffleBlockId(0, 1, 0) -> createMockManagedBuffer()
  )
  val transfer = createMockTransfer(remoteBlocks.mapValues(_ => createMockManagedBuffer(0)))
  val addressedBlocks = Seq[(BlockManagerId, Seq[(BlockId, Long, Int)])](
    (remoteBmId, remoteBlocks.keys.map(id => (id, 1L, 0)).toSeq))
  val taskContext = TaskContext.empty()
  val fetchIterator = new ShuffleBlockFetcherIterator(
    taskContext,
    transfer,
    blockManager,
    addressedBlocks.toIterator,
    (_, in) => in,
    48 * 1024 * 1024,
    Int.MaxValue,
    Int.MaxValue,
    Int.MaxValue,
    true,
    false,
    taskContext.taskMetrics.createTempShuffleReadMetrics(),
    false)

  // Every fetched block is empty, so the very first next() must fail loudly.
  val failure = intercept[FetchFailedException] { fetchIterator.next() }
  assert(failure.getMessage.contains("Received a zero-size buffer"))
}
}
|
caneGuy/spark
|
core/src/test/scala/org/apache/spark/storage/ShuffleBlockFetcherIteratorSuite.scala
|
Scala
|
apache-2.0
| 29,769 |
package com.sksamuel.elastic4s.searches.queries
import com.sksamuel.elastic4s.searches.QueryDefinition
import org.elasticsearch.common.geo.{GeoDistance, GeoPoint}
import org.elasticsearch.index.query.{GeoDistanceRangeQueryBuilder, GeoValidationMethod, QueryBuilders}
/**
 * Definition of an Elasticsearch geo-distance-range query: matches documents
 * whose geo point for `field` lies within the distance range [from, to] of
 * the given origin `geopoint`.
 *
 * Each optional setting is applied to the underlying builder only when set.
 * `from`/`to` are typed as `Any` because Elasticsearch accepts either a
 * numeric distance or a distance string (e.g. "10km").
 */
case class GeoDistanceRangeQueryDefinition(field: String,
                                           geopoint: GeoPoint,
                                           geoDistance: Option[GeoDistance] = None,
                                           queryName: Option[String] = None,
                                           from: Option[Any] = None,
                                           to: Option[Any] = None,
                                           ignoreUnmapped: Option[Boolean] = None,
                                           validationMethod: Option[GeoValidationMethod] = None,
                                           boost: Option[Float] = None,
                                           includeLower: Option[Boolean] = None,
                                           includeUpper: Option[Boolean] = None) extends QueryDefinition {

  /** Builds the underlying Elasticsearch query, applying every configured option. */
  def builder: GeoDistanceRangeQueryBuilder = {
    val builder = QueryBuilders.geoDistanceRangeQuery(field, geopoint)
    geoDistance.foreach(builder.geoDistance)
    includeLower.foreach(builder.includeLower)
    includeUpper.foreach(builder.includeUpper)
    from.foreach {
      case number: Number => builder.from(number)
      case str: String => builder.from(str)
    }
    to.foreach {
      case number: Number => builder.to(number)
      case str: String => builder.to(str)
    }
    boost.foreach(builder.boost)
    queryName.foreach(builder.queryName)
    validationMethod.foreach(builder.setValidationMethod)
    ignoreUnmapped.foreach(builder.ignoreUnmapped)
    // Bug fix: a stray `queryName.foreach(builder.to)` used to overwrite the
    // range's upper bound with the query name (queryName had already been
    // applied above); that call has been removed.
    builder
  }

  def geoDistance(geoDistance: GeoDistance): GeoDistanceRangeQueryDefinition = copy(geoDistance = Some(geoDistance))
  def from(from: Number): GeoDistanceRangeQueryDefinition = copy(from = Some(from))
  def from(from: String): GeoDistanceRangeQueryDefinition = copy(from = Some(from))
  // Bug fix: these previously did `copy(to = Some(to))`, wrapping the existing
  // `to` field (an Option inside an Option) instead of the argument. The
  // parameter keeps its historical name `from` so named-argument callers
  // remain source compatible.
  def to(from: Number): GeoDistanceRangeQueryDefinition = copy(to = Some(from))
  def to(from: String): GeoDistanceRangeQueryDefinition = copy(to = Some(from))
  def includeLower(includeLower: Boolean): GeoDistanceRangeQueryDefinition = copy(includeLower = Some(includeLower))
  def includeUpper(includeUpper: Boolean): GeoDistanceRangeQueryDefinition = copy(includeUpper = Some(includeUpper))
  def ignoreUnmapped(ignoreUnmapped: Boolean): GeoDistanceRangeQueryDefinition =
    copy(ignoreUnmapped = Option(ignoreUnmapped))
  def validationMethod(validationMethod: GeoValidationMethod): GeoDistanceRangeQueryDefinition =
    copy(validationMethod = Option(validationMethod))
  def boost(boost: Float): GeoDistanceRangeQueryDefinition = copy(boost = Option(boost))
  def queryName(queryName: String): GeoDistanceRangeQueryDefinition = copy(queryName = Some(queryName))
}
|
ulric260/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/GeoDistanceRangeQueryDefinition.scala
|
Scala
|
apache-2.0
| 3,010 |
/*
* Copyright 2020 ACINQ SAS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.acinq.eclair.wire
import kamon.Kamon
// Kamon instrumentation shared by the eclair wire-format (scodec) codecs.
object Monitoring {
object Metrics {
// Timers recording how long scodec takes to decode / encode a wire message.
val DecodeDuration = Kamon.timer("scodec.decode.time")
val EncodeDuration = Kamon.timer("scodec.encode.time")
}
object Tags {
// Tag key used to break the timers above down by message type.
val MessageType = "type"
}
}
|
ACINQ/eclair
|
eclair-core/src/main/scala/fr/acinq/eclair/wire/Monitoring.scala
|
Scala
|
apache-2.0
| 859 |
/**
* Copyright (C) 2009-2015 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.routing
import scala.annotation.tailrec
import scala.collection.immutable
import scala.concurrent.forkjoin.ThreadLocalRandom
import com.typesafe.config.Config
import akka.actor.ActorCell
import akka.actor.ActorRefWithCell
import akka.actor.SupervisorStrategy
import akka.dispatch.Dispatchers
import akka.japi.Util.immutableSeq
import akka.actor.ActorSystem
import akka.actor.Props
object SmallestMailboxRoutingLogic {
// Convenience factory; the logic itself is stateless, so a fresh instance is cheap.
def apply(): SmallestMailboxRoutingLogic = new SmallestMailboxRoutingLogic
}
/**
* Tries to send to the non-suspended routee with fewest messages in mailbox.
* The selection is done in this order:
* <ul>
* <li>pick any idle routee (not processing message) with empty mailbox</li>
* <li>pick any routee with empty mailbox</li>
* <li>pick routee with fewest pending messages in mailbox</li>
* <li>pick any remote routee, remote actors are consider lowest priority,
* since their mailbox size is unknown</li>
* </ul>
*/
@SerialVersionUID(1L)
class SmallestMailboxRoutingLogic extends RoutingLogic {
// Entry point of the routing logic: empty routee set short-circuits to NoRoutee,
// otherwise the scored scan below picks the best candidate.
override def select(message: Any, routees: immutable.IndexedSeq[Routee]): Routee =
if (routees.isEmpty) NoRoutee
else selectNext(routees)
// Worst-case a 2-pass inspection with mailbox size checking done on second pass, and only until no one empty is found.
// Lowest score wins, score 0 is autowin
// If no actor with score 0 is found, it will return that, or if it is terminated, a random of the entire set.
// Why? Well, in case we had 0 viable actors and all we got was the default, which is the DeadLetters, anything else is better.
// Order of interest, in ascending priority:
// 1. The NoRoutee
// 2. A Suspended ActorRef
// 3. An ActorRef with unknown mailbox size but with one message being processed
// 4. An ActorRef with unknown mailbox size that isn't processing anything
// 5. An ActorRef with a known mailbox size
// 6. An ActorRef without any messages
// Parameters:
//   targets        - candidate routees (scanned by index)
//   proposedTarget - best candidate found so far (NoRoutee initially)
//   currentScore   - score of proposedTarget; lower is better, 0 wins outright
//   at             - index currently being inspected
//   deep           - false: first pass (no mailbox-size queries); true: second pass
@tailrec private def selectNext(targets: immutable.IndexedSeq[Routee],
proposedTarget: Routee = NoRoutee,
currentScore: Long = Long.MaxValue,
at: Int = 0,
deep: Boolean = false): Routee = {
if (targets.isEmpty)
NoRoutee
else if (at >= targets.size) {
// End of a pass: after the deep pass, fall back to a random routee if the
// best candidate is terminated; after the shallow pass, rescan deeply.
if (deep) {
if (isTerminated(proposedTarget)) targets(ThreadLocalRandom.current.nextInt(targets.size)) else proposedTarget
} else selectNext(targets, proposedTarget, currentScore, 0, deep = true)
} else {
val target = targets(at)
val newScore: Long =
if (isSuspended(target)) Long.MaxValue - 1 else { //Just about better than the DeadLetters
(if (isProcessingMessage(target)) 1l else 0l) +
(if (!hasMessages(target)) 0l else { //Race between hasMessages and numberOfMessages here, unfortunate the numberOfMessages returns 0 if unknown
val noOfMsgs: Long = if (deep) numberOfMessages(target) else 0
if (noOfMsgs > 0) noOfMsgs else Long.MaxValue - 3 //Just better than a suspended actorref
})
}
// Score 0 (idle, empty mailbox) is an immediate win; otherwise keep the
// lowest-scoring candidate seen so far and continue the scan.
if (newScore == 0) target
else if (newScore < 0 || newScore >= currentScore) selectNext(targets, proposedTarget, currentScore, at + 1, deep)
else selectNext(targets, target, newScore, at + 1, deep)
}
}
// True if the routee's underlying actor ref has terminated; false for non-ActorRef routees.
protected def isTerminated(a: Routee): Boolean = a match {
case ActorRefRoutee(ref) ⇒ ref.isTerminated
case _ ⇒ false
}
/**
 * Returns true if the actor is currently processing a message.
 * It will always return false for remote actors.
 * Method is exposed to subclasses to be able to implement custom
 * routers based on mailbox and actor internal state.
 */
protected def isProcessingMessage(a: Routee): Boolean = a match {
case ActorRefRoutee(x: ActorRefWithCell) ⇒
x.underlying match {
case cell: ActorCell ⇒ cell.mailbox.isScheduled && cell.currentMessage != null
case _ ⇒ false
}
case _ ⇒ false
}
/**
 * Returns true if the actor currently has any pending messages
 * in the mailbox, i.e. the mailbox is not empty.
 * It will always return false for remote actors.
 * Method is exposed to subclasses to be able to implement custom
 * routers based on mailbox and actor internal state.
 */
protected def hasMessages(a: Routee): Boolean = a match {
case ActorRefRoutee(x: ActorRefWithCell) ⇒ x.underlying.hasMessages
case _ ⇒ false
}
/**
 * Returns true if the actor is currently suspended.
 * It will always return false for remote actors.
 * Method is exposed to subclasses to be able to implement custom
 * routers based on mailbox and actor internal state.
 */
protected def isSuspended(a: Routee): Boolean = a match {
case ActorRefRoutee(x: ActorRefWithCell) ⇒
x.underlying match {
case cell: ActorCell ⇒ cell.mailbox.isSuspended
case _ ⇒ true
}
case _ ⇒ false
}
/**
 * Returns the number of pending messages in the mailbox of the actor.
 * It will always return 0 for remote actors.
 * Method is exposed to subclasses to be able to implement custom
 * routers based on mailbox and actor internal state.
 */
protected def numberOfMessages(a: Routee): Int = a match {
case ActorRefRoutee(x: ActorRefWithCell) ⇒ x.underlying.numberOfMessages
case _ ⇒ 0
}
}
/**
* A router pool that tries to send to the non-suspended routee with fewest messages in mailbox.
* The selection is done in this order:
* <ul>
* <li>pick any idle routee (not processing message) with empty mailbox</li>
* <li>pick any routee with empty mailbox</li>
* <li>pick routee with fewest pending messages in mailbox</li>
* <li>pick any remote routee, remote actors are consider lowest priority,
* since their mailbox size is unknown</li>
* </ul>
*
* The configuration parameter trumps the constructor arguments. This means that
* if you provide `nrOfInstances` during instantiation they will be ignored if
* the router is defined in the configuration file for the actor being used.
*
* <h1>Supervision Setup</h1>
*
* Any routees that are created by a router will be created as the router's children.
* The router is therefore also the children's supervisor.
*
* The supervision strategy of the router actor can be configured with
* [[#withSupervisorStrategy]]. If no strategy is provided, routers default to
* a strategy of “always escalate”. This means that errors are passed up to the
* router's supervisor for handling.
*
* The router's supervisor will treat the error as an error with the router itself.
* Therefore a directive to stop or restart will cause the router itself to stop or
* restart. The router, in turn, will cause its children to stop and restart.
*
* @param nrOfInstances initial number of routees in the pool
*
* @param resizer optional resizer that dynamically adjust the pool size
*
* @param supervisorStrategy strategy for supervising the routees, see 'Supervision Setup'
*
* @param routerDispatcher dispatcher to use for the router head actor, which handles
* supervision, death watch and router management messages
*/
@SerialVersionUID(1L)
final case class SmallestMailboxPool(
override val nrOfInstances: Int, override val resizer: Option[Resizer] = None,
override val supervisorStrategy: SupervisorStrategy = Pool.defaultSupervisorStrategy,
override val routerDispatcher: String = Dispatchers.DefaultDispatcherId,
override val usePoolDispatcher: Boolean = false)
extends Pool with PoolOverrideUnsetConfig[SmallestMailboxPool] {
// Config-based constructor: reads only pool size, resizer and pool-dispatcher
// from the deployment config; the remaining fields keep their defaults.
def this(config: Config) =
this(
nrOfInstances = config.getInt("nr-of-instances"),
resizer = DefaultResizer.fromConfig(config),
usePoolDispatcher = config.hasPath("pool-dispatcher"))
/**
 * Java API
 * @param nr initial number of routees in the pool
 */
def this(nr: Int) = this(nrOfInstances = nr)
// The per-message routing decision is delegated to SmallestMailboxRoutingLogic.
override def createRouter(system: ActorSystem): Router = new Router(SmallestMailboxRoutingLogic())
// Pool size is fixed by the constructor argument; no per-system adjustment here.
override def nrOfInstances(sys: ActorSystem) = this.nrOfInstances
/**
 * Setting the supervisor strategy to be used for the “head” Router actor.
 */
def withSupervisorStrategy(strategy: SupervisorStrategy): SmallestMailboxPool = copy(supervisorStrategy = strategy)
/**
 * Setting the resizer to be used.
 */
def withResizer(resizer: Resizer): SmallestMailboxPool = copy(resizer = Some(resizer))
/**
 * Setting the dispatcher to be used for the router head actor, which handles
 * supervision, death watch and router management messages.
 */
def withDispatcher(dispatcherId: String): SmallestMailboxPool = copy(routerDispatcher = dispatcherId)
/**
 * Uses the resizer and/or the supervisor strategy of the given RouterConfig
 * if this RouterConfig doesn't have one, i.e. the resizer defined in code is used if
 * resizer was not defined in config.
 */
override def withFallback(other: RouterConfig): RouterConfig = this.overrideUnsetConfig(other)
}
|
jmnarloch/akka.js
|
akka-js-actor/jvm/src/main/scala/akka/routing/SmallestMailbox.scala
|
Scala
|
bsd-3-clause
| 9,302 |
package parsers
import org.specs2.mutable._
// Specs for the exercise-5 Less (CSS-superset) parser: selectors, properties,
// flat blocks, and blocks nested inside other blocks.
class Exercise5Spec extends Specification {
val ex5 = new exercises.Exercise5 {}
import ex5._
val parser = new LessParser
"Less parser" should {
"parse a selector" >> {
parser.parse(parser.selector, "div > span.foo").get must equalTo (Selector("div > span.foo"))
}
"parse a rule" >> {
// A property keeps its trailing semicolon in the parsed representation.
parser.parse(parser.property, "color: white;").get must equalTo (Property("color: white;"))
}
"parse a block" >> {
val css =
"""|.foo {
| display: none;
|}""".stripMargin
// Note: the expected selector retains the trailing space before '{' (".foo ").
parser.parse(parser.block, css).get must equalTo (Block(Selector(".foo "), List(Property("display: none;"))))
}
"parse nested blocks" >> {
val css =
"""|.foo {
| display: block;
| .bar {
| color: white;
| }
| font-family: serif;
|}""".stripMargin
// Nested blocks appear in-place between sibling properties, preserving order.
parser.parse(parser.block, css).get must equalTo (
Block(
Selector(".foo "),
List(
Property("display: block;"),
Block(
Selector(".bar "),
List(
Property("color: white;")
)
),
Property("font-family: serif;")
)
)
)
}
}
}
|
julienrf/scala-lessons
|
highlights/parsers/code/src/test/scala/parsers/Exercise5Spec.scala
|
Scala
|
mit
| 1,318 |
package iwct.graphx.networkflow
/**
* Created by yuhc on 3/25/15.
*/
// Placeholder for a max-flow algorithm on GraphX; no implementation yet.
object MaxFlow {
}
|
sjtu-iiot/graphx-algorithm
|
src/main/scala/org/apache/spark/graphx/iiot/networkflow/MaxFlow.scala
|
Scala
|
gpl-2.0
| 92 |
package org.nisshiee.crowd4s
import scalaz._, Scalaz._
import org.json4s._
import org.json4s.jackson.JsonMethods._
import scala.util.control.Exception.allCatch
// REST calls against Crowd's usermanagement API for a user's group memberships.
trait GetGroupList {
import GetGroupList._
// Groups the user belongs to directly (nested memberships not resolved).
def getDirectGroupList(username: String)(implicit conn: CrowdConnection, http: CrowdHttp) =
http.get(directPath, Map("username" -> username)) flatMap parseResponse
// Groups including memberships inherited through nested groups.
def getNestedGroupList(username: String)(implicit conn: CrowdConnection, http: CrowdHttp) =
http.get(nestedPath, Map("username" -> username)) flatMap parseResponse
}
object GetGroupList {
import ResponseParseHelper._
// Crowd REST endpoints for direct and nested group membership, respectively.
val directPath = "/rest/usermanagement/1/user/group/direct.json"
val nestedPath = "/rest/usermanagement/1/user/group/nested.json"
// Wraps the shared HTTP-status handling around the JSON body parser below.
def parseResponse = parseBasicGetResponse(parseGroupList)
// Parses the response body into the list of group names, failing with
// JsonParseError on malformed JSON or an unexpected shape.
def parseGroupList(json: String): Validation[JsonParseError, Seq[String]] = {
implicit val formats = DefaultFormats
type JsonVld[A] = Validation[JsonParseError, A]
// `sequence` needs a Semigroup to combine failures; keep only the first error.
implicit val semigroup = Semigroup.firstSemigroup[JsonParseError]
// allCatch guards against the underlying JSON parser throwing on bad input.
allCatch opt {
// NOTE(review): `\\` is json4s' deep (recursive) search; if "groups" is always
// a top-level field, the child selector `\` would be more precise — confirm
// the payload shape before changing.
parse(json) \\ "groups" match {
case JArray(l) => (l map parseGroupAst).sequence[JsonVld, String]
case _ => JsonParseError.failure
}
} getOrElse JsonParseError.failure
}
// Extracts the "name" field of a single group JSON object.
def parseGroupAst(ast: JValue): Validation[JsonParseError, String] = ast \\ "name" match {
case JString(s) => s.success
case _ => JsonParseError.failure
}
}
|
nisshiee/crowd4s
|
src/main/scala/functions/GetGroupList.scala
|
Scala
|
mit
| 1,440 |
package org.phenoscape.kb.ingest.xenbase
import scala.collection.JavaConversions._
import scala.collection.Map
import scala.collection.mutable
import scala.io.Source
import org.apache.commons.lang3.StringUtils
import org.phenoscape.kb.ingest.util.Vocab
import org.phenoscape.kb.ingest.util.Vocab._
import org.phenoscape.kb.ingest.util.OBOUtil
import org.phenoscape.kb.ingest.util.OntUtil
import org.phenoscape.scowl.Functional._
import org.phenoscape.scowl._
import org.semanticweb.owlapi.model.OWLAxiom
import org.semanticweb.owlapi.model.OWLNamedIndividual
// Converts Xenbase gene-expression data files into OWL axioms.
object XenbaseExpressionToOWL {
// Named individuals for the two Xenopus species; rdfs:labels are attached in convert().
val laevis = Individual(Vocab.XENOPUS_LAEVIS)
val tropicalis = Individual(Vocab.XENOPUS_TROPICALIS)
// Converts both species' expression files, using the shared genepage mapping.
def convert(genepageMappingsFile: Source, laevisExpressionFile: Source, tropicalisExpressionFile: Source): Set[OWLAxiom] = {
val mappings = indexGenepageMappings(genepageMappingsFile)
convert(laevisExpressionFile, mappings, laevis) ++ convert(tropicalisExpressionFile, mappings, tropicalis)
}
// Builds a gene-ID -> genepage-ID index from the tab-separated mappings file
// (column 0: genepage ID; column 1: comma-separated gene IDs).
def indexGenepageMappings(mappings: Source): Map[String, String] = {
val index = mutable.Map.empty[String, String]
for (mapping <- mappings.getLines) {
val items = mapping.split("\\t", -1)
val genepageID = StringUtils.stripToNull(items(0))
for {
geneIDs <- Option(StringUtils.stripToNull(items(1)))
geneID <- geneIDs.split(",")
} {
index(StringUtils.stripToNull(geneID)) = genepageID
}
}
index
}
// Translates every line of an expression file and adds the species labels.
def convert(expressionData: Source, genepageMappings: Map[String, String], species: OWLNamedIndividual): Set[OWLAxiom] = {
expressionData.getLines.flatMap(translate(_, genepageMappings, species)).toSet[OWLAxiom] +
(laevis Annotation (rdfsLabel, "Xenopus laevis")) +
(tropicalis Annotation (rdfsLabel, "Xenopus tropicalis"))
}
// Translates one tab-separated expression line into axioms: a GeneExpression
// individual linked to its anatomical structures (column 3), optional image
// evidence (column 7), its gene (via the genepage mapping) and the species.
// Lines whose structure column is "unspecified" are skipped.
def translate(expressionLine: String, genepageMappings: Map[String, String], species: OWLNamedIndividual): Set[OWLAxiom] = {
val items = expressionLine.split("\\t")
if (StringUtils.stripToEmpty(items(3)) == "unspecified") {
Set.empty
} else {
val axioms = mutable.Set.empty[OWLAxiom]
val expression = OntUtil.nextIndividual()
axioms.add(Declaration(expression))
axioms.add(expression Type GeneExpression)
// Column 3 lists structures separated by commas; the term ID is the first
// whitespace-delimited token of each entry.
val structureItems = items(3).split(",", -1)
for (structureItem <- structureItems) {
val structureID = StringUtils.stripToNull(structureItem.trim().split(" ")(0))
val structureType = Class(OBOUtil.iriForTermID(structureID))
val structure = OntUtil.nextIndividual()
axioms.add(Declaration(structure))
axioms.add(structure Type structureType)
axioms.add(expression Fact (occurs_in, structure))
}
val evidenceText = StringUtils.stripToEmpty(items(7))
if (evidenceText.contains("XB-IMG")) {
val image = Individual(OBOUtil.xenbaseImageIRI(evidenceText))
axioms.add(expression Fact (dcSource, image))
}
// Map.apply: throws NoSuchElementException if the gene ID (column 0) has
// no genepage mapping — the input files are assumed to be consistent.
val genepageID = genepageMappings(StringUtils.stripToNull(items(0)))
val geneIRI = XenbaseGenesToOWL.getGeneIRI(genepageID)
val gene = Individual(geneIRI)
axioms.add(Declaration(gene))
axioms.add(expression Fact (associated_with_gene, gene))
axioms.add(expression Fact (associated_with_taxon, species))
axioms.toSet
}
}
}
|
calliem/phenoscape-kb-ingest
|
src/main/scala/org/phenoscape/kb/ingest/xenbase/XenbaseExpressionToOWL.scala
|
Scala
|
mit
| 3,338 |
package moe.brianhsu.easytaipei
import com.google.android.gms.maps.model.LatLng
import com.google.maps.android.clustering.ClusterItem
// An immutable, clusterable map marker for the maps-utils ClusterManager.
case class MarkerItem(title: String, lat: Double, lng: Double, snippet: Option[String] = None) extends ClusterItem {
// Build the LatLng once so getPosition always returns the same cached instance.
val position = new LatLng(lat, lng)
override def getPosition = position
}
|
brianhsu/EasyTaipei
|
src/main/scala/dataset/MarkerItem.scala
|
Scala
|
gpl-2.0
| 332 |
import com.twitter.finagle.{Http, Service}
import com.twitter.finagle.http.{Request, Response}
import com.twitter.util.Await
//#app
// Minimal Finagle HTTP proxy: a Service is just a function Request => Future[Response],
// so a client can be passed directly to Http.serve as the server's handler.
object Proxy extends App {
val client: Service[Request, Response] =
Http.newService("www.google.com:80")
// Every request arriving on :8080 is forwarded to the client above.
val server = Http.serve(":8080", client)
// Keep the main thread alive for the lifetime of the server.
Await.ready(server)
}
//#app
|
lukiano/finagle
|
doc/src/sphinx/code/quickstart/Proxy.scala
|
Scala
|
apache-2.0
| 319 |
/**
*
* Copyright (C) 2017 University of Bamberg, Software Technologies Research Group
* <https://www.uni-bamberg.de/>, <http://www.swt-bamberg.de/>
*
* This file is part of the Data Structure Investigator (DSI) project, which received financial support by the
* German Research Foundation (DFG) under grant no. LU 1748/4-1, see
* <http://www.swt-bamberg.de/dsi/>.
*
* DSI is licensed under the GNU GENERAL PUBLIC LICENSE (Version 3), see
* the LICENSE file at the project's top-level directory for details or consult <http://www.gnu.org/licenses/>.
*
* DSI is free software: you can redistribute it and/or modify it under the
* terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or any later version.
*
* DSI is a RESEARCH PROTOTYPE and distributed WITHOUT ANY
* WARRANTY, without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* The following people contributed to the conception and realization of the present DSI distribution (in
* alphabetic order by surname):
*
* - Jan H. Boockmann
* - Gerald Lüttgen
* - Thomas Rupprecht
* - David H. White
*
*/
/**
* @author DSI
*
* DsOliOffsGroups.scala created on Jun 19, 2015
*
* Description: Store the connection configurations and the
* set of associated cells and strands
*/
package dsnaming
import boxcalculation.DsOliCell
import scala.collection.mutable.ListBuffer
import scala.collection.mutable.Set
import boxcalculation.DsOliBox
/**
* @author DSI
*
*/
/**
 * Stores, for each connection configuration, the set of cell and strand
 * tuples associated with that configuration.
 */
class DsOliOffsGroups() {

  // One entry per distinct connection configuration; the mutable set collects
  // every (cell, cell, strand, strand) tuple recorded for it.
  val conConfToCells = new ListBuffer[(DsOliConConf, Set[(DsOliCell, DsOliCell, DsOliBox, DsOliBox)])]

  /**
   * Record a cell/strand tuple under the given connection configuration,
   * creating a new entry when the configuration is not known yet.
   *
   * @param conConf the connection configuration
   * @param cellSet the cells and strands tuple to record
   * @return Boolean indicating if it was possible to add the tuple
   */
  def addCC(conConf: DsOliConConf, cellSet: (DsOliCell, DsOliCell, DsOliBox, DsOliBox)): Boolean =
    conConfToCells.find { case (cc, _) => cc == conConf } match {
      case None =>
        // First occurrence of this configuration: start a fresh entry.
        conConfToCells.append((conConf, Set[(DsOliCell, DsOliCell, DsOliBox, DsOliBox)](cellSet)))
        true
      case Some((_, cells)) =>
        // Known configuration: mutable Set.add reports whether the tuple was new.
        cells.add(cellSet)
    }
}
|
uniba-swt/DSIsrc
|
src/dsnaming/DsOliOffsGroups.scala
|
Scala
|
gpl-3.0
| 2,585 |
/*
* Copyright 2013-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.factory
import laika.io.Input
import laika.rewrite.DocumentCursor
import laika.tree.Documents.Document
import laika.tree.Elements.RewriteRule
/** Responsible for creating parser instances for a specific markup format.
* A parser is simply a function of type `Input => Document`.
*
* @author Jens Halm
*/
trait ParserFactory {
// Implemented once per supported markup format.
/** The file suffixes recognized by this parser.
 * When transforming entire directories only files with
 * names ending in one of the specified suffixes will
 * be considered.
 *
 * It is recommended not to support `txt`
 * or similarly common suffixes as this might interfere
 * with other installed formats.
 */
def fileSuffixes: Set[String]
/** The parser-specific rewrite rules that need to be
 * applied by default before rendering any document
 * produced by this parser.
 *
 * Providing additional rewrite rules is only necessary
 * if the parser produces tree elements that require
 * rewriting and are unknown to the default rewrite rules.
 */
def rewriteRules: Seq[DocumentCursor => RewriteRule]
/** Creates a new parser instance.
 * Such an instance is expected to be stateless and thread-safe,
 * thus capable of repeated and parallel executions.
 */
def newParser: Input => Document
}
|
amuramatsu/Laika
|
core/src/main/scala/laika/factory/ParserFactory.scala
|
Scala
|
apache-2.0
| 1,951 |
import scala.annotation.tailrec
object P7 {
  /** Enriches `List[List[A]]` with a `flat` operation (problem 7: flatten a nested list). */
  implicit def listToList[A](list: List[List[A]]) = new BetterList(list)

  class BetterList[A](list: List[List[A]]) {
    /**
     * Flattens one level of nesting, preserving element order.
     * Empty inner lists contribute nothing; an empty outer list yields Nil.
     */
    def flat: List[A] = {
      // Tail-recursive accumulation. The original had a dead `if (l.isEmpty)`
      // guard inside the cons case (a `::` match implies non-emptiness) and a
      // redundant outer isEmpty check (the Nil case covers it); both removed.
      @tailrec
      def flatten0(l: List[List[A]], acc: List[A]): List[A] = l match {
        case x :: xs => flatten0(xs, acc ::: x)
        case Nil     => acc
      }
      flatten0(list, Nil)
    }
  }

  def main(args: Array[String]) {
    val l = List(List(1), List(4, 5), List(), List(9, 1))
    val newList: List[Int] = l.flat
    println(newList)
    println(newList == List(1, 4, 5, 9, 1))
  }
}
|
matiwinnetou/scala99
|
src/main/scala/P7.scala
|
Scala
|
apache-2.0
| 641 |
package edu.gemini.spModel.core
import scalaz._
import Scalaz._
import org.scalacheck.Properties
import org.scalacheck.Prop.forAll
import org.scalacheck.Prop._
import org.scalacheck.Gen
import org.specs2.ScalaCheck
import org.specs2.mutable.Specification
import AlmostEqual.AlmostEqualOps
// Property-based specs for the Interpolate type class and the ==>> iLookup
// extension. The only code change versus the original is fixing the typos in
// one test name ("interoplate ... constaint" -> "interpolate ... constant").
object InterpolateSpec extends Specification with ScalaCheck with Arbitraries with Helpers {

  // Fixture: sampled values keyed by Long; interpolation happens between keys.
  val test = ==>>(
    10L -> 10.0,
    20L -> 20.0,
    30L -> 20.0,
    40L -> 15.0,
    50L -> 10.0
  )

  // Smallest and largest keys of the fixture; iLookup is only defined on [min, max].
  val min = test.findMin.get._1
  val max = test.findMax.get._1

  "iLookup" should {
    "find all values" in {
      test.keys.traverse(test.iLookup) must_== Some(test.values)
    }
    "interpolate between adjacent points" in {
      val keyPairs = test.keys zip test.keys.tail
      val valuePairs = test.values zip test.values.tail
      val interpolated = keyPairs.traverse { case (a, b) => test.iLookup((a + b) / 2) }
      // Expected result computed directly from the linear-interpolation formula.
      val computed =
        (keyPairs zip valuePairs) map { case ((k1, k2), (v1, v2)) =>
          val f = ((k2 - k1) / 2.0) / (k2 - k1)
          v1 + (v2 - v1) * f
        }
      interpolated must_== Some(computed)
    }
    "interpolate a value everywhere on the min/max range" in {
      (min |-> max).traverse(test.iLookup).isDefined
    }
    "interpolate no values outside the min/max range" ! forAll { (n: Long) =>
      test.iLookup(n).filter(_ => n < min || n > max).isEmpty
    }
  }

  "Interpolate" should {
    "work in either direction" ! forAll { (a: Int, b: Int) =>
      val x = Interpolate[Long, Double].interpolate((10L, a.toDouble), (20L, b.toDouble), 12L)
      val y = Interpolate[Long, Double].interpolate((10L, b.toDouble), (20L, a.toDouble), 18L)
      x ~= y
    }
    "be invariant at min" ! forAll { (a: Int, b: Int) =>
      val x = Interpolate[Long, Double].interpolate((10L, a.toDouble), (20L, b.toDouble), 10L)
      x ~= Some(a.toDouble)
    }
    "be invariant at max" ! forAll { (a: Int, b: Int) =>
      val x = Interpolate[Long, Double].interpolate((10L, a.toDouble), (20L, b.toDouble), 20L)
      x ~= Some(b.toDouble)
    }
    // Extrapolation past the last point continues the final segment's slope.
    "work off the end" ! forAll { (a: Int, b: Int) =>
      val x = Interpolate[Long, Double].interpolate((10L, a.toDouble), (20L, b.toDouble), 30L)
      x ~= Some(b.toDouble + (b.toDouble - a.toDouble))
    }
    "interpolate a constant value for constant domain" ! forAll { (a: Int) =>
      (0 to 30).map { n =>
        Interpolate[Long, Double].interpolate((10L, a.toDouble), (20L, a.toDouble), n)
      }.forall(_ == Some(a))
    }
    // Two samples at the same key give no usable slope.
    "yield None in the degenerate case" ! forAll { (a: Int, b: Int) =>
      (a != b) ==> {
        val x = Interpolate[Long, Double].interpolate((10L, a.toDouble), (10L, b.toDouble), 10L)
        x == None
      }
    }
  }

  "Interpolate[Coordinates]" should {
    "be consistent with Coordinates.interpolate" ! forAll { (a: Coordinates, b: Coordinates, n1: Short, n2: Short, c: Short) =>
      val f = ((c.toDouble - n1.toDouble) / (n2.toDouble - n1.toDouble))
      val c1 = Interpolate[Long, Coordinates].interpolate((n1.toLong, a), (n2.toLong, b), c.toLong)
      val c2 = Some(f).filterNot(f => f.isInfinity || f.isNaN).map(a.interpolate(b, _))
      c1 ~= c2
    }
  }
}
|
spakzad/ocs
|
bundle/edu.gemini.spModel.core/src/test/scala/edu/gemini/spModel/core/InterpolateSpec.scala
|
Scala
|
bsd-3-clause
| 3,235 |
/*
* This file is part of the "silex" library of helpers for Apache Spark.
*
* Copyright (c) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.c
*/
package com.redhat.et.silex.rdd.drop
import com.redhat.et.silex.testing.PerTestSparkContext
import org.scalatest._
// Tests for the drop/dropRight/dropWhile RDD extension methods, which emulate
// the corresponding Seq operations across partition boundaries.
class DropRDDFunctionsSpec extends FunSuite with PerTestSparkContext {
  import com.redhat.et.silex.rdd.drop.implicits._
  // drop(n) removes the first n elements in partition order; n past the end yields empty.
  test("drop") {
    val rdd = context.makeRDD(Array(1, 2, 3, 4, 5, 6), 2)
    assert(rdd.drop(0).collect() === Array(1, 2, 3, 4, 5, 6))
    assert(rdd.drop(1).collect() === Array(2, 3, 4, 5, 6))
    assert(rdd.drop(2).collect() === Array(3, 4, 5, 6))
    assert(rdd.drop(3).collect() === Array(4, 5, 6))
    assert(rdd.drop(4).collect() === Array(5, 6))
    assert(rdd.drop(5).collect() === Array(6))
    assert(rdd.drop(6).collect() === Array())
    assert(rdd.drop(7).collect() === Array())
  }
  // dropRight(n) removes the last n elements; n past the start yields empty.
  test("dropRight") {
    val rdd = context.makeRDD(Array(1, 2, 3, 4, 5, 6), 2)
    assert(rdd.dropRight(0).collect() === Array(1, 2, 3, 4, 5, 6))
    assert(rdd.dropRight(1).collect() === Array(1, 2, 3, 4, 5))
    assert(rdd.dropRight(2).collect() === Array(1, 2, 3, 4))
    assert(rdd.dropRight(3).collect() === Array(1, 2, 3))
    assert(rdd.dropRight(4).collect() === Array(1, 2))
    assert(rdd.dropRight(5).collect() === Array(1))
    assert(rdd.dropRight(6).collect() === Array())
    assert(rdd.dropRight(7).collect() === Array())
  }
  // dropWhile(p) removes the longest prefix satisfying p.
  test("dropWhile") {
    val rdd = context.makeRDD(Array(1, 2, 3, 4, 5, 6), 2)
    assert(rdd.dropWhile(_ <= 0).collect() === Array(1, 2, 3, 4, 5, 6))
    assert(rdd.dropWhile(_ <= 1).collect() === Array(2, 3, 4, 5, 6))
    assert(rdd.dropWhile(_ <= 2).collect() === Array(3, 4, 5, 6))
    assert(rdd.dropWhile(_ <= 3).collect() === Array(4, 5, 6))
    assert(rdd.dropWhile(_ <= 4).collect() === Array(5, 6))
    assert(rdd.dropWhile(_ <= 5).collect() === Array(6))
    assert(rdd.dropWhile(_ <= 6).collect() === Array())
    assert(rdd.dropWhile(_ <= 7).collect() === Array())
  }
  // Degenerate case: all operations on an empty RDD produce an empty result.
  test("empty input RDD") {
    val rdd = context.emptyRDD[Int]
    assert(rdd.drop(0).collect() === Array())
    assert(rdd.drop(1).collect() === Array())
    assert(rdd.dropRight(0).collect() === Array())
    assert(rdd.dropRight(1).collect() === Array())
    assert(rdd.dropWhile((x:Int)=>false).collect() === Array())
    assert(rdd.dropWhile((x:Int)=>true).collect() === Array())
  }
  // Exercises drop semantics on a unioned RDD whose partition order differs
  // from element order, to confirm partition-order (not value-order) dropping.
  test("filtered and unioned input") {
    val consecutive = context.makeRDD(Array(0, 1, 2, 3, 4, 5, 6, 7, 8), 3)
    val rdd0 = consecutive.filter((x:Int)=>(x % 3)==0)
    val rdd1 = consecutive.filter((x:Int)=>(x % 3)==1)
    val rdd2 = consecutive.filter((x:Int)=>(x % 3)==2)
    // input RDD: 0, 3, 6, 1, 4, 7, 2, 5, 8
    assert((rdd0 ++ rdd1 ++ rdd2).drop(6).collect() === Array(2, 5, 8))
    assert((rdd0 ++ rdd1 ++ rdd2).dropRight(6).collect() === Array(0, 3, 6))
    assert((rdd0 ++ rdd1 ++ rdd2).dropWhile(_ < 7).collect() === Array(7, 2, 5, 8))
  }
}
|
jyt109/silex
|
src/test/scala/com/redhat/et/silex/rdd/drop/drop.scala
|
Scala
|
apache-2.0
| 3,507 |
package reactivemongo.api
import scala.util.{ Try, Failure, Success }
import scala.concurrent.{ Await, ExecutionContext }
import scala.concurrent.duration.{ Duration, FiniteDuration, SECONDS }
import com.typesafe.config.Config
import akka.actor.ActorRef
import reactivemongo.core.nodeset.Authenticate
import reactivemongo.util.LazyLogger
/**
 * @param config a custom configuration (otherwise the default options are used)
 * @param classLoader a classloader used to load the actor system
 *
 * @define parsedURIParam the URI parsed by [[reactivemongo.api.MongoConnection.parseURI]]
 * @define connectionNameParam the name for the connection pool
 * @define strictUriParam if true the parsed URI must be strict, without ignored/unsupported options
 * @define nbChannelsParam the number of channels to open per node
 * @define optionsParam the options for the new connection pool
 * @define nodesParam The list of node names (e.g. ''node1.foo.com:27017''); Port is optional (27017 is used by default)
 * @define authParam the list of authentication instructions
 * @define seeConnectDBTutorial See [[http://reactivemongo.org/releases/0.12/documentation/tutorial/connect-database.html how to connect to the database]]
 * @define uriStrictParam the strict URI, that will be parsed by [[reactivemongo.api.MongoConnection.parseURI]]
 */
@deprecated("Use `reactivemongo.api.AsyncDriver`", "0.19.4")
class MongoDriver(
  protected val config: Option[Config] = None,
  protected val classLoader: Option[ClassLoader] = None) extends Driver {

  import MongoDriver.logger

  /** Keep a list of all connections so that we can terminate the actors */
  private[reactivemongo] def connections: Iterable[MongoConnection] = connectionMonitors.values

  // Internal supervision message: registers a newly created connection pool.
  private[reactivemongo] case class AddConnection(
    name: String,
    nodes: Seq[String],
    options: MongoConnectionOptions,
    mongosystem: ActorRef)

  // Placeholder actor kept only for binary compatibility; all operations are
  // unsupported. Note: `Product with Serializable` already implies
  // `java.io.Serializable`, the extra parent is redundant but harmless.
  private[reactivemongo] class SupervisorActor()
    extends akka.actor.Actor with Product
    with Serializable with java.io.Serializable {
    def receive: Receive = throw new UnsupportedOperationException()

    def canEqual(that: Any): Boolean = false
    def productArity: Int = 0
    def productElement(n: Int): Any = throw new UnsupportedOperationException()
  }

  /**
   * Closes this driver (and all its connections and resources).
   * Awaits the termination until the timeout is expired.
   */
  def close(timeout: FiniteDuration = FiniteDuration(2, SECONDS)): Unit =
    Await.result(askClose(timeout)(ExecutionContext.global), timeout) // Unsafe

  /**
   * Creates a new MongoConnection.
   *
   * $seeConnectDBTutorial
   *
   * @param nodes $nodesParam
   * @param options $optionsParam
   * @param authentications $authParam
   * @param name $connectionNameParam
   */
  @deprecated("Use `connection` without `authentications` (but possibly without `options.credentials`)", "0.14.0")
  def connection(nodes: Seq[String], options: MongoConnectionOptions = MongoConnectionOptions.default, authentications: Seq[Authenticate] = Seq.empty, name: Option[String] = None): MongoConnection = {
    // Merge legacy per-call authentications into the option credentials
    val credentials = options.credentials ++ authentications.map { a =>
      a.db -> MongoConnectionOptions.Credential(a.user, a.password)
    }

    Await.result(askConnection(nodes, options.copy(
      credentials = credentials), name), Duration.Inf)
  }

  /**
   * Creates a new MongoConnection.
   *
   * $seeConnectDBTutorial
   *
   * @param nodes $nodesParam
   */
  def connection(nodes: Seq[String]): MongoConnection = Await.result(
    askConnection(nodes, MongoConnectionOptions.default, Option.empty), Duration.Inf)

  /**
   * Creates a new MongoConnection.
   *
   * $seeConnectDBTutorial
   *
   * @param nodes $nodesParam
   * @param options $optionsParam
   */
  def connection(nodes: Seq[String], options: MongoConnectionOptions): MongoConnection = Await.result(askConnection(nodes, options, Option.empty), Duration.Inf)

  /**
   * Creates a new MongoConnection.
   *
   * $seeConnectDBTutorial
   *
   * @param nodes $nodesParam
   * @param options $optionsParam
   * @param name $connectionNameParam
   */
  def connection(
    nodes: Seq[String],
    options: MongoConnectionOptions,
    name: String): MongoConnection = Await.result(
    askConnection(nodes, options, Some(name)), Duration.Inf)

  /**
   * Creates a new MongoConnection from URI.
   *
   * $seeConnectDBTutorial
   *
   * @param uriStrict $uriStrictParam
   */
  def connection(uriStrict: String): Try[MongoConnection] =
    connection(uriStrict, name = None, strictUri = true)

  /**
   * Creates a new MongoConnection from URI.
   *
   * $seeConnectDBTutorial
   *
   * @param uri the URI to be parsed by [[reactivemongo.api.MongoConnection.parseURI]]
   * @param name $connectionNameParam
   * @param strictUri $strictUriParam
   */
  def connection(uri: String, name: Option[String], strictUri: Boolean): Try[MongoConnection] = MongoConnection.parseURI(uri).flatMap(connection(_, name, strictUri))

  /**
   * Creates a new MongoConnection from URI.
   *
   * $seeConnectDBTutorial
   *
   * @param parsedURI $parsedURIParam
   * @param name $connectionNameParam
   */
  def connection(parsedURI: MongoConnection.ParsedURI, name: Option[String]): MongoConnection = connection(parsedURI, name, strictUri = false).get // Unsafe

  /**
   * Creates a new MongoConnection from URI.
   *
   * $seeConnectDBTutorial
   *
   * @param parsedURI The URI parsed by [[reactivemongo.api.MongoConnection.parseURI]]
   * @param name $connectionNameParam
   * @param strictUri $strictUriParam
   */
  def connection(parsedURI: MongoConnection.ParsedURI, name: Option[String], strictUri: Boolean): Try[MongoConnection] = {
    if (parsedURI.ignoredOptions.nonEmpty && strictUri) {
      // Strict mode: any unsupported option is a hard failure
      Failure(new IllegalArgumentException(s"The connection URI contains unsupported options: ${parsedURI.ignoredOptions.mkString(", ")}"))
    } else {
      if (parsedURI.ignoredOptions.nonEmpty) {
        // Lenient mode: warn about the ignored options but continue
        logger.warn(s"Some options were ignored because they are not supported (yet): ${parsedURI.ignoredOptions.mkString(", ")}")
      }

      Success(connection(parsedURI.hosts.map(h => h._1 + ':' + h._2), parsedURI.options, parsedURI.authenticate.toSeq, name))
    }
  }

  /**
   * Creates a new MongoConnection from URI.
   *
   * $seeConnectDBTutorial
   *
   * @param parsedURI $parsedURIParam
   * @param strictUri $strictUriParam
   */
  def connection(parsedURI: MongoConnection.ParsedURI, strictUri: Boolean): Try[MongoConnection] = connection(parsedURI, None, strictUri)

  /**
   * Creates a new MongoConnection from URI.
   *
   * $seeConnectDBTutorial
   *
   * @param parsedURI $parsedURIParam
   */
  @deprecated("Use a safe `connection` as `Try`", "0.17.0")
  def connection(parsedURI: MongoConnection.ParsedURI): MongoConnection =
    connection(parsedURI, None, false).get // Unsafe
}
/** The driver factory */
@deprecated("Use `reactivemongo.api.AsyncDriver`", "0.19.4")
object MongoDriver {
  private val logger = LazyLogger("reactivemongo.api.MongoDriver")

  /** Creates a new [[MongoDriver]] with a new ActorSystem. */
  def apply(): MongoDriver = new MongoDriver()

  /** Creates a new [[MongoDriver]] with the given `config`. */
  def apply(config: Config): MongoDriver = new MongoDriver(Some(config), None)

  /** Creates a new [[MongoDriver]] with the given `config` and `classLoader`. */
  def apply(config: Config, classLoader: ClassLoader): MongoDriver =
    new MongoDriver(Some(config), Some(classLoader))
}
|
cchantep/ReactiveMongo
|
driver/src/main/scala/api/MongoDriver.scala
|
Scala
|
apache-2.0
| 7,487 |
package mesosphere.marathon.core.launcher
import org.rogach.scallop.ScallopConf
// Scallop configuration mixin declaring the offer-processing options.
trait OfferProcessorConfig extends ScallopConf {
  //scalastyle:off magic.number
  // Command-line option `--offer_matching_timeout`, in milliseconds (default 1000).
  lazy val offerMatchingTimeout = opt[Int]("offer_matching_timeout",
    descr = "Offer matching timeout (ms). Stop trying to match additional tasks for this offer after this time.",
    default = Some(1000))
}
|
sepiroth887/marathon
|
src/main/scala/mesosphere/marathon/core/launcher/OfferProcessorConfig.scala
|
Scala
|
apache-2.0
| 375 |
import org.scalatest.{FlatSpec, Matchers}
// Verifies that Memo-wrapped recursive functions reuse cached results.
class MemoSpec extends FlatSpec with Matchers {
  import Lib._
  // Memoized fibonacci: each fib(k) is cached, so fib(n+1) reuses fib(n)'s work.
  lazy val fib: (Int) => Int = Memo(n => if (n <= 1) n else fib(n - 1) + fib(n - 2), identity[Int], Cache.simple)
  // Memoized factorial with an LRU cache of size 1 (declared as an example; not exercised below).
  lazy val fac: (Int) => Int = Memo(n => if (n <= 2) n else n * fac(n - 1), identity[Int], Cache.LRU(1))
  "Memo" should "memoize the results of pure functions" in {
    // Unmemoized doubly-recursive fibonacci, used as a timing baseline.
    def _f(n: Int): Int = if (n <= 1) n else _f(n - 1) + _f(n - 2)
    val baseTime = time(_f(40))._2
    val (_, t1) = time(fib(45))
    val (_, t2) = time(fib(46))
    // NOTE(review): wall-clock comparisons are inherently flaky on loaded CI
    // machines — consider asserting on underlying-call counts instead. TODO confirm.
    Seq(baseTime, t1, t2).sorted shouldBe Seq(t2, t1, baseTime)
    t1.toDouble should be < (baseTime / 10.0)
    t2.toDouble should be < (t1 / 10.0)
  }
}
|
sstergou/Memo
|
src/test/scala/MemoSpec.scala
|
Scala
|
gpl-3.0
| 714 |
// Negative typing fixture (tests/typing/bad): `x` is scoped to the `then`
// branch only, so the assignment `x = 2` in the `else` branch must be
// rejected by the type checker. Do not "fix" this file.
class A { def m() { if (true) { var x = 1 } else x = 2 } }
object Main { def main(args: Array[String]) { } }
|
tobast/compil-petitscala
|
tests/typing/bad/testfile-context_ifelse-1.scala
|
Scala
|
gpl-3.0
| 109 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index.geotools
import java.time.{Instant, ZoneOffset}
import java.util.{Locale, List => jList}
import com.typesafe.scalalogging.LazyLogging
import org.geotools.data._
import org.geotools.data.simple.{SimpleFeatureSource, SimpleFeatureWriter}
import org.geotools.feature.{FeatureTypes, NameImpl}
import org.geotools.util.factory.Hints
import org.locationtech.geomesa.index.geotools.GeoMesaDataStoreFactory.NamespaceConfig
import org.locationtech.geomesa.index.metadata.GeoMesaMetadata._
import org.locationtech.geomesa.index.metadata.HasGeoMesaMetadata
import org.locationtech.geomesa.index.planning.QueryInterceptor.QueryInterceptorFactory
import org.locationtech.geomesa.index.utils.{DistributedLocking, Releasable}
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.Configs
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.InternalConfigs.TableSharingPrefix
import org.locationtech.geomesa.utils.geotools.converters.FastConverter
import org.locationtech.geomesa.utils.geotools.{FeatureUtils, GeoToolsDateFormat, SimpleFeatureTypes}
import org.locationtech.geomesa.utils.index.{GeoMesaSchemaValidator, ReservedWordCheck}
import org.locationtech.geomesa.utils.io.CloseWithLogging
import org.opengis.feature.`type`.Name
import org.opengis.feature.simple.SimpleFeatureType
import org.opengis.filter.Filter
import scala.util.control.NonFatal
/**
 * Abstract base class for data store implementations using metadata to track schemas
 */
abstract class MetadataBackedDataStore(config: NamespaceConfig) extends DataStore
    with HasGeoMesaMetadata[String] with DistributedLocking with LazyLogging {

  import scala.collection.JavaConverters._

  // TODO: GEOMESA-2360 - Remove global axis order hint from MetadataBackedDataStore
  Hints.putSystemDefault(Hints.FORCE_LONGITUDE_FIRST_AXIS_ORDER, true)

  protected [geomesa] val interceptors: QueryInterceptorFactory = QueryInterceptorFactory(this)

  // hooks to allow extended functionality

  /**
   * Called just before persisting schema metadata. Allows for validation or configuration of user data
   *
   * @param sft simple feature type
   * @throws java.lang.IllegalArgumentException if schema is invalid and shouldn't be written
   */
  @throws(classOf[IllegalArgumentException])
  protected def preSchemaCreate(sft: SimpleFeatureType): Unit

  /**
   * Called just before updating schema metadata. Allows for validation or configuration of user data
   *
   * @param sft simple feature type
   * @param previous previous feature type before changes
   * @throws java.lang.IllegalArgumentException if schema is invalid and shouldn't be updated
   */
  @throws(classOf[IllegalArgumentException])
  protected def preSchemaUpdate(sft: SimpleFeatureType, previous: SimpleFeatureType): Unit

  /**
   * Called after schema metadata has been persisted. Allows for creating tables, etc
   *
   * @param sft simple feature type
   */
  protected def onSchemaCreated(sft: SimpleFeatureType): Unit

  /**
   * Called after schema metadata has been persisted. Allows for creating tables, etc
   *
   * @param sft simple feature type
   * @param previous previous feature type before changes
   */
  protected def onSchemaUpdated(sft: SimpleFeatureType, previous: SimpleFeatureType): Unit

  /**
   * Called after deleting schema metadata. Allows for deleting tables, etc
   *
   * @param sft simple feature type
   */
  protected def onSchemaDeleted(sft: SimpleFeatureType): Unit

  // methods from org.geotools.data.DataStore

  /**
   * @see org.geotools.data.DataStore#getTypeNames()
   * @return existing simple feature type names
   */
  override def getTypeNames: Array[String] = metadata.getFeatureTypes

  /**
   * @see org.geotools.data.DataAccess#getNames()
   * @return existing simple feature type names
   */
  override def getNames: jList[Name] = {
    val names = new java.util.ArrayList[Name]
    // qualify names with the configured namespace, when one is set
    config.namespace match {
      case None     => getTypeNames.foreach(name => names.add(new NameImpl(name)))
      case Some(ns) => getTypeNames.foreach(name => names.add(new NameImpl(ns, name)))
    }
    names
  }

  /**
   * Validates the schema and writes metadata to catalog. If the schema already exists,
   * continue without error.
   *
   * This method uses distributed locking to ensure a schema is only created once.
   *
   * @see org.geotools.data.DataAccess#createSchema(org.opengis.feature.type.FeatureType)
   * @param schema type to create
   */
  override def createSchema(schema: SimpleFeatureType): Unit = {
    if (getSchema(schema.getTypeName) == null) {
      val lock = acquireCatalogLock()
      try {
        // check a second time now that we have the lock
        if (getSchema(schema.getTypeName) == null) {
          // ensure that we have a mutable type so we can set user data
          val sft = SimpleFeatureTypes.mutable(schema)
          // inspect and update the simple feature type for various components
          // do this before anything else so that any modifications will be in place
          GeoMesaSchemaValidator.validate(sft)

          // set the enabled indices
          preSchemaCreate(sft)

          try {
            // write out the metadata to the catalog table
            // compute the metadata values - IMPORTANT: encode type has to be called after all user data is set
            val metadataMap = Map(
              AttributesKey -> SimpleFeatureTypes.encodeType(sft, includeUserData = true),
              StatsGenerationKey -> GeoToolsDateFormat.format(Instant.now().atOffset(ZoneOffset.UTC))
            )
            metadata.insert(sft.getTypeName, metadataMap)

            // reload the sft so that we have any default metadata,
            // then copy over any additional keys that were in the original sft.
            // avoid calling getSchema directly, as that may trigger a remote version
            // check for indices that haven't been created yet
            val attributes = metadata.readRequired(sft.getTypeName, AttributesKey)
            val reloadedSft = SimpleFeatureTypes.createType(sft.getTypeName, attributes)
            val userData = new java.util.HashMap[AnyRef, AnyRef]()
            userData.putAll(reloadedSft.getUserData)
            reloadedSft.getUserData.putAll(sft.getUserData)
            reloadedSft.getUserData.putAll(userData)

            // create the tables
            onSchemaCreated(reloadedSft)
          } catch {
            case NonFatal(e) =>
              // If there was an error creating a schema, clean up.
              try {
                metadata.delete(sft.getTypeName)
              } catch {
                case NonFatal(e2) => e.addSuppressed(e2)
              }
              throw e
          }
        }
      } finally {
        lock.release()
      }
    }
  }

  /**
   * @see org.geotools.data.DataAccess#getSchema(org.opengis.feature.type.Name)
   * @param name feature type name
   * @return feature type, or null if it does not exist
   */
  override def getSchema(name: Name): SimpleFeatureType = getSchema(name.getLocalPart)

  /**
   * @see org.geotools.data.DataStore#getSchema(java.lang.String)
   * @param typeName feature type name
   * @return feature type, or null if it does not exist
   */
  override def getSchema(typeName: String): SimpleFeatureType = {
    metadata.read(typeName, AttributesKey) match {
      case None       => null
      case Some(spec) => SimpleFeatureTypes.createImmutableType(config.namespace.orNull, typeName, spec)
    }
  }

  /**
   * Allows the following modifications to the schema:
   *   renaming the feature type
   *   renaming attributes
   *   appending new attributes
   *   enabling/disabling indices through RichSimpleFeatureType.setIndexVersion
   *   modifying keywords through user-data
   *
   * Other modifications are not supported.
   *
   * @see org.geotools.data.DataStore#updateSchema(java.lang.String, org.opengis.feature.simple.SimpleFeatureType)
   * @param typeName simple feature type name
   * @param sft new simple feature type
   */
  override def updateSchema(typeName: String, sft: SimpleFeatureType): Unit =
    updateSchema(new NameImpl(typeName), sft)

  /**
   * Allows the following modifications to the schema:
   *   renaming the feature type
   *   renaming attributes
   *   appending new attributes
   *   enabling/disabling indices through RichSimpleFeatureType.setIndexVersion
   *   modifying keywords through user-data
   *
   * Other modifications are not supported.
   *
   * @see org.geotools.data.DataAccess#updateSchema(org.opengis.feature.type.Name, org.opengis.feature.type.FeatureType)
   * @param typeName simple feature type name
   * @param schema new simple feature type
   */
  override def updateSchema(typeName: Name, schema: SimpleFeatureType): Unit = {
    // validate that the type name has not changed, or that the new name is not already in use
    if (typeName.getLocalPart != schema.getTypeName && getTypeNames.contains(schema.getTypeName)) {
      throw new IllegalArgumentException(
        s"Updated type name already exists: '$typeName' changed to '${schema.getTypeName}'")
    }

    val lock = acquireCatalogLock()
    try {
      // get previous schema and user data
      val previousSft = getSchema(typeName)

      if (previousSft == null) {
        throw new IllegalArgumentException(s"Schema '$typeName' does not exist")
      }

      GeoMesaSchemaValidator.validate(schema)

      validateSchemaUpdate(previousSft, schema)

      val sft = SimpleFeatureTypes.mutable(schema)

      // validation and normalization of the schema
      preSchemaUpdate(sft, previousSft)

      // if all is well, update the metadata - first back it up
      if (FastConverter.convertOrElse[java.lang.Boolean](sft.getUserData.get(Configs.UpdateBackupMetadata), true)) {
        metadata.backup(typeName.getLocalPart)
      }

      // rename the old rows if the type name has changed
      if (typeName.getLocalPart != schema.getTypeName) {
        metadata.scan(typeName.getLocalPart, "", cache = false).foreach { case (k, v) =>
          metadata.insert(sft.getTypeName, k, v)
          metadata.remove(typeName.getLocalPart, k)
        }
      }

      // now insert the new spec string
      metadata.insert(sft.getTypeName, AttributesKey, SimpleFeatureTypes.encodeType(sft, includeUserData = true))

      onSchemaUpdated(sft, previousSft)
    } finally {
      lock.release()
    }
  }

  /**
   * Deletes the schema metadata
   *
   * @see org.geotools.data.DataStore#removeSchema(java.lang.String)
   * @param typeName simple feature type name
   */
  override def removeSchema(typeName: String): Unit = {
    val lock = acquireCatalogLock()
    try {
      // delete tables first (via hook), then remove the metadata rows
      Option(getSchema(typeName)).foreach { sft =>
        onSchemaDeleted(sft)
        metadata.delete(typeName)
      }
    } finally {
      lock.release()
    }
  }

  /**
   * @see org.geotools.data.DataAccess#removeSchema(org.opengis.feature.type.Name)
   * @param typeName simple feature type name
   */
  override def removeSchema(typeName: Name): Unit = removeSchema(typeName.getLocalPart)

  /**
   * @see org.geotools.data.DataStore#getFeatureSource(java.lang.String)
   * @param typeName simple feature type name
   * @return featureStore, suitable for reading and writing
   */
  override def getFeatureSource(typeName: Name): SimpleFeatureSource = getFeatureSource(typeName.getLocalPart)

  /**
   * Create a general purpose writer that is capable of updates and deletes.
   * Does <b>not</b> allow inserts. Will return all existing features.
   *
   * @see org.geotools.data.DataStore#getFeatureWriter(java.lang.String, org.geotools.data.Transaction)
   * @param typeName feature type name
   * @param transaction transaction (currently ignored)
   * @return feature writer
   */
  override def getFeatureWriter(typeName: String, transaction: Transaction): SimpleFeatureWriter =
    getFeatureWriter(typeName, Filter.INCLUDE, transaction)

  /**
   * Create a general purpose writer that is capable of updates and deletes.
   * Does <b>not</b> allow inserts.
   *
   * @see org.geotools.data.DataStore#getFeatureWriter(java.lang.String, org.opengis.filter.Filter,
   *        org.geotools.data.Transaction)
   * @param typeName feature type name
   * @param filter cql filter to select features for update/delete
   * @param transaction transaction (currently ignored)
   * @return feature writer
   */
  override def getFeatureWriter(typeName: String, filter: Filter, transaction: Transaction): SimpleFeatureWriter

  /**
   * Creates a feature writer only for writing - does not allow updates or deletes.
   *
   * @see org.geotools.data.DataStore#getFeatureWriterAppend(java.lang.String, org.geotools.data.Transaction)
   * @param typeName feature type name
   * @param transaction transaction (currently ignored)
   * @return feature writer
   */
  override def getFeatureWriterAppend(typeName: String, transaction: Transaction): SimpleFeatureWriter

  /**
   * @see org.geotools.data.DataAccess#getInfo()
   * @return service info
   */
  override def getInfo: ServiceInfo = {
    val info = new DefaultServiceInfo()
    info.setDescription(s"Features from ${getClass.getSimpleName}")
    info.setSchema(FeatureTypes.DEFAULT_NAMESPACE)
    info
  }

  /**
   * We always return null, which indicates that we are handling transactions ourselves.
   *
   * @see org.geotools.data.DataStore#getLockingManager()
   * @return locking manager - null
   */
  override def getLockingManager: LockingManager = null

  /**
   * Cleanup any open connections, etc. Equivalent to java.io.Closeable.close()
   *
   * @see org.geotools.data.DataAccess#dispose()
   */
  override def dispose(): Unit = {
    CloseWithLogging(metadata)
    CloseWithLogging(interceptors)
  }

  // end methods from org.geotools.data.DataStore

  /**
   * Validate a call to updateSchema, throwing errors on failed validation
   *
   * @param existing existing schema
   * @param schema updated sft
   */
  protected def validateSchemaUpdate(existing: SimpleFeatureType, schema: SimpleFeatureType): Unit = {
    // validate that default geometry and date have not changed (rename is ok)
    if (schema.getGeomIndex != existing.getGeomIndex) {
      throw new UnsupportedOperationException("Changing the default geometry attribute is not supported")
    } else if (schema.getDtgIndex != existing.getDtgIndex) {
      throw new UnsupportedOperationException("Changing the default date attribute is not supported")
    }

    // check that unmodifiable user data has not changed
    MetadataBackedDataStore.UnmodifiableUserDataKeys.foreach { key =>
      if (schema.userData[Any](key) != existing.userData[Any](key)) {
        throw new UnsupportedOperationException(s"Updating '$key' is not supported")
      }
    }

    // validate that attributes weren't removed
    if (existing.getAttributeCount > schema.getAttributeCount) {
      throw new UnsupportedOperationException("Removing attributes from the schema is not supported")
    }

    // check for column type changes
    existing.getAttributeDescriptors.asScala.zipWithIndex.foreach { case (prev, i) =>
      val binding = schema.getDescriptor(i).getType.getBinding
      if (!binding.isAssignableFrom(prev.getType.getBinding)) {
        throw new UnsupportedOperationException(
          s"Incompatible schema column type change: ${schema.getDescriptor(i).getLocalName} " +
              s"from ${prev.getType.getBinding.getName} to ${binding.getName}")
      }
    }

    // check for reserved words - only check for new/renamed attributes
    val reserved = schema.getAttributeDescriptors.asScala.map(_.getLocalName).exists { name =>
      existing.getDescriptor(name) == null && FeatureUtils.ReservedWords.contains(name.toUpperCase(Locale.US))
    }
    if (reserved) {
      ReservedWordCheck.validateAttributeNames(schema)
    }
  }

  /**
   * Acquires a distributed lock for all data stores sharing this catalog table.
   * Make sure that you 'release' the lock in a finally block.
   */
  protected [geomesa] def acquireCatalogLock(): Releasable = {
    import org.locationtech.geomesa.index.DistributedLockTimeout
    val dsTypeName = getClass.getSimpleName.replaceAll("[^A-Za-z]", "")
    val path = s"/org.locationtech.geomesa/ds/$dsTypeName/${config.catalog}"
    val timeout = DistributedLockTimeout.toDuration.getOrElse {
      // note: should always be a valid fallback value so this exception should never be triggered
      throw new IllegalArgumentException(s"Couldn't convert '${DistributedLockTimeout.get}' to a duration")
    }
    acquireDistributedLock(path, timeout.toMillis).getOrElse {
      throw new RuntimeException(s"Could not acquire distributed lock at '$path' within $timeout")
    }
  }
}
object MetadataBackedDataStore {

  import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.Configs._

  // User-data keys that may never change through updateSchema — presumably they
  // determine the physical layout of existing index tables (TODO confirm).
  private val UnmodifiableUserDataKeys =
    Set(
      TableSharing,
      TableSharingPrefix,
      IndexVisibilityLevel,
      IndexZ3Interval,
      IndexS3Interval,
      IndexXzPrecision,
      IndexZShards,
      IndexIdShards,
      IndexAttributeShards
    )
}
|
locationtech/geomesa
|
geomesa-index-api/src/main/scala/org/locationtech/geomesa/index/geotools/MetadataBackedDataStore.scala
|
Scala
|
apache-2.0
| 17,854 |
package ch3
import org.learningconcurrency._
import ch3._
/**
 * Demonstrates concurrent corruption of collection state: two threads append
 * numbers to the same mutable ArrayBuffer without synchronization.
 * (`execute`/`log` come from the book's helper package — presumably running the
 * body asynchronously on a thread pool; confirm in org.learningconcurrency.)
 */
object CollectionsBad extends App {
  import scala.collection._
  /**
   * Standard collection implementations use no synchronization, and the
   * underlying data structures of mutable collections can be quite complex.
   */
  val buffer = mutable.ArrayBuffer[Int]()
  def add(numbers: Seq[Int]) = execute {
    buffer ++= numbers
    log(s"buffer = $buffer")
  }
  /**
   * This example does not print an ArrayBuffer of 20 distinct elements;
   * each run produces different random results or throws an exception, e.g.:
   * ForkJoinPool-1-worker-5: buffer = ArrayBuffer(10, 11, 12, 13, 4, 5, 16, 7, 8, 9)
     ForkJoinPool-1-worker-3: buffer = ArrayBuffer(10, 11, 12, 13, 4, 5, 16, 7, 8, 9)
     ForkJoinPool-1-worker-3: buffer = ArrayBuffer(10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
     ForkJoinPool-1-worker-5: buffer = ArrayBuffer(10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
   */
  add(0 until 10)
  add(10 until 20)
  Thread.sleep(500)
}
/**
 * Synchronizing a mutable collection: wraps an ArrayBuffer in a proxy mixing in
 * SynchronizedBuffer so concurrent appends are safe.
 * NOTE(review): BufferProxy/SynchronizedBuffer are deprecated in later Scala
 * versions — kept here as written for the demonstration.
 */
object CollectionsSynchronized extends App {
  import scala.collection._
  val buffer = new mutable.BufferProxy[Int] with mutable.SynchronizedBuffer[Int] {
    val self = mutable.ArrayBuffer[Int]()
  }
  execute {
    buffer ++= (0 until 10)
    log(s"buffer = $buffer")
  }
  execute {
    buffer ++= (10 until 20)
    log(s"buffer = $buffer")
  }
  /**
   * Expected (race-free) output:
   ForkJoinPool-1-worker-5: buffer = ArrayBuffer(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)
   ForkJoinPool-1-worker-3: buffer = ArrayBuffer(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19)
   */
  Thread.sleep(500)
}
// Demonstrates handing single values between threads with scala.concurrent.SyncVar:
// `put` blocks until the variable is empty, `take` removes the value, `get` reads
// without removing it.
object MiscSyncVars extends App {
  import scala.concurrent._
  val sv = new SyncVar[String]
  execute {
    Thread.sleep(500)
    log("sending a message")
    sv.put("This is secret.")
  }
  log(s"get = ${sv.get}")
  log(s"take = ${sv.take()}")
  execute {
    Thread.sleep(500)
    log("sending another message")
    sv.put("Secrets should not be logged!")
  }
  log(s"take = ${sv.take()}")
  // take with a timeout (ms) — see SyncVar.take(timeout) semantics
  log(s"take = ${sv.take(timeout = 1000)}")
}
// Demonstrates DynamicVariable: `withValue` installs a replacement value for the
// dynamic extent of its body (per thread), here swapping out the log function.
object MiscDynamicVars extends App {
  import scala.util.DynamicVariable
  val dynlog = new DynamicVariable[String => Unit](log)
  def secretLog(msg: String) = println(s"(unknown thread): $msg")
  execute {
    dynlog.value("Starting asynchronous execution.")
    dynlog.withValue(secretLog) {
      // within this block, dynlog.value is secretLog
      dynlog.value("Nobody knows who I am.")
    }
    dynlog.value("Ending asynchronous execution.")
  }
  dynlog.value("is calling the log method!")
}
/**
 * The main thread creates a queue of 5500 elements and starts a concurrent task
 * that creates an iterator and prints the elements one by one, while the main
 * thread simultaneously removes all elements in traversal order.
 * A key difference between sequential and concurrent queues is that concurrent
 * queues have weakly consistent iterators.
 */
object CollectionsIterators extends App {
  import java.util.concurrent._
  /**
   * The BlockingQueue interface also adds thread-blocking variants of the
   * methods that already exist on sequential queues.
   */
  val queue = new LinkedBlockingQueue[String]
  // A queue is a first-in-first-out (FIFO) data structure;
  // adding a new item to a full queue rejects the extra item.
  // That is where offer() helps: instead of add() throwing an unchecked
  // exception, offer() simply returns false.
  for (i <- 1 to 5500) queue.offer(i.toString) // enqueue
  execute {
    val it = queue.iterator // creates an iterator that traverses the queue's elements;
    // if enqueue/dequeue operations happen before the traversal completes,
    // the traversal may be entirely invalidated
    while (it.hasNext) log(it.next())
  }
  for (i <- 1 to 5500) log(queue.poll()) // dequeue
}
/**
 * ConcurrentHashMap operations; they do not block calling threads.
 */
object CollectionsConcurrentMap extends App {
  import java.util.concurrent.ConcurrentHashMap
  import scala.collection._
  import scala.collection.convert.decorateAsScala._
  import scala.annotation.tailrec
  val emails = new ConcurrentHashMap[String, List[String]]().asScala
  execute {
    emails("James Gosling") = List("[email protected]")
    log(s"emails = $emails")
  }
  execute {
    // first putIfAbsent wins; the second for the same key is a no-op
    emails.putIfAbsent("Alexey Pajitnov", List("[email protected]"))
    log(s"emails = $emails")
  }
  execute {
    emails.putIfAbsent("Alexey Pajitnov", List("[email protected]"))
    log(s"emails = $emails")
  }
}
// Lock-free incremental update of a concurrent map via retry loops built on
// the atomic replace/putIfAbsent primitives.
object CollectionsConcurrentMapIncremental extends App {
  import java.util.concurrent.ConcurrentHashMap
  import scala.collection._
  import scala.collection.convert.decorateAsScala._
  import scala.annotation.tailrec
  val emails = new ConcurrentHashMap[String, List[String]]().asScala
  @tailrec def addEmail(name: String, address: String) {
    emails.get(name) match {
      case Some(existing) =>
        // CAS-style update: retry if another thread changed the entry meanwhile
        if (!emails.replace(name, existing, address :: existing)) addEmail(name, address)
      case None =>
        // putIfAbsent() adds to the map like put(), but only inserts the key
        // when the map does not already contain it; if the key already exists,
        // the current value is kept (and a retry is needed here).
        if (emails.putIfAbsent(name, address :: Nil) != None) addEmail(name, address)
    }
  }
  execute {
    addEmail("Yukihiro Matsumoto", "[email protected]")
    log(s"emails = $emails")
  }
  execute {
    addEmail("Yukihiro Matsumoto", "[email protected]")
    log(s"emails = $emails")
  }
}
/**
 * Creates a concurrent map of names to numbers (e.g. Janice -> 0).
 * If the iterator were consistent, we would see the three initial names plus a
 * consecutive prefix of the "John n" entries added by the first task; instead,
 * the traversal may show random, non-consecutive names such as John 8 or John 5.
 */
object CollectionsConcurrentMapBulk extends App {
  import scala.collection._
  import scala.collection.convert.decorateAsScala._
  import java.util.concurrent.ConcurrentHashMap
  val names = new ConcurrentHashMap[String, Int]().asScala
  names("Johnny") = 0
  names("Jane") = 0
  names("Jack") = 0
  execute {
    for (n <- 0 until 10) names(s"John $n") = n
  }
  execute {
    for (n <- names) log(s"name: $n")
  }
  /**
   * Sample output (weakly consistent iteration):
   * ForkJoinPool-1-worker-3: name: (Jane,0)
   * ForkJoinPool-1-worker-3: name: (Jack,0)
   * ForkJoinPool-1-worker-3: name: (John 8,8)
   * ForkJoinPool-1-worker-3: name: (John 0,0)
   * ForkJoinPool-1-worker-3: name: (John 5,5)
   * ForkJoinPool-1-worker-3: name: (Johnny,0)
   * ForkJoinPool-1-worker-3: name: (John 6,6)
   * ForkJoinPool-1-worker-3: name: (John 4,4)
   */
  Thread.sleep(500)
}
/**
 * TrieMap vs ConcurrentHashMap: use TrieMap when the application needs
 * consistent iterators; prefer ConcurrentHashMap when consistent iteration is
 * not required and modifications are rare, since its lookups are faster.
 */
object CollectionsTrieMapBulk extends App {
  import scala.collection._
  /**
   * TrieMap never exhibits the inconsistent iteration of the previous example:
   * iteration works on an atomic snapshot, so writers cannot interfere with a
   * reading thread, and the names below are printed in full, sorted
   * alphabetically.
   */
  val names = new concurrent.TrieMap[String, Int]
  names("Janice") = 0
  names("Jackie") = 0
  names("Jill") = 0

  execute {
    for (n <- 10 until 100) names(s"John $n") = n
  }
  execute {
    log("snapshot time!")
    for (n <- names.map(_._1).toSeq.sorted) log(s"name: $n")
  }
  Thread.sleep(500)
}
|
tophua/spark1.52
|
examples/src/main/scala/scalaDemo/threadConcurrency/ch3/Collections.scala
|
Scala
|
apache-2.0
| 7,925 |
package breeze.classify;
/*
Copyright 2009 David Hall, Daniel Ramage
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import breeze.data._
import breeze.linalg._
/**
* Represents a classifier from observations of type T to labels of type L.
* Implementers should only need to implement score.
*
* @author dlwh
*/
trait Classifier[L,-T] extends (T=>L) { outer =>
  /** Return the most likely label */
  def apply(o :T) = classify(o);
  /** Return the most likely label */
  def classify(o :T) = scores(o).argmax;
  /** For the observation, return the score for each label that has a nonzero
   * score.
   */
  def scores(o: T): Counter[L,Double]
  /**
   * Transforms output labels L=>M. if f(x) is not one-to-one then the max of score
   * from the L's are used.
   */
  def map[M](f: L=>M):Classifier[M,T] = new Classifier[M,T] {
    def scores(o: T): Counter[M,Double] = {
      val ctr = Counter[M,Double]()
      val otherCtr = outer.scores(o)
      for( x <- otherCtr.keysIterator) {
        val y = f(x)
        // NOTE(review): a Counter lookup of a missing key presumably yields 0.0,
        // so if scores can be negative, this max clamps the first merged value
        // for each M at 0.0 — confirm Counter's default-value semantics.
        ctr(y) = ctr(y) max otherCtr(x)
      }
      ctr;
    }
  }
}
object Classifier {
  /** A Trainer builds a Classifier of some concrete subtype from labeled examples. */
  trait Trainer[L,T] {
    // Refined by concrete trainers to the exact classifier type they produce.
    type MyClassifier <: Classifier[L,T]
    def train(data: Iterable[Example[L,T]]):MyClassifier;
  }
}
|
tjhunter/scalanlp-core
|
learn/src/main/scala/breeze/classify/Classifier.scala
|
Scala
|
apache-2.0
| 1,736 |
package java.io
/**
 * Minimal re-implementation of java.io.IOException for Scala.js, providing
 * the four standard constructors by forwarding to the (message, cause) one.
 */
class IOException(s: String, e: Throwable) extends Exception(s, e) {
  def this(e: Throwable) = this(null, e)
  def this(s: String) = this(s, null)
  def this() = this(null, null)
}
/** Signals an unexpected end of stream; mirrors java.io.EOFException. */
class EOFException(s: String) extends IOException(s) {
  def this() = this(null)
}
/** Signals malformed modified-UTF-8 data; mirrors java.io.UTFDataFormatException. */
class UTFDataFormatException(s: String) extends IOException(s) {
  def this() = this(null)
}
/** Signals an unsupported character encoding; mirrors java.io.UnsupportedEncodingException. */
class UnsupportedEncodingException(s: String) extends IOException(s) {
  def this() = this(null)
}
|
jmnarloch/scala-js
|
javalib/src/main/scala/java/io/Throwables.scala
|
Scala
|
bsd-3-clause
| 477 |
package org.jetbrains.plugins.scala
package caches
import java.util.concurrent.atomic.AtomicReference
import java.util.concurrent.{ConcurrentHashMap, ConcurrentMap}
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.openapi.util._
import com.intellij.psi._
import com.intellij.psi.impl.compiled.ClsFileImpl
import com.intellij.psi.util._
import org.jetbrains.plugins.scala.caches.ProjectUserDataHolder._
import org.jetbrains.plugins.scala.caches.stats.{CacheCapabilities, CacheTracker}
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.impl.{ScalaFileImpl, ScalaPsiManager}
import scala.util.control.ControlThrowable
/**
* User: Alexander Podkhalyuzin
* Date: 08.06.2009
*/
object CachesUtil {
  /**
   * Do not delete this type alias, it is used by [[org.jetbrains.plugins.scala.macroAnnotations.CachedWithRecursionGuard]]
   *
   * @see [[CachesUtil.getOrCreateKey]] for more info
   */
  type CachedMap[Data, Result] = CachedValue[ConcurrentMap[Data, Result]]

  /** Single cached value stored in an AtomicReference (used by the caching macros). */
  type CachedRef[Result] = CachedValue[AtomicReference[Result]]

  // Interned Key registry: a given id must always resolve to the same Key instance.
  private val keys = new ConcurrentHashMap[String, Key[_]]()

  /**
   * IMPORTANT:
   * Cached annotations (CachedWithRecursionGuard, CachedMappedWithRecursionGuard, and CachedInUserData)
   * rely on this method, even though it shows that it is unused
   *
   * If you change this method in any way, please make sure it's consistent with the annotations
   *
   * Do not use this method directly. You should use annotations instead
   */
  def getOrCreateKey[T](id: String): Key[T] = {
    keys.get(id) match {
      case null =>
        val newKey = Key.create[T](id)
        // putIfAbsent settles the race when two threads create the same key concurrently:
        // the loser adopts the winner's instance.
        val race = keys.putIfAbsent(id, newKey)
        if (race != null) race.asInstanceOf[Key[T]]
        else newKey
      case v => v.asInstanceOf[Key[T]]
    }
  }

  /**
   * Chooses a modification tracker for the element: compiled files living in a
   * library only change together with the project roots, so they are tracked by
   * the root manager; everything else uses block-level tracking.
   */
  def libraryAwareModTracker(element: PsiElement): ModificationTracker = {
    val rootManager = ProjectRootManager.getInstance(element.getProject)
    element.getContainingFile match {
      case file: ScalaFile if file.isCompiled && rootManager.getFileIndex.isInLibrary(file.getVirtualFile) => rootManager
      case _: ClsFileImpl => rootManager
      case _ => BlockModificationTracker(element)
    }
  }

  // ControlThrowable: a cheap (stack-trace-free) control-flow exception thrown to
  // unwind out of a probably-recursive computation; caught by the recursion guards.
  case class ProbablyRecursionException[Data](elem: PsiElement,
                                              data: Data,
                                              key: Key[_],
                                              set: Set[ScFunction]) extends ControlThrowable

  //used in caching macro annotations
  def getOrCreateCachedMap[Dom: ProjectUserDataHolder, Data, Result](elem: Dom,
                                                                     key: Key[CachedMap[Data, Result]],
                                                                     cacheTypeId: String,
                                                                     cacheTypeName: String,
                                                                     dependencyItem: () => Object): ConcurrentMap[Data, Result] = {
    import CacheCapabilties._
    val cachedValue = elem.getUserData(key) match {
      case null =>
        val manager = CachedValuesManager.getManager(elem.getProject)
        val provider = new CachedValueProvider[ConcurrentMap[Data, Result]] {
          override def compute(): CachedValueProvider.Result[ConcurrentMap[Data, Result]] =
            new CachedValueProvider.Result(new ConcurrentHashMap(), dependencyItem())
        }
        val newValue = CacheTracker.track(cacheTypeId, cacheTypeName) {
          manager.createCachedValue(provider, false)
        }
        // putUserDataIfAbsent returns the already-installed value if another
        // thread won the race, so all callers share one cached map.
        elem.putUserDataIfAbsent(key, newValue)
      case d => d
    }
    cachedValue.getValue
  }

  //used in caching macro annotations
  def getOrCreateCachedRef[Dom: ProjectUserDataHolder, Result >: Null](elem: Dom,
                                                                       key: Key[CachedRef[Result]],
                                                                       cacheTypeId: String,
                                                                       cacheTypeName: String,
                                                                       dependencyItem: () => Object): AtomicReference[Result] = {
    import CacheCapabilties._
    val cachedValue = elem.getUserData(key) match {
      case null =>
        val manager = CachedValuesManager.getManager(elem.getProject)
        val provider = new CachedValueProvider[AtomicReference[Result]] {
          override def compute(): CachedValueProvider.Result[AtomicReference[Result]] =
            new CachedValueProvider.Result(new AtomicReference[Result](), dependencyItem())
        }
        val newValue = CacheTracker.track(cacheTypeId, cacheTypeName) {
          manager.createCachedValue(provider, false)
        }
        // Same race-resolution pattern as getOrCreateCachedMap above.
        elem.putUserDataIfAbsent(key, newValue)
      case d => d
    }
    cachedValue.getValue
  }

  //used in CachedWithRecursionGuard
  def handleRecursiveCall[Data, Result](e: PsiElement,
                                        data: Data,
                                        key: Key[_],
                                        defaultValue: => Result): Result = {
    // Inside a function already known to be recursive, just return the default;
    // otherwise mark the function and unwind via the control-flow exception.
    val function = PsiTreeUtil.getContextOfType(e, true, classOf[ScFunction])
    if (function == null || function.isProbablyRecursive) {
      defaultValue
    } else {
      function.isProbablyRecursive = true
      throw ProbablyRecursionException(e, data, key, Set(function))
    }
  }

  //Tuple2 class doesn't have half-specialized variants, so (T, Long) almost always have boxed long inside
  case class Timestamped[@specialized(Boolean, Int, AnyRef) T](data: T, modCount: Long)

  def fileModCount(file: PsiFile): Long = fileModTracker(file).getModificationCount

  /** Tracker combining project-wide top-level changes with the file's own modification stamp. */
  def fileModTracker(file: PsiFile): ModificationTracker =
    if (file == null)
      ModificationTracker.NEVER_CHANGED
    else
      new ModificationTracker {
        private val topLevel = scalaTopLevelModTracker(file.getProject)
        override def getModificationCount: Long = topLevel.getModificationCount + file.getModificationStamp
      }

  /**
   * see [[org.jetbrains.plugins.scala.lang.psi.impl.ScalaFileImpl.getContextModificationStamp]]
   */
  def fileContextModTracker(file: ScalaFileImpl): ModificationTracker =
    if (file == null)
      ModificationTracker.NEVER_CHANGED
    else
      new ModificationTracker {
        private val topLevel = scalaTopLevelModTracker(file.getProject)
        override def getModificationCount: Long = topLevel.getModificationCount + file.getContextModificationStamp
      }

  def scalaTopLevelModTracker(project: Project): ModificationTracker =
    ScalaPsiManager.instance(project).TopLevelModificationTracker

  // CacheCapabilities instances that teach CacheTracker how to count and clear
  // each cache shape used by the macros above.
  // NOTE(review): the identifier is misspelled ("Capabilties") — kept as-is
  // because callers (including the macro-generated code) import it by this name.
  object CacheCapabilties {
    implicit def concurrentMapCacheCapabilities[Data, Result]: CacheCapabilities[CachedValue[ConcurrentMap[Data, Result]]] =
      new CacheCapabilities[CachedValue[ConcurrentMap[Data, Result]]] {
        // getUpToDateOrNull avoids recomputation: only an already-valid cache is inspected.
        private def realCache(cache: CacheType) = cache.getUpToDateOrNull.nullSafe.map(_.get())
        override def cachedEntitiesCount(cache: CacheType): Int = realCache(cache).fold(0)(_.size())
        override def clear(cache: CacheType): Unit = realCache(cache).foreach(_.clear())
      }
    implicit def atomicRefCacheCapabilities[Data, Result >: Null]: CacheCapabilities[CachedValue[AtomicReference[Result]]] =
      new CacheCapabilities[CachedValue[AtomicReference[Result]]] {
        private def realCache(cache: CacheType) = cache.getUpToDateOrNull.nullSafe.map(_.get())
        override def cachedEntitiesCount(cache: CacheType): Int = realCache(cache).fold(0)(c => if (c.get() == null) 0 else 1)
        override def clear(cache: CacheType): Unit = realCache(cache).foreach(_.set(null))
      }
    implicit def timestampedMapCacheCapabilities[M >: Null <: ConcurrentMap[_, _]]: CacheCapabilities[AtomicReference[Timestamped[M]]] =
      new CacheCapabilities[AtomicReference[Timestamped[M]]] {
        override def cachedEntitiesCount(cache: CacheType): Int = cache.get().data.nullSafe.fold(0)(_.size())
        // -1 mod count guarantees the stale snapshot is never considered current.
        override def clear(cache: CacheType): Unit = cache.set(Timestamped(null.asInstanceOf[M], -1))
      }
    implicit def timestampedSingleValueCacheCapabilities[T]: CacheCapabilities[AtomicReference[Timestamped[T]]] =
      new CacheCapabilities[AtomicReference[Timestamped[T]]] {
        override def cachedEntitiesCount(cache: CacheType): Int = if (cache.get().data == null) 0 else 1
        override def clear(cache: CacheType): Unit = cache.set(Timestamped(null.asInstanceOf[T], -1))
      }
  }
}
|
JetBrains/intellij-scala
|
scala/scala-impl/src/org/jetbrains/plugins/scala/caches/CachesUtil.scala
|
Scala
|
apache-2.0
| 8,793 |
package com.github.j5ik2o.spetstore.domain.basic
/**
* 連絡先を表す値オブジェクト。
*
* @param email メールアドレス
* @param phone 電話番号
*/
case class Contact(email: String, phone: String)
|
j5ik2o/spetstore-cqrs-es-akka
|
domain/src/main/scala/com/github/j5ik2o/spetstore/domain/basic/Contact.scala
|
Scala
|
mit
| 223 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.tail.internal
import cats.effect.Sync
import cats.syntax.all._
import monix.execution.internal.collection.ChunkedArrayStack
import monix.tail.Iterant
import monix.tail.Iterant.{Concat, Halt, Last, Next, NextBatch, NextCursor, Scope, Suspend}
private[tail] object IterantInterleave {
  /**
   * Implementation for `Iterant.interleave`.
   */
  def apply[F[_], A](l: Iterant[F, A], r: Iterant[F, A])(implicit F: Sync[F]): Iterant[F, A] =
    Suspend(F.delay(new Loop().apply(l, r)))

  /**
   * Stateful interleaving loop: control alternates between the left-hand and
   * right-hand visitors, emitting one element from each side in turn and
   * stopping as soon as either side halts. A fresh Loop is allocated per
   * subscription (inside F.delay above), so the mutable state is not shared.
   */
  private final class Loop[F[_], A](implicit F: Sync[F]) extends ((Iterant[F, A], Iterant[F, A]) => Iterant[F, A]) {
    loop =>

    def apply(lh: Iterant[F, A], rh: Iterant[F, A]): Iterant[F, A] =
      lhLoop.visit(lh, rh)

    //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
    // Used by Concat: lazily-allocated stacks of pending right-hand
    // continuations, one stack per side.
    private[this] var _lhStack: ChunkedArrayStack[F[Iterant[F, A]]] = _
    private[this] var _rhStack: ChunkedArrayStack[F[Iterant[F, A]]] = _

    private def lhStackPush(ref: F[Iterant[F, A]]): Unit = {
      if (_lhStack == null) _lhStack = ChunkedArrayStack()
      _lhStack.push(ref)
    }

    // Returns null when no continuation is pending (stack never allocated or empty).
    private def lhStackPop(): F[Iterant[F, A]] =
      if (_lhStack == null) null.asInstanceOf[F[Iterant[F, A]]]
      else _lhStack.pop()

    private def rhStackPush(ref: F[Iterant[F, A]]): Unit = {
      if (_rhStack == null) _rhStack = ChunkedArrayStack()
      _rhStack.push(ref)
    }

    private def rhStackPop(): F[Iterant[F, A]] =
      if (_rhStack == null) null.asInstanceOf[F[Iterant[F, A]]]
      else _rhStack.pop()

    //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
    private[this] val lhLoop = new LHLoop
    private[this] val rhLoop = new RHLoop

    //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
    /** Processes the left side; after emitting an element it hands control to RHLoop. */
    private final class LHLoop extends Iterant.Visitor[F, A, Iterant[F, A]] {
      protected var rhRef: F[Iterant[F, A]] = _

      def continue(lh: F[Iterant[F, A]], rh: F[Iterant[F, A]]): F[Iterant[F, A]] = {
        rhRef = rh
        lh.map(this)
      }

      // Switch sides: the remainder of the left stream is remembered by RHLoop.
      def continueRight(lhRest: F[Iterant[F, A]]) =
        rhLoop.continue(lhRest, rhRef)

      def visit(lh: Iterant[F, A], rh: Iterant[F, A]): Iterant[F, A] = {
        rhRef = F.pure(rh)
        this.apply(lh)
      }

      def visit(ref: Next[F, A]): Iterant[F, A] =
        Next(ref.item, continueRight(ref.rest))

      def visit(ref: NextBatch[F, A]): Iterant[F, A] =
        visit(ref.toNextCursor())

      def visit(ref: NextCursor[F, A]): Iterant[F, A] = {
        val cursor = ref.cursor
        if (cursor.hasNext()) {
          val item = cursor.next()
          // Reuse the same NextCursor node while it still has elements.
          val rest: F[Iterant[F, A]] = if (cursor.hasNext()) F.pure(ref) else ref.rest
          Next(item, continueRight(rest))
        } else {
          Suspend(ref.rest.map(this))
        }
      }

      def visit(ref: Suspend[F, A]): Iterant[F, A] =
        Suspend(ref.rest.map(this))

      def visit(ref: Concat[F, A]): Iterant[F, A] = {
        // Defer the right part of the concatenation, continue with the left.
        lhStackPush(ref.rh)
        Suspend(ref.lh.map(this))
      }

      def visit[S](ref: Scope[F, S, A]): Iterant[F, A] =
        ref.runMap(this)

      def visit(ref: Last[F, A]): Iterant[F, A] =
        lhStackPop() match {
          case null =>
            // Left side is exhausted after this item.
            Next(ref.item, continueRight(F.pure(Iterant.empty)))
          case xs =>
            Next(ref.item, continueRight(xs))
        }

      def visit(ref: Halt[F, A]): Iterant[F, A] =
        ref.e match {
          case None =>
            // Normal end: resume a pending Concat continuation if one exists,
            // otherwise the interleaving stops (shortest stream wins).
            lhStackPop() match {
              case null => ref
              case xs => Suspend(xs.map(this))
            }
          case _ =>
            // Errors always terminate the interleaving.
            ref
        }

      def fail(e: Throwable): Iterant[F, A] =
        Iterant.raiseError(e)
    }

    /** Mirror image of LHLoop: processes the right side, then hands control back. */
    private final class RHLoop extends Iterant.Visitor[F, A, Iterant[F, A]] {
      protected var lhRef: F[Iterant[F, A]] = _

      def continue(lh: F[Iterant[F, A]], rh: F[Iterant[F, A]]): F[Iterant[F, A]] = {
        lhRef = lh
        rh.map(this)
      }

      def continueLeft(rhRest: F[Iterant[F, A]]) =
        lhLoop.continue(lhRef, rhRest)

      def visit(ref: Next[F, A]): Iterant[F, A] =
        Next(ref.item, continueLeft(ref.rest))

      def visit(ref: NextBatch[F, A]): Iterant[F, A] =
        visit(ref.toNextCursor())

      def visit(ref: NextCursor[F, A]): Iterant[F, A] = {
        val cursor = ref.cursor
        if (cursor.hasNext()) {
          val item = cursor.next()
          val rest: F[Iterant[F, A]] = if (cursor.hasNext()) F.pure(ref) else ref.rest
          Next(item, continueLeft(rest))
        } else {
          Suspend(ref.rest.map(this))
        }
      }

      def visit(ref: Suspend[F, A]): Iterant[F, A] =
        Suspend(ref.rest.map(this))

      def visit(ref: Concat[F, A]): Iterant[F, A] = {
        rhStackPush(ref.rh)
        Suspend(ref.lh.map(this))
      }

      def visit[S](ref: Scope[F, S, A]): Iterant[F, A] =
        ref.runMap(this)

      def visit(ref: Last[F, A]): Iterant[F, A] =
        rhStackPop() match {
          case null =>
            Next(ref.item, continueLeft(F.pure(Iterant.empty)))
          case xs =>
            Next(ref.item, continueLeft(xs))
        }

      def visit(ref: Halt[F, A]): Iterant[F, A] =
        ref.e match {
          case None =>
            rhStackPop() match {
              case null => ref
              case xs => Suspend(xs.map(this))
            }
          case _ =>
            ref
        }

      def fail(e: Throwable): Iterant[F, A] =
        Iterant.raiseError(e)
    }
  }
}
|
monifu/monifu
|
monix-tail/shared/src/main/scala/monix/tail/internal/IterantInterleave.scala
|
Scala
|
apache-2.0
| 6,177 |
package com.alexitc.coinalerts.controllers
import com.alexitc.coinalerts.commons.{CustomPlayAPISpec, DataHelper, RandomDataGenerator}
import com.alexitc.coinalerts.models._
import play.api.Application
import play.api.i18n.Lang
import play.api.libs.json.JsValue
import play.api.test.Helpers._
/**
 * Integration tests for the users API: registration, email verification,
 * login, current-user lookup, and user preferences.
 */
class UsersControllerSpec extends CustomPlayAPISpec {
  import CustomPlayAPISpec._

  val application: Application = guiceApplicationBuilder.build()

  "POST /users" should {
    // Request-body helper; reCaptcha is stubbed with "none" in tests.
    def createUserJson(email: String, password: String = "stupidpwd") = {
      s"""{ "email": "$email", "password": "$password", "reCaptchaResponse": "none"}"""
    }
    def callCreateUser(json: String) = {
      POST("/users", Some(json))
    }
    "Allow to create a new user" in {
      val email = "[email protected]"
      val response = callCreateUser(createUserJson(email))
      status(response) mustEqual CREATED
      val json = contentAsJson(response)
      (json \\ "id").as[String] must not be empty
      (json \\ "email").as[String] mustEqual email
    }
    "Return CONFLICT when the email is already registered" in {
      val email = "[email protected]"
      var response = callCreateUser(createUserJson(email))
      status(response) mustEqual CREATED
      // Registering the same email a second time must be rejected.
      response = callCreateUser(createUserJson(email))
      status(response) mustEqual CONFLICT
    }
    // Fixed typo in the test name: "Retrun" -> "Return".
    "Return BAD REQUEST when the email or password are invalid" in {
      val validEmail = "[email protected]"
      val invalidEmail = "notanEmail@nothing"
      var response = callCreateUser(createUserJson(invalidEmail))
      status(response) mustEqual BAD_REQUEST
      // Password below the minimum length is rejected too.
      response = callCreateUser(createUserJson(validEmail, "short"))
      status(response) mustEqual BAD_REQUEST
    }
    "Return BAD REQUEST when the input JSON is malformed" in {
      val invalidJson = """ }{" """
      val response = callCreateUser(invalidJson)
      status(response) mustEqual BAD_REQUEST
    }
  }

  "POST /users/verify-email/:token" should {
    def verifyEmailUrl(token: UserVerificationToken) = {
      s"/users/verify-email/${token.string}"
    }
    "Allow to verify a user based on the token" in {
      val email = RandomDataGenerator.email
      val user = userDataHandler.create(email, RandomDataGenerator.hiddenPassword).get
      val token = userDataHandler.createVerificationToken(user.id).get
      val response = POST(verifyEmailUrl(token))
      status(response) mustEqual OK
    }
  }

  "POST /users/login" should {
    val LoginUrl = "/users/login"
    "Allow to login with correct credentials" in {
      val email = RandomDataGenerator.email
      val password = RandomDataGenerator.password
      // Created for its side effect only (consistent with the `val _` below).
      val _ = DataHelper.createVerifiedUser(email, password)
      val json =
        s"""
           |{ "email": "${email.string}", "password": "${password.string}", "reCaptchaResponse": "none" }
         """.stripMargin
      val response = POST(LoginUrl, Some(json))
      status(response) mustEqual OK
    }
    "Fail to login an unverified user" in {
      val email = RandomDataGenerator.email
      val password = RandomDataGenerator.password
      val _ = userDataHandler.create(email, UserHiddenPassword.fromPassword(password)).get
      val json =
        s"""
           |{ "email": "${email.string}", "password": "${password.string}", "reCaptchaResponse": "none" }
         """.stripMargin
      val response = POST(LoginUrl, Some(json))
      status(response) mustEqual BAD_REQUEST
    }
    "Fail to login with incorrect password" in {
      val json =
        s"""
           |{ "email": "[email protected]", "password": "hmmm", "reCaptchaResponse": "none" }
         """.stripMargin
      val response = POST(LoginUrl, Some(json))
      status(response) mustEqual BAD_REQUEST
    }
  }

  "GET /users/me" should {
    val url = "/users/me"
    "Retrieve info from the logged in user" in {
      val email = RandomDataGenerator.email
      val password = RandomDataGenerator.password
      val user = DataHelper.createVerifiedUser(email, password)
      val token = jwtService.createToken(user)
      val response = GET(url, token.toHeader)
      status(response) mustEqual OK
      (contentAsJson(response) \\ "id").as[String] mustEqual user.id.string
    }
    "Fail when Authorization token is not present" in {
      val response = GET(url)
      status(response) mustEqual UNAUTHORIZED
    }
    "Fail when the token is not of type = Bearer" in {
      val header = "OAuth Xys"
      val response = GET(url, AUTHORIZATION -> header)
      status(response) mustEqual UNAUTHORIZED
    }
    "Fail when the token is incorrect" in {
      val header = "Bearer Xys"
      val response = GET(url, AUTHORIZATION -> header)
      status(response) mustEqual UNAUTHORIZED
    }
  }

  "GET /users/me/preferences" should {
    val url = "/users/me/preferences"
    "retrieve the preferences" in {
      val user = DataHelper.createVerifiedUser()
      val token = jwtService.createToken(user)
      val response = GET(url, token.toHeader)
      status(response) mustEqual OK
      val json = contentAsJson(response)
      (json \\ "userId").as[String] mustEqual user.id.string
      // New users default to English.
      (json \\ "lang").as[String] mustEqual "en"
    }
  }

  "PUT /users/me/preferences" should {
    val url = "/users/me/preferences"
    "update the preferences" in {
      val user = DataHelper.createVerifiedUser()
      val token = jwtService.createToken(user)
      val lang = "es"
      val body =
        s"""
           |{ "lang": "$lang" }
         """.stripMargin
      val response = PUT(url, Some(body), token.toHeader)
      status(response) mustEqual OK
      val json = contentAsJson(response)
      (json \\ "userId").as[String] mustEqual user.id.string
      (json \\ "lang").as[String] mustEqual lang
    }
    "fail to set an unsupported language" in {
      val user = DataHelper.createVerifiedUser()
      val token = jwtService.createToken(user)
      val lang = "ru"
      val body =
        s"""
           |{ "lang": "$lang" }
         """.stripMargin
      val response = PUT(url, Some(body), token.toHeader)
      status(response) mustEqual BAD_REQUEST
      val json = contentAsJson(response)
      val errorList = (json \\ "errors").as[List[JsValue]]
      errorList.size mustEqual 1
      val error = errorList.head
      (error \\ "type").as[String] mustEqual "field-validation-error"
      (error \\ "field").as[String] mustEqual "lang"
      (error \\ "message").as[String].nonEmpty mustEqual true
    }
    // Fixed typo in the test name: "in a Spanish" -> "in Spanish".
    "returns a response in Spanish" in {
      val user = DataHelper.createVerifiedUser()
      val token = jwtService.createToken(user)
      val lang = Lang("es")
      val body =
        s"""
           |{ "lang": "ru" }
         """.stripMargin
      // Store Spanish as the user's language, then trigger a validation error
      // to check the error message is localized.
      userDataHandler.setUserPreferences(user.id, SetUserPreferencesModel.default.copy(lang = lang))
      val response = PUT(url, Some(body), token.toHeader, "Accept-Language" -> lang.code)
      val json = contentAsJson(response)
      val errorList = (json \\ "errors").as[List[JsValue]]
      val error = errorList.head
      (error \\ "message").as[String] mustEqual "Idioma no disponible"
    }
  }
}
|
AlexITC/crypto-coin-alerts
|
alerts-server/test/com/alexitc/coinalerts/controllers/UsersControllerSpec.scala
|
Scala
|
gpl-3.0
| 7,148 |
package org.lexicon.utils
import org.scalatest.FunSuite
import org.junit.Assert._
import FileHelper._
/** Unit tests for TextAnalyzerObj: splitting, counting, and merging word maps. */
class TextAnalyzerTest extends FunSuite {
  import TextAnalyzerObj._

  test("Common test") {
    val hello = "Hello"
    val world = "World"
    // "Hello, World! How do you do?" splits into: Hello, World, How, do, you, do.
    val res = splitText(s"$hello, $world! How do you do?")
    // Fixed typo in the assertion message: "6 word" -> "6 words".
    assertEquals("Expect 6 words to be created", 6, res.length)
    assertEquals(s"First word should be $hello", hello, res(0))
    assertEquals(s"Second word should be $world", world, res(1))
    // Counting is case-insensitive, so the 6 words collapse to 5 distinct keys.
    val map = buildMap(res.toList)
    assertEquals("Map to contain 5 records", 5, map.size)
    assertEquals("Hello 1", 1, map.get(hello.toLowerCase).get.count)
    assertEquals("Do 2", 2, map.get("do").get.count)
    // Merging accumulates counts across texts: "do" appears once more below.
    val map2 = buildMap(splitText("Do you know, who is who?").toList)
    val map3 = buildMap(splitText("How are you?").toList)
    val resMap = mergeMaps(map, map2, map3)
    println(toDescList(resMap))
    assertEquals("Do 3", 3, resMap.get("do").get.count)
  }

  test("Based on resource") {
    // Smoke test over a large text fixture; prints descending word counts.
    val str = getResource("tolst_war_peace.txt")
    val res = toDescList(buildMap(splitText(str).toList))
    println(res)
  }
}
|
rumoku/lexicon
|
word-count/src/test/scala/org/lexicon/utils/TextAnalyzerTest.scala
|
Scala
|
apache-2.0
| 1,140 |
package org.jetbrains.plugins.scala.lang.macros.expansion
import java.io._
import java.util.regex.Pattern
import com.intellij.internal.statistic.UsageTrigger
import com.intellij.notification.{NotificationGroup, NotificationType}
import com.intellij.openapi.actionSystem.{AnAction, AnActionEvent}
import com.intellij.openapi.application.PathManager
import com.intellij.openapi.diagnostic.Logger
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.fileEditor.FileEditorManager
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.openapi.util.Key
import com.intellij.openapi.vfs.VirtualFileManager
import com.intellij.openapi.wm.ToolWindowId
import com.intellij.psi._
import com.intellij.psi.codeStyle.CodeStyleManager
import com.intellij.psi.impl.source.tree.LeafPsiElement
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugin.scala.util.MacroExpansion
import org.jetbrains.plugins.scala.ScalaBundle
import org.jetbrains.plugins.scala.extensions.{PsiElementExt, inWriteCommandAction}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScAnnotation, ScBlock, ScMethodCall}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScAnnotationsHolder
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.psi.api.{ScalaFile, ScalaRecursiveElementVisitor}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import scala.annotation.tailrec
import scala.collection.mutable.ArrayBuffer
import scala.meta.intellij.MetaExpansionsManager
class MacroExpandAction extends AnAction {
import MacroExpandAction._
override def actionPerformed(e: AnActionEvent): Unit = {
UsageTrigger.trigger(ScalaBundle.message("macro.expand.action.id"))
val sourceEditor = FileEditorManager.getInstance(e.getProject).getSelectedTextEditor
val psiFile = PsiDocumentManager.getInstance(e.getProject).getPsiFile(sourceEditor.getDocument).asInstanceOf[ScalaFile]
val offset = sourceEditor.getCaretModel.getOffset
val annot = PsiTreeUtil.getParentOfType(psiFile.findElementAt(offset), classOf[ScAnnotation], false)
if (annot != null)
expandMetaAnnotation(annot)
// expandSerialized(e, sourceEditor, psiFile)
}
def expandMacroUnderCursor(expansion: ResolvedMacroExpansion)(implicit e: AnActionEvent): Project = {
inWriteCommandAction(e.getProject) {
try {
applyExpansion(expansion)
} catch {
case _: UnresolvedExpansion =>
LOG.warn(s"unable to expand ${expansion.expansion.place}, cannot resolve place, skipping")
}
e.getProject
}
}
def expandAllMacroInCurrentFile(expansions: Seq[ResolvedMacroExpansion])(implicit e: AnActionEvent): Project = {
inWriteCommandAction(e.getProject) {
applyExpansions(expansions.toList)
e.getProject
}
}
@throws[UnresolvedExpansion]
def applyExpansion(resolved: ResolvedMacroExpansion)(implicit e: AnActionEvent): Unit = {
if (resolved.psiElement.isEmpty)
throw new UnresolvedExpansion
if (resolved.expansion.body.isEmpty) {
LOG.warn(s"got empty expansion at ${resolved.expansion.place}, skipping")
return
}
resolved.psiElement.get.getElement match {
case (annot: ScAnnotation) =>
expandAnnotation(annot, resolved.expansion)
case (mc: ScMethodCall) =>
expandMacroCall(mc, resolved.expansion)
case (_) => () // unreachable
}
}
def applyExpansions(expansions: Seq[ResolvedMacroExpansion], triedResolving: Boolean = false)(implicit e: AnActionEvent): Unit = {
expansions match {
case x :: xs =>
try {
applyExpansion(x)
applyExpansions(xs)
}
catch {
case _: UnresolvedExpansion if !triedResolving =>
applyExpansions(tryResolveExpansionPlace(x.expansion) :: xs, triedResolving = true)
case _: UnresolvedExpansion if triedResolving =>
LOG.warn(s"unable to expand ${x.expansion.place}, cannot resolve place, skipping")
applyExpansions(xs)
}
case Nil =>
}
}
def expandMacroCall(call: ScMethodCall, expansion: MacroExpansion)(implicit e: AnActionEvent): PsiElement = {
val blockImpl = ScalaPsiElementFactory.createBlockExpressionWithoutBracesFromText(expansion.body)(PsiManager.getInstance(e.getProject))
val element = call.getParent.addAfter(blockImpl, call)
element match {
case ScBlock(x, _*) => x.putCopyableUserData(EXPANDED_KEY, UndoExpansionData(call.getText))
case _ => // unreachable
}
call.delete()
reformatCode(element)
}
def tryResolveExpansionPlace(expansion: MacroExpansion)(implicit e: AnActionEvent): ResolvedMacroExpansion = {
ResolvedMacroExpansion(expansion, getRealOwner(expansion).map(new IdentitySmartPointer[PsiElement](_)))
}
def tryResolveExpansionPlaces(expansions: Seq[MacroExpansion])(implicit e: AnActionEvent): Seq[ResolvedMacroExpansion] = {
expansions.map(tryResolveExpansionPlace)
}
def isMacroAnnotation(expansion: MacroExpansion)(implicit e: AnActionEvent): Boolean = {
getRealOwner(expansion) match {
case Some(_: ScAnnotation) => true
case Some(_: ScMethodCall) => false
case Some(_) => false
case None => false
}
}
def getRealOwner(expansion: MacroExpansion)(implicit e: AnActionEvent): Option[PsiElement] = {
val virtualFile = VirtualFileManager.getInstance().findFileByUrl("file://" + expansion.place.sourceFile)
val psiFile = PsiManager.getInstance(e.getProject).findFile(virtualFile)
psiFile.findElementAt(expansion.place.offset) match {
// macro method call has offset pointing to '(', not method name
case e: LeafPsiElement if e.findReferenceAt(0) == null =>
def walkUp(elem: PsiElement = e): Option[PsiElement] = elem match {
case null => None
case m: ScMethodCall => Some(m)
case e: PsiElement => walkUp(e.getParent)
}
walkUp()
// handle macro calls with incorrect offset pointing to macro annotation
// most likely it means given call is located inside another macro expansion
case e: LeafPsiElement if expansion.place.macroApplication.matches("^[^\\\\)]+\\\\)$") =>
val pos = e.getContainingFile.getText.indexOf(expansion.place.macroApplication)
if (pos != -1)
Some(e.getContainingFile.findElementAt(pos))
else
None
// macro annotations
case e: LeafPsiElement =>
def walkUp(elem: PsiElement = e): Option[PsiElement] = elem match {
case null => None
case a: ScAnnotation => Some(a)
case e: PsiElement => walkUp(e.getParent)
}
walkUp()
case _ => None
}
}
def ensugarExpansion(text: String): String = {
@tailrec
def applyRules(rules: Seq[(String, String)], input: String = text): String = {
def pat(p: String) = Pattern.compile(p, Pattern.DOTALL | Pattern.MULTILINE)
rules match {
case (pattern, replacement) :: xs => applyRules(xs, pat(pattern).matcher(input).replaceAll(replacement))
case Nil => input
}
}
val rules = Seq(
"\\\\<init\\\\>" -> "this", // replace constructor names
" *\\\\<[a-z]+\\\\> *" -> "", // remove compiler attributes
"super\\\\.this\\\\(\\\\);" -> "this();", // replace super constructor calls
"def this\\\\(\\\\) = \\\\{\\\\s*this\\\\(\\\\);\\\\s*\\\\(\\\\)\\\\s*\\\\};" -> "", // remove invalid super constructor calls
"_root_." -> "" // _root_ package is obsolete
)
applyRules(rules)
}
def deserializeExpansions(implicit event: AnActionEvent): Seq[MacroExpansion] = {
val file = new File(PathManager.getSystemPath + s"/expansion-${event.getProject.getName}")
if (!file.exists()) return Seq.empty
val fs = new BufferedInputStream(new FileInputStream(file))
val os = new ObjectInputStream(fs)
val res = scala.collection.mutable.ListBuffer[MacroExpansion]()
while (fs.available() > 0) {
res += os.readObject().asInstanceOf[MacroExpansion]
}
res
}
  /** Ensures the module owning `file` compiles with the macro-debug compiler
    * flag ([[MacroExpandAction.MACRO_DEBUG_OPTION]]); if the flag was absent
    * it is added to the module's compiler settings and the user is notified
    * that a recompile is required before expansions can be gathered.
    * Does nothing when the file does not belong to a module.
    */
  private def suggestUsingCompilerFlag(e: AnActionEvent, file: PsiFile): Unit = {
    import org.jetbrains.plugins.scala.project._
    import scala.collection._
    val module = ProjectRootManager.getInstance(e.getProject).getFileIndex.getModuleForFile(file.getVirtualFile)
    // Files outside any module (e.g. library sources) cannot be configured.
    if (module == null) return
    val state = module.scalaCompilerSettings.getState
    val options = state.additionalCompilerOptions.to[mutable.ListBuffer]
    if (!options.contains(MacroExpandAction.MACRO_DEBUG_OPTION)) {
      options += MacroExpandAction.MACRO_DEBUG_OPTION
      state.additionalCompilerOptions = options.toArray
      // Persist the updated settings back onto the module.
      module.scalaCompilerSettings.loadState(state)
      windowGroup.createNotification(
        """Macro debugging options have been enabled for current module
          |Please recompile the file to gather macro expansions""".stripMargin, NotificationType.INFORMATION)
        .notify(e.getProject)
    }
  }
  /** Expands macros recorded by the compiler plugin for the given file:
    * loads serialized expansions, keeps those belonging to `psiFile`,
    * de-sugars their bodies, resolves them to PSI elements, then either
    * expands the single macro under the caret or, if the caret is not on a
    * recorded macro, expands every macro in the file.
    */
  private def expandSerialized(e: AnActionEvent, sourceEditor: Editor, psiFile: PsiFile): Unit = {
    implicit val currentEvent: AnActionEvent = e
    // Make sure future compilations will record expansions for this module.
    suggestUsingCompilerFlag(e, psiFile)
    val expansions = deserializeExpansions(e)
    val filtered = expansions.filter { exp =>
      psiFile.getVirtualFile.getPath == exp.place.sourceFile
    }
    val ensugared = filtered.map(e => MacroExpansion(e.place, ensugarExpansion(e.body)))
    val resolved = tryResolveExpansionPlaces(ensugared)
    // if macro is under cursor, expand it, otherwise expand all macros in current file
    // (editor lines are 0-based, recorded positions are 1-based, hence +1)
    resolved
      .find(_.expansion.place.line == sourceEditor.getCaretModel.getLogicalPosition.line + 1)
      .fold(expandAllMacroInCurrentFile(resolved))(expandMacroUnderCursor)
  }
  /** A deserialized expansion paired with the PSI element it resolved to
    * (None when the recorded offset no longer matches the current sources). */
  case class ResolvedMacroExpansion(expansion: MacroExpansion, psiElement: Option[SmartPsiElementPointer[PsiElement]])
  // Signals that an expansion's recorded place could not be mapped to PSI.
  class UnresolvedExpansion extends Exception
}
object MacroExpandAction {
  /** Snapshot of the pre-expansion source text (plus the text of a removed
    * companion object, if any) stored on expanded elements via
    * [[EXPANDED_KEY]] so an expansion can later be undone. */
  case class UndoExpansionData(original: String, companion: Option[String] = None)
  // Compiler option that makes the Scala plugin dump macro expansions to disk.
  val MACRO_DEBUG_OPTION = "-Ymacro-debug-lite"
  // Copyable user-data key carrying the undo snapshot on expanded PSI elements.
  val EXPANDED_KEY = new Key[UndoExpansionData]("MACRO_EXPANDED_KEY")
  private val LOG = Logger.getInstance(getClass)
  // Notification groups used to report progress and failures to the user.
  val windowGroup: NotificationGroup = NotificationGroup.toolWindowGroup("macroexpand", ToolWindowId.PROJECT_VIEW)
  val messageGroup: NotificationGroup = NotificationGroup.toolWindowGroup("macroexpand", ToolWindowId.MESSAGES_WINDOW)
  /** Runs the scala.meta annotation macro behind `annot` and, on success,
    * replaces the annotated definition with the expansion inside a write
    * command action; on failure an error notification is shown instead. */
  def expandMetaAnnotation(annot: ScAnnotation): Unit = {
    import scala.meta._
    val result = MetaExpansionsManager.runMetaAnnotation(annot)
    result match {
      case Right(tree) =>
        // When the expansion is a class/trait followed by a same-named object,
        // the object replaces an existing companion, which must be removed.
        val removeCompanionObject = tree match {
          case Term.Block(Seq(Defn.Class(_, Type.Name(value1), _, _, _), Defn.Object(_, Term.Name(value2), _))) =>
            value1 == value2
          case Term.Block(Seq(Defn.Trait(_, Type.Name(value1), _, _, _), Defn.Object(_, Term.Name(value2), _))) =>
            value1 == value2
          case _ => false
        }
        inWriteCommandAction(annot.getProject) {
          // NOTE(review): `place` is null here; expandAnnotation does not
          // appear to read it on this path — confirm before reusing elsewhere.
          expandAnnotation(annot, MacroExpansion(null, tree.toString.trim, removeCompanionObject))
        }
      case Left(errorMsg) =>
        messageGroup.createNotification(
          s"Macro expansion failed: $errorMsg", NotificationType.ERROR
        ).notify(annot.getProject)
    }
  }
  /** Replaces the definition annotated by `place` with the expansion body,
    * saving the original text (and any removed companion) under
    * [[EXPANDED_KEY]] so the operation is undoable. */
  def expandAnnotation(place: ScAnnotation, expansion: MacroExpansion): Unit = {
    import place.projectContext
    // Leading/trailing leaf tokens (braces etc.) around the parsed block.
    def filter(elt: PsiElement) = elt.isInstanceOf[LeafPsiElement]
    // we can only macro-annotate scala code
    place.getParent.getParent match {
      case holder: ScAnnotationsHolder =>
        val body = expansion.body
        val newPsi = ScalaPsiElementFactory.createBlockExpressionWithoutBracesFromText(body)
        reformatCode(newPsi)
        newPsi.firstChild match {
          case Some(block: ScBlock) => // insert content of block expression(annotation can generate >1 expression)
            // Strip surrounding leaf tokens, keeping only the generated defs.
            val children = block.getChildren.dropWhile(filter).reverse.dropWhile(filter).reverse
            val savedCompanion = if (expansion.removeCompanionObject) {
              val companion = holder match {
                case td: ScTypeDefinition => td.baseCompanionModule
                case _ => None
              }
              // Remove the old companion, remembering its text for undo.
              companion.map { o =>
                o.getParent.getNode.removeChild(o.getNode)
                o.getText
              }
            } else None
            // Attach the undo snapshot to the first real generated element.
            block.children
              .find(_.isInstanceOf[ScalaPsiElement])
              .foreach { p =>
                p.putCopyableUserData(EXPANDED_KEY, UndoExpansionData(holder.getText, savedCompanion))
              }
            holder.getParent.addRangeAfter(children.head, children.last, holder)
            holder.getParent.getNode.removeChild(holder.getNode)
          case Some(psi: PsiElement) => // defns/method bodies/etc...
            val result = holder.replace(psi)
            result.putCopyableUserData(EXPANDED_KEY, UndoExpansionData(holder.getText))
          case None => LOG.warn(s"Failed to parse expansion: $body")
        }
      case other => LOG.warn(s"Unexpected annotated element: $other at ${other.getText}")
    }
  }
  /** Reformats freshly-generated PSI and deletes semicolons that are
    * immediately followed by a line break (they are redundant after
    * reformatting). Returns the reformatted element. */
  private def reformatCode(psi: PsiElement): PsiElement = {
    val res = CodeStyleManager.getInstance(psi.getProject).reformat(psi)
    val tobeDeleted = new ArrayBuffer[PsiElement]
    val v = new PsiElementVisitor {
      override def visitElement(element: PsiElement): Unit = {
        if (element.getNode.getElementType == ScalaTokenTypes.tSEMICOLON) {
          val file = element.getContainingFile
          val nextLeaf = file.findElementAt(element.getTextRange.getEndOffset)
          // NOTE(review): the literal "\\n" (backslash + n) looks like an
          // escaping artifact — a real newline "\n" was probably intended;
          // verify against the original sources.
          if (nextLeaf.isInstanceOf[PsiWhiteSpace] && nextLeaf.getText.contains("\\n")) {
            tobeDeleted += element
          }
        }
        // Depth-first traversal over the whole reformatted subtree.
        element.acceptChildren(this)
      }
    }
    v.visitElement(res)
    // Deletion is deferred so the traversal is not invalidated mid-walk.
    tobeDeleted.foreach(_.delete())
    res
  }
}
|
loskutov/intellij-scala
|
src/org/jetbrains/plugins/scala/lang/macros/expansion/MacroExpandAction.scala
|
Scala
|
apache-2.0
| 14,194 |
package com.getjenny.starchat.resources
/**
* Created by Angelo Leto <[email protected]> on 14/11/16.
*/
import akka.http.scaladsl.model.StatusCodes
import akka.http.scaladsl.server.Route
import akka.pattern.CircuitBreaker
import com.getjenny.starchat.entities.io.{IndexManagementResponse, Permissions, ReturnMessageData}
import com.getjenny.starchat.routing._
import com.getjenny.starchat.services.SystemIndexManagementService
import scala.util.{Failure, Success}
/** HTTP routes for administering StarChat's *system* Elasticsearch indices:
  * listing, existence check, and create / refresh / update / delete
  * operations. Every route requires basic authentication and "admin"
  * read or write permission; service calls run behind a circuit breaker.
  */
trait SystemIndexManagementResource extends StarChatResource {
  // Singleton service performing the actual index operations.
  private[this] val systemIndexManagementService: SystemIndexManagementService.type = SystemIndexManagementService
  // URL path prefix for the management routes below.
  private[this] val SystemIndexManagement = "system_index_management"
  /** GET /system_indices — lists system indices (admin read permission). */
  def systemGetIndexesRoutes: Route = handleExceptions(routesExceptionHandler) {
    pathPrefix("system_indices") {
      pathEnd {
        get {
          authenticateBasicAsync(realm = authRealm,
            authenticator = authenticator.authenticator) { user =>
            authorizeAsync(_ =>
              authenticator.hasPermissions(user, "admin", Permissions.read)) {
              val breaker: CircuitBreaker = StarChatCircuitBreaker.getCircuitBreaker()
              onCompleteWithBreakerFuture(breaker)(systemIndexManagementService.indices) {
                case Success(t) =>
                  completeResponse(StatusCodes.OK, StatusCodes.BadRequest, Option {
                    t
                  })
                case Failure(e) => completeResponse(StatusCodes.BadRequest,
                  Option {
                    ReturnMessageData(code = 100, message = e.getMessage)
                  })
              }
            }
          }
        }
      }
    }
  }
  /** Routes under /system_index_management:
    *  - POST /system_index_management/{refresh|create} (admin write)
    *  - GET    /system_index_management — health/existence check (admin read)
    *  - DELETE /system_index_management — remove indices (admin write)
    *  - PUT    /system_index_management — update index mappings (admin write)
    * The optional repeated `indexSuffix` query parameter restricts the
    * operation to a subset of the system indices.
    */
  def systemIndexManagementRoutes: Route = handleExceptions(routesExceptionHandler) {
    pathPrefix(SystemIndexManagement) {
      path(Segment) { operation: String =>
        post {
          authenticateBasicAsync(realm = authRealm,
            authenticator = authenticator.authenticator) { user =>
            authorizeAsync(_ =>
              authenticator.hasPermissions(user, "admin", Permissions.write)) {
              parameters('indexSuffix.as[String].*) { indexSuffix =>
                operation match {
                  case "refresh" =>
                    val breaker: CircuitBreaker = StarChatCircuitBreaker.getCircuitBreaker()
                    onCompleteWithBreakerFuture(breaker)(systemIndexManagementService.refresh(indexSuffix.toSet)) {
                      case Success(t) => completeResponse(StatusCodes.OK, StatusCodes.BadRequest, Option {
                        t
                      })
                      case Failure(e) => completeResponse(StatusCodes.BadRequest,
                        Option {
                          IndexManagementResponse(message = e.getMessage)
                        })
                    }
                  case "create" =>
                    val breaker: CircuitBreaker = StarChatCircuitBreaker.getCircuitBreaker()
                    onCompleteWithBreakerFuture(breaker)(systemIndexManagementService.create(indexSuffix.toSet)) {
                      // 201 Created on success, unlike the other operations.
                      case Success(t) => completeResponse(StatusCodes.Created, StatusCodes.BadRequest, Option {
                        t
                      })
                      case Failure(e) => completeResponse(StatusCodes.BadRequest,
                        Option {
                          IndexManagementResponse(message = e.getMessage)
                        })
                    }
                  case _ => completeResponse(StatusCodes.BadRequest,
                    Option {
                      IndexManagementResponse(message = "index(system) Operation not supported: " + operation)
                    })
                }
              }
            }
          }
        }
      } ~
        pathEnd {
          get {
            authenticateBasicAsync(realm = authRealm,
              authenticator = authenticator.authenticator) { user =>
              authorizeAsync(_ =>
                authenticator.hasPermissions(user, "admin", Permissions.read)) {
                val breaker: CircuitBreaker = StarChatCircuitBreaker.getCircuitBreaker()
                onCompleteWithBreakerFuture(breaker)(systemIndexManagementService.check()) {
                  case Success(t) => completeResponse(StatusCodes.OK, StatusCodes.BadRequest, Option {
                    t
                  })
                  case Failure(e) => completeResponse(StatusCodes.BadRequest,
                    Option {
                      IndexManagementResponse(message = e.getMessage)
                    })
                }
              }
            }
          } ~
            delete {
              authenticateBasicAsync(realm = authRealm,
                authenticator = authenticator.authenticator) { user =>
                authorizeAsync(_ =>
                  authenticator.hasPermissions(user, "admin", Permissions.write)) {
                  parameters('indexSuffix.as[String].*) { indexSuffix =>
                    val breaker: CircuitBreaker = StarChatCircuitBreaker.getCircuitBreaker()
                    onCompleteWithBreakerFuture(breaker)(systemIndexManagementService.remove(indexSuffix.toSet)) {
                      case Success(t) => completeResponse(StatusCodes.OK, StatusCodes.BadRequest, Option {
                        t
                      })
                      case Failure(e) => completeResponse(StatusCodes.BadRequest,
                        Option {
                          IndexManagementResponse(message = e.getMessage)
                        })
                    }
                  }
                }
              }
            } ~
            put {
              authenticateBasicAsync(realm = authRealm,
                authenticator = authenticator.authenticator) { user =>
                authorizeAsync(_ =>
                  authenticator.hasPermissions(user, "admin", Permissions.write)) {
                  parameters('indexSuffix.as[String].*) { indexSuffix =>
                    val breaker: CircuitBreaker = StarChatCircuitBreaker.getCircuitBreaker()
                    onCompleteWithBreakerFuture(breaker)(systemIndexManagementService.update(indexSuffix.toSet)) {
                      case Success(t) => completeResponse(StatusCodes.OK, StatusCodes.BadRequest, Option {
                        t
                      })
                      case Failure(e) => completeResponse(StatusCodes.BadRequest,
                        Option {
                          IndexManagementResponse(message = e.getMessage)
                        })
                    }
                  }
                }
              }
            }
        }
    }
  }
}
|
GetJenny/starchat
|
src/main/scala/com/getjenny/starchat/resources/SystemIndexManagementResource.scala
|
Scala
|
gpl-2.0
| 6,734 |
package org.odfi.indesign.module.git
import org.odfi.indesign.core.module.IndesignModule
import org.odfi.indesign.core.harvest.Harvest
import org.odfi.indesign.core.harvest.fs.FileSystemHarvester
/** Indesign module for Git integration. All harvester registrations are
  * currently disabled; the module is a placeholder with no active behavior
  * beyond registering itself as a module. */
object GitModule extends IndesignModule {
  this.onInit {
    //Harvest.registerAutoHarvesterClass(classOf[FileSystemHarvester], classOf[GitHarvester])
    //Harvest.registerAutoHarvesterClass(classOf[GitHarvester], classOf[MavenProjectHarvester])
    //WWWViewHarvester.deliverDirect(new GitWWWView)
  }
}
|
opendesignflow/indesign
|
indesign-git/src/main/scala/org/odfi/indesign/module/git/GitModule.scala
|
Scala
|
gpl-3.0
| 505 |
package io.github.andrebeat.pool
/** Runs the shared [[PoolSpec]] contract tests against [[SimplePool]],
  * wiring the generic factory parameters through to SimplePool's
  * constructor (note the capacity/referenceType argument order swap). */
class SimplePoolSpec extends PoolSpec[SimplePool] {
  // Factory used by the base spec; defaults are no-op reset/dispose and an
  // always-healthy health check.
  def pool[A <: AnyRef](
    capacity: Int,
    factory: () => A,
    referenceType: ReferenceType = ReferenceType.Strong,
    reset: A => Unit = { _: A => () },
    dispose: A => Unit = { _: A => () },
    healthCheck: A => Boolean = { _: A => true }) =
    SimplePool(capacity, referenceType, factory, reset, dispose, healthCheck)
}
|
andrebeat/scala-pool
|
src/test/scala/io/github/andrebeat/pool/SimplePoolSpec.scala
|
Scala
|
mit
| 421 |
package com.github.mdr.mash.assist
import com.github.mdr.mash.compiler.{ CompilationSettings, CompilationUnit, Compiler }
import com.github.mdr.mash.inference.Type
import com.github.mdr.mash.lexer.{ MashLexer, Token }
import com.github.mdr.mash.parser.AbstractSyntax._
import com.github.mdr.mash.parser.ConcreteSyntax
import com.github.mdr.mash.runtime.MashValue
import com.github.mdr.mash.utils.Utils._
import com.github.mdr.mash.utils.{ Region, Utils }
/** Computes invocation assistance (calling-syntax hints) for the function
  * invocation nearest the cursor: compiles the buffer forgivingly, finds the
  * innermost invocation-like expression containing the position, and maps the
  * callee's inferred type to a describable [[Assistable]]. */
object InvocationAssistance {
  /** Entry point: assistance for the nearest enclosing invocation at `pos`,
    * or None when the position is not inside any invocation. */
  def getCallingSyntaxOfNearestFunction(s: String,
                                        pos: Int,
                                        bindings: Map[String, MashValue],
                                        mish: Boolean): Option[AssistanceState] =
    for {
      functionType ← getTypeOfNearestFunction(s, pos, bindings, mish)
      assistanceState ← assistInvocation(functionType)
    } yield assistanceState
  /** Compiles `s` with type inference and returns the type of the callee of
    * the innermost invocation containing `pos`, if any. */
  private[assist] def getTypeOfNearestFunction(s: String,
                                               pos: Int,
                                               bindings: Map[String, MashValue],
                                               mish: Boolean): Option[Type] = {
    val tokens = MashLexer.tokenise(s, forgiving = true, mish = mish).rawTokens
    val settings = CompilationSettings(inferTypes = true)
    val expr = Compiler.compileForgiving(CompilationUnit(s, mish = mish), bindings, settings)
    for {
      invocationExpr ← findInnermostInvocationContaining(expr, tokens, pos)
      functionType ← getFunctionType(invocationExpr)
    } yield functionType
  }
  // For a full invocation, the callee's type; otherwise fall back to the
  // expression's own (pre-invocation) type.
  private def getFunctionType(expr: Expr): Option[Type] = expr match {
    case InvocationExpr(f, _, _, _) ⇒ f.typeOpt
    case _ ⇒ expr.preInvocationTypeOpt orElse expr.typeOpt
  }
  /** Finds all invocation-like expressions whose (right-expanded) region
    * contains `pos` and returns the smallest one — i.e. the innermost. */
  private def findInnermostInvocationContaining(program: AstNode, tokens: Seq[Token], pos: Int): Option[Expr] = {
    val enclosingInvocations = program.findAll {
      case expr: InvocationExpr if expandedRegionContains(expr, tokens, pos) ⇒ expr
      case expr: Expr if expr.preInvocationTypeOpt.isDefined && expandedRegionContains(expr, tokens, pos) ⇒ expr
      case expr: Expr if hasFunctionType(expr) && expandedRegionContains(expr, tokens, pos) ⇒ expr
    }
    // Missing source info sorts last so real regions win the minimum.
    def size(expr: Expr): Int = expr.sourceInfoOpt.flatMap(_.node.regionOpt).map(_.length).getOrElse(Integer.MAX_VALUE)
    minBy(enclosingInvocations, size)
  }
  // True when the expression's type is any callable (builtin or user-defined).
  private def hasFunctionType(e: Expr): Boolean =
    e.typeOpt.collect {
      case Type.BuiltinFunction(_) | Type.BoundBuiltinMethod(_, _) ⇒ true
      case _: Type.UserDefinedFunction | _: Type.BoundUserDefinedMethod ⇒ true
    }.isDefined
  private def expandedRegionContains(expr: Expr, tokens: Seq[Token], pos: Int): Boolean =
    expr.sourceInfoOpt.exists(info ⇒ rightExpandedRegion(info.node, tokens) contains pos)
  /**
   * Expand the region of the node to include any whitespace / comment / EOF tokens to the right of it.
   */
  private def rightExpandedRegion(node: ConcreteSyntax.AstNode, allTokens: Seq[Token]): Region = {
    val rightmostTokenOpt = node.tokens.lastOption.map { lastToken ⇒
      val tokensAfterLast = getTokensAfterLast(lastToken, allTokens)
      findRightmostToken(tokensAfterLast).getOrElse(lastToken)
    }
    // Grow EOF tokens to pick up cursors at the end of the buffer, which is one more than the size of the buffer contents.
    def getRegion(token: Token) = token.region.when(token.isEof, _ grow 1)
    (rightmostTokenOpt.map(getRegion) getOrElse Region(0, 0)) merge (node.regionOpt getOrElse Region(0, 0))
  }
  // Tokens strictly after `lastToken` in the buffer's token stream.
  private def getTokensAfterLast(lastToken: Token, remainingTokens: Seq[Token]): Seq[Token] =
    Utils.indexOf(remainingTokens, lastToken)
      .map(i => remainingTokens.drop(i + 1))
      .getOrElse(Seq())
  // Last token of the leading run of whitespace/comment/EOF tokens.
  private def findRightmostToken(remainingTokens: Seq[Token]): Option[Token] =
    remainingTokens.takeWhile(token ⇒ token.isWhitespace || token.isComment || token.isEof).lastOption
  private def assistInvocation(functionType: Type): Option[AssistanceState] =
    getAssistable(functionType).map(AssistanceState(_))
  /** Maps a callee type to its describable form; sequences delegate to their
    * element type so `xs.map` style calls get assistance too. */
  private def getAssistable(functionType: Type): Option[Assistable] =
    functionType match {
      case Type.Seq(elementType) ⇒ getAssistable(elementType)
      case Type.BuiltinFunction(f) ⇒ Some(Assistable.Function(f))
      case f: Type.UserDefinedFunction ⇒ Some(Assistable.FunctionType(f))
      case Type.BoundBuiltinMethod(_, method) ⇒ Some(Assistable.Method(method))
      case Type.BoundUserDefinedMethod(_, method) ⇒ Some(Assistable.MethodType(method))
      case userClass: Type.UserClass ⇒ Some(Assistable.ConstructorType(userClass))
      case _ ⇒ None
      // TODO: Handle .new calls on builtin classes
    }
}
|
mdr/mash
|
src/main/scala/com/github/mdr/mash/assist/InvocationAssistance.scala
|
Scala
|
mit
| 4,926 |
package com.guidewire.tools.marathon.client
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfterAll, SeveredStackTraces, FunSuite}
import org.junit.runner.RunWith
import org.scalatest.matchers.ShouldMatchers
import dispatch._, Defaults._
import play.api.libs.json._
import play.api.libs.functional._
import scalaz._
/** Integration tests for the Marathon REST client. Each networked test is
  * wrapped in `ignoreIfHostNotUp`, so the suite silently skips when no live
  * Marathon instance is reachable; only the JSON round-trip test always runs.
  * Tests that create apps clean up after themselves and flag "[CLEANUP
  * REQUIRED]" on failure. */
@RunWith(classOf[JUnitRunner])
class BasicFunctionalitySuite extends FunSuite
  with ShouldMatchers
  with SeveredStackTraces {
  import api._
  import ClientScalaTest._
  // Basic connectivity: querying apps must succeed and return well-formed apps.
  test("Can connect to running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val apps =
      blockAndValidateSuccess {
        Marathon.Apps.query(Connection(host, port))
      }
    for(app <- apps) {
      app should not be null
      app.id should not be ""
      //println(s"$app")
    }
  })
  // Lifecycle tests: each destroys any leftover app first, runs the operation
  // under test, then destroys the app again.
  test("Can call start on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val List(start, cleanup) =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          _ <- Marathon.Apps.destroy("scalatest-app-start")
          start <- Marathon.Apps.start(App("scalatest-app-start", "echo scalatest-app-start"))
          cleanup <- Marathon.Apps.destroy("scalatest-app-start")
        } yield List(start, cleanup)
      }
    withClue(s"[CLEANUP REQUIRED] Unable to fully process test on <$host:$port>: ") {
      start.isSuccess should be (true)
      cleanup.isSuccess should be (true)
    }
  })
  test("Can call scale on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val result =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          _ <- Marathon.Apps.destroy("scalatest-app-scale")
          created <- Marathon.Apps.start(App("scalatest-app-scale", "echo scalatest-app-scale"))
          scale <- Marathon.Apps.scale("scalatest-app-scale", 3)
          cleanup <- Marathon.Apps.destroy("scalatest-app-scale")
        } yield List(created, scale, cleanup)
      }
    withClue(s"[CLEANUP REQUIRED] Unable to fully process test on <$host:$port>: ") {
      result.forall(_.isSuccess) should be (true)
    }
  })
  test("Can call suspend on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val result =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          _ <- Marathon.Apps.destroy("scalatest-app-suspend")
          created <- Marathon.Apps.start(App("scalatest-app-suspend", "echo scalatest-app-suspend"))
          suspend <- Marathon.Apps.suspend("scalatest-app-suspend")
          cleanup <- Marathon.Apps.destroy("scalatest-app-suspend")
        } yield List(created, suspend, cleanup)
      }
    withClue(s"[CLEANUP REQUIRED] Unable to fully process test on <$host:$port>: ") {
      result.forall(_.isSuccess) should be (true)
    }
  })
  test("Can call destroy (stop) on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val result =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          _ <- Marathon.Apps.destroy("scalatest-app-destroy")
          created <- Marathon.Apps.start(App("scalatest-app-destroy", "echo scalatest-app-destroy"))
          cleanup <- Marathon.Apps.destroy("scalatest-app-destroy")
        } yield List(created, cleanup)
      }
    withClue(s"[CLEANUP REQUIRED] Unable to fully process test on <$host:$port>: ") {
      result.forall(_.isSuccess) should be (true)
    }
  })
  // Debug endpoints: leadership status and leader URL.
  test("Can call debug isLeader on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    blockAndValidateSuccess {
      implicit val cn = Connection(host, port)
      for {
        done <- Marathon.Debug.isLeader
      } yield done
    }
  })
  test("Can call debug leaderUrl on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val result =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          done <- Marathon.Debug.leaderUrl
        } yield done
      }
    result should not be ("")
  })
  // Endpoint listing, globally and per app.
  test("Can call endpoints on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val result =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          done <- Marathon.Endpoints.query
        } yield {
          done.isSuccess should be (true)
          done
        }
      }
    for(endpoint <- result) {
      endpoint should not be (null)
      endpoint.id should not be ("")
      //println(endpoint)
    }
  })
  test("Can call endpoints for app on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val result =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          _ <- Marathon.Apps.start(App("scalatest-endpoints-queryForApp", "echo scalatest endpoints queryForApp"))
          query <- Marathon.Endpoints.queryForApp("scalatest-endpoints-queryForApp")
          cleanup <- Marathon.Apps.destroy("scalatest-endpoints-queryForApp")
        } yield List(query, cleanup)
      }
    result.forall(_ ne null) should be (true)
  })
  // Task listing and task kill.
  test("Can call tasks on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val result =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          done <- Marathon.Tasks.query
        } yield {
          done.isSuccess should be (true)
          done
        }
      }
    for((id, tasks) <- result) {
      id should not be (null)
      id should not be ("")
      tasks should not be (null)
      tasks.foreach { task =>
        task.host should not be ("")
      }
      //println(s"$id: $tasks")
    }
  })
  test("Can kill tasks on running Marathon") (ignoreIfHostNotUp { (host, port) =>
    val result =
      blockAndValidateSuccess {
        implicit val cn = Connection(host, port)
        for {
          done <- Marathon.Tasks.kill("scalatest-tasks-kill", host)
        } yield {
          done.isSuccess should be (true)
          done
        }
      }
    for(task <- result) {
      task.id should not be (null)
      task.id should not be ("")
      //println(s"$task")
    }
  })
  // Pure (non-networked) test: serializing and re-parsing apps must be stable.
  test("Testing Json serialization idempotency") {
    import api.v1._
    val ORIGINAL = Seq(App("100", "run", Map(), 1, 4, 200, "", List(), List()))
    var appsPayload = ORIGINAL
    for(_ <- 0 until 20) {
      val json = Json.toJson(appsPayload)
      val pp: String = Json.prettyPrint(json)
      appsPayload = Json.parse(pp).as[Seq[App]]
      appsPayload should equal (ORIGINAL)
    }
  }
//  test("Ports") (ignoreIfHostNotUp { (host, port) =>
//    val result =
//      blockAndValidateSuccess {
//        implicit val cn = Connection(host, port)
//        for {
//          _ <- Marathon.Apps.destroy("ports-test-1")
//          done <- Marathon.Apps.start(App(
//              id = "ports-test-1"
//            , cmd = "while sleep 1; do echo \\"MY PORTS: $PORTS\\" >> /tmp/ports.log; done"
//            , instances = 1
//            , cpus = 0.1
//            , mem = 32
//            , ports = Seq.fill(10)(0) //Asks for 10 ports to use
//          ))
//        } yield done
//      }
//    result.success should be (true)
//  })
//  test("Docker") (ignoreIfHostNotUp { (host, port) =>
//    val result =
//      blockAndValidateSuccess {
//        implicit val cn = Connection(host, port)
//        for {
//          _ <- Marathon.Apps.destroy("mesos-docker-gitmo-api")
//          done <- Marathon.Apps.start(App(
//              id = "mesos-docker-gitmo-api"
//            , cmd = "http://get.backyard.guidewire.com/backyard/requests.json"
//            , instances = 1
//            , cpus = 1.0
//            , mem = 4096
//            , container = Some(Container("docker:///docker-registry.backyard.guidewire.com/gitmo"))
//            , executor = "/var/lib/mesos/executors/mesos-docker-gitmo"
//            , ports = Seq(0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
//          ))
//        } yield done
//      }
//    result.success should be (true)
//  })
  // Fixture-driven parser tests: deserialize canned JSON responses.
  for(i <- 1 to 3)
    test(f"Can deserialize simple app lists (/marathon-apps-query-apps-${i}%03d.json)")(validateResourceParse(f"/marathon-apps-query-apps-${i}%03d.json")(api.v1.parseQueryAppsResponse))
  for(i <- 1 to 1)
    test(f"Can properly handle errors when POSTing to /app/start (/marathon-apps-multiple-error-response-post-start-${i}%03d.json)")(validateErrorResponseResourceParse(f"/marathon-apps-multiple-error-response-post-start-${i}%03d.json")(api.v1.parsePostAppStartResponse))
}
|
Guidewire/marathon-client
|
src/test/scala/com/guidewire/tools/marathon/client/BasicFunctionalitySuite.scala
|
Scala
|
apache-2.0
| 8,798 |
/*
* Copyright 2017-2020 Aleksey Fomkin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package korolev.util
import korolev.Context.ElementId
import korolev.util.JsCode.{Element, Part}
import scala.annotation.tailrec
/** A cons-list of JavaScript code fragments, where each cell is either a raw
  * string [[JsCode.Part]] or a DOM-element reference [[JsCode.Element]] that
  * is rendered lazily once element ids are known.
  * Fix: removed a redundant `asInstanceOf[ElementId]` — the matched field is
  * already statically typed as ElementId. */
sealed trait JsCode {
  /** Prepends a raw code fragment. */
  def ::(s: String): Part = Part(s, this)
  /** Prepends an element reference. */
  def ::(s: ElementId): Element = Element(s, this)
  /** Renders the whole chain to a JS string, resolving each element
    * reference to `Korolev.element('<id>')` via `elementToId`. */
  def mkString(elementToId: ElementId => levsha.Id): String = {
    @tailrec
    def aux(acc: String, jsCode: JsCode): String = jsCode match {
      case JsCode.End => acc
      case JsCode.Part(x, xs) => aux(acc + x, xs)
      case JsCode.Element(x, xs) =>
        val id = elementToId(x)
        aux(acc + s"Korolev.element('$id')", xs)
    }
    aux("", this)
  }
}
/** Constructors and cell types for [[JsCode]].
  * Fix: removed a redundant `asInstanceOf[ElementId]` in `reverse` — the
  * matched field is already statically typed as ElementId. */
object JsCode {
  /** Raw JavaScript fragment cell. */
  case class Part(value: String, tail: JsCode) extends JsCode
  /** Element-reference cell, resolved at render time. */
  case class Element(elementId: ElementId, tail: JsCode) extends JsCode
  /** Terminator of the chain. */
  case object End extends JsCode
  /** Wraps a single raw fragment. */
  def apply(s: String): JsCode = s :: End
  /** Builds a chain from string-interpolation parts and their interleaved
    * inclusions; ElementIds stay as element references, everything else is
    * stringified. Built reversed, then flipped once at the end. */
  def apply(parts: List[String], inclusions: List[Any]): JsCode = {
    @tailrec
    def combine(acc: JsCode, ps: List[String], is: List[Any]): JsCode = ps match {
      case Nil => acc
      case px :: pxs =>
        is match {
          case (ix: ElementId) :: ixs => combine(ix :: px :: acc, pxs, ixs)
          case (ix: String) :: ixs => combine(ix :: px :: acc, pxs, ixs)
          case ix :: ixs => combine(ix.toString :: px :: acc, pxs, ixs)
          case Nil => combine(px :: acc, pxs, Nil)
        }
    }
    @tailrec
    def reverse(acc: JsCode, jsCode: JsCode): JsCode = jsCode match {
      case Part(x, xs) => reverse(x :: acc, xs)
      case Element(x, xs) => reverse(x :: acc, xs)
      case End => acc
    }
    reverse(End, combine(End, parts, inclusions))
  }
}
|
fomkin/korolev
|
modules/korolev/src/main/scala/korolev/util/JsCode.scala
|
Scala
|
apache-2.0
| 2,281 |
package domain.astimport
/**
* GraPHPizer source code analytics engine
* Copyright (C) 2015 Martin Helmich <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import domain.astimport.DocComment.{VarTag, ReturnTag, ParamTag, Tag}
import scala.util.control.Breaks._
/** Parses PHPDoc-style doc comments into a structured [[DocComment]]:
  * generic tags plus typed @param / @return / @var tags. Only the first
  * @return and first @var are kept; @param tags are keyed by variable name.
  * NOTE(review): several literals below contain doubled backslashes (e.g.
  * split "\\n") that look like an escaping artifact of this copy — verify
  * the regexes and the mkString separator against the original sources. */
class DocCommentParser {
  // Matches a type name, optionally array-suffixed ("Foo[]") or generic ("Foo<Bar>").
  protected val typePattern = """([a-zA-Z0-9_\\\\]+(?:\\[\\]|<[a-zA-Z0-9_\\\\]+>)?)"""
  protected val genericCommentPattern = "@([a-zA-Z0-9]+)\\\\s+(.*)".r
  protected val paramCommentPattern = ("""@param\\s+""" + typePattern + """\\s+\\$?([a-zA-Z0-9_]+)\\s*(.*)""").r
  protected val returnCommentPattern = ("""@return\\s+""" + typePattern + """\\s*(.*)""").r
  protected val varCommentPattern = ("""@var\\s+""" + typePattern + """\\s*(.*)""").r
  /** Parses a raw doc-comment body (including comment markers) into a DocComment. */
  def parse(contents: String): DocComment = {
    val processed = cleanup(contents)
    // Generic tags: every @tag except @return/@param (those get typed handling).
    val tags = genericCommentPattern findAllMatchIn processed filter { _ group 1 match {
      case "return"|"param" => false
      case _ => true
    }} map { m =>
      Tag(m group 1, m group 2)
    }
    // @param tags keyed by variable name; later duplicates win via toMap.
    val params = paramCommentPattern findAllMatchIn processed map { m =>
      ParamTag(m group 2, m group 1, m group 3)
    } map { t => (t.variable, t) } toMap
    val resultTags = returnCommentPattern findAllMatchIn processed map { m =>
      ReturnTag(m group 1, m group 2)
    } toSeq
    // Only the first @return (if any) is retained.
    val returnTag = if (resultTags.nonEmpty) Option(resultTags.head) else None
    val varTags = varCommentPattern findAllMatchIn processed map { m =>
      VarTag(m group 1, m group 2)
    } toSeq
    // Only the first @var (if any) is retained.
    val varTag = if (varTags.nonEmpty) Option(varTags.head) else None
    new DocComment(tags = tags.toSeq, paramTags = params, returnTag = returnTag, varTag = varTag)
  }
  /** Strips comment decoration: the opening "/**", the closing "*&#47;", and the
    * leading "*" of each interior line, then trims surrounding whitespace. */
  protected def cleanup(contents: String): String = {
    val processLine = (l: String) => {
      var processed = l
      processed = "^\\\\s*/\\\\*\\\\*\\\\s*".r.replaceAllIn(processed, "")
      processed = "\\\\s*\\\\*/\\\\s*$".r.replaceAllIn(processed, "")
      processed = "^\\\\s*\\\\*\\\\s*".r.replaceAllIn(processed, "")
      processed
    }
    val processed = contents split "\\n" map { processLine } mkString "\\n"
    """^\\s*""".r.replaceFirstIn("""\\s*$""".r.replaceFirstIn(processed, ""), "")
  }
}
|
martin-helmich/graphpizer-server
|
app/domain/astimport/DocCommentParser.scala
|
Scala
|
gpl-3.0
| 2,809 |
package com.twitter.scalding.quotation
class QuotedMacroTest extends Test { // WARNING: tests assert source line numbers — do not insert lines above nestedQuotedFuction
  val test = new TestClass // quoted call sites below; their positions are asserted
  val nullary = test.nullary // position asserted in "nullary" test
  val parametrizedNullary = test.parametrizedNullary[Int] // position asserted below
  val withParam = test.withParam[Person, String](_.name)._1 // position asserted below
  val quotedFunction =
    Quoted.function[Person, Contact](_.contact) // position asserted below
  val nestedQuotedFuction =
    Quoted.function[Person, Contact](p => quotedFunction(p)) // last position-sensitive line
  // Sample value used to check that quoted functions still evaluate correctly.
  val person = Person("John", Contact("33223"), None)
  // Methods whose implicit Quoted parameter is materialized by the macro at
  // each call site above.
  class TestClass {
    def nullary(implicit q: Quoted) = q
    def parametrizedNullary[T](implicit q: Quoted) = q
    def withParam[T, U](f: T => U)(implicit q: Quoted) = (q, f)
  }
  // Each test checks the macro-captured call-site position, the projections
  // (accessed fields) it recorded, and the captured source text.
  "quoted method" - {
    "nullary" in {
      nullary.position.toString mustEqual "QuotedMacroTest.scala:7"
      nullary.projections.set mustEqual Set.empty
      nullary.text mustEqual Some("nullary")
    }
    "parametrizedNullary" in {
      parametrizedNullary.position.toString mustEqual "QuotedMacroTest.scala:8"
      parametrizedNullary.projections.set mustEqual Set.empty
      parametrizedNullary.text mustEqual Some("parametrizedNullary[Int]")
    }
    "withParam" in {
      withParam.position.toString mustEqual "QuotedMacroTest.scala:9"
      withParam.projections.set mustEqual Set(Person.nameProjection)
      withParam.text mustEqual Some("withParam[Person, String](_.name)")
    }
  }
  "quoted function" - {
    "simple" in {
      val q = quotedFunction.quoted
      q.position.toString mustEqual "QuotedMacroTest.scala:12"
      q.projections.set mustEqual Set(Person.contactProjection)
      q.text mustEqual Some("[Person, Contact](_.contact)")
      // Quoting must not change runtime behavior of the function itself.
      quotedFunction(person) mustEqual person.contact
    }
    "nested" in {
      val q = nestedQuotedFuction.quoted
      q.position.toString mustEqual "QuotedMacroTest.scala:15"
      q.projections.set mustEqual Set(Person.contactProjection)
      q.text mustEqual Some("[Person, Contact](p => quotedFunction(p))")
      nestedQuotedFuction(person) mustEqual person.contact
    }
  }
  // Calling the quotation API reflectively-by-name must not compile.
  "invalid quoted method call" in {
    "Quoted.method" mustNot compile
  }
}
|
twitter/scalding
|
scalding-quotation/src/test/scala/com/twitter/scalding/quotation/QuotedMacroTest.scala
|
Scala
|
apache-2.0
| 2,099 |
/* vim: set ts=2 et sw=2 sts=2 fileencoding=utf-8: */
package exceler.excel
import scala.collection.JavaConverters._
import scala.language.implicitConversions
import scala.util.control.Exception._
import org.apache.poi.ss.usermodel._
import org.apache.poi.xssf.usermodel._
import org.apache.poi.hssf.usermodel._
import org.apache.xmlbeans.XmlObject
import org.openxmlformats.schemas.drawingml.x2006.spreadsheetDrawing.{
CTShape
}
import org.openxmlformats.schemas.drawingml.x2006.main.{
CTSolidColorFillProperties
}
package excellib.Shape {
  /** Implicit enrichments adding shape-related helpers to POI sheets and
    * shapes; import `ImplicitConversions._` to activate them. */
  trait ImplicitConversions {
    implicit class ToExcelShapeSheet(val sheet: Sheet)
      extends ExcelShapeSheetExtra
    implicit class ToExcelShapeXSSFShapeExtra(val shape:XSSFShape)
      extends ExcelShapeXSSFShapeExtra
    implicit class ToXSSFSimpleShapeExtra(
      val shape: XSSFSimpleShape) extends ExcelSimpleShapeExtra
  }
  // Ready-made instance for direct `import ...ImplicitConversions._`.
  object ImplicitConversions extends ImplicitConversions
}
import excellib.Shape.ImplicitConversions._
/** Thin subclass exposing XSSFSimpleShape's protected constructor so shapes
  * can be created directly from a drawing and its underlying CTShape. */
class ExcelSimpleShape(
  drawing:XSSFDrawing,
  ctShape:CTShape
) extends XSSFSimpleShape(drawing, ctShape)
trait Helper {
  /** Converts a 3-byte colour value into a list of three unsigned ints
    * (0..255). Throws AssertionError unless exactly 3 bytes are given. */
  def byte3ToRGB(b3:Array[Byte]):List[Int] = {
    assert(b3.length == 3)
    // `& 0xff` maps the signed byte onto its unsigned 0..255 value —
    // exactly equivalent to the previous (b.toInt + 256) % 256, but idiomatic.
    b3.toList.map((b:Byte) => b.toInt & 0xff)
  }
  /** Extracts an RGB triple from a solid-fill colour definition.
    * Only sRGB and system colours are actually decoded; the remaining colour
    * models are unimplemented (ToDo) and fall back to black (0, 0, 0),
    * printing a marker to stdout. */
  def solidColorToRGB(scfp:CTSolidColorFillProperties) = {
    if (scfp.isSetSrgbClr)
      byte3ToRGB(scfp.getSrgbClr.getVal)
    else if (scfp.isSetHslClr) {
      println("Hsl")
      List(0, 0, 0) // ToDo
    }
    else if (scfp.isSetPrstClr) {
      println("Prst")
      List(0, 0, 0) // ToDo
    }
    else if (scfp.isSetSchemeClr) {
      println("Scheme")
      List(0, 0, 0) // ToDo
    }
    else if (scfp.isSetSysClr)
      byte3ToRGB(scfp.getSysClr.getLastClr)
    else if (scfp.isSetScrgbClr) {
      println("Scrgb")
      List(0, 0, 0) // ToDo
    }
    else {
      println("???")
      List(0, 0, 0) // ToDo
    }
  }
}
/** Extra operations on a bound XSSFSimpleShape: bulk property copy and
  * colour/line-width accessors decoded from the raw CTShape model. */
trait ExcelSimpleShapeExtra extends Helper {
  val shape:XSSFSimpleShape
  /** Copies the displayable properties (insets, text settings, line and fill
    * colours, line width) from `from` onto this shape. */
  def copyFrom(from:XSSFSimpleShape): Unit = {
    shape.setBottomInset(from.getBottomInset)
    shape.setLeftInset(from.getLeftInset)
    shape.setRightInset(from.getRightInset)
    shape.setShapeType(from.getShapeType)
    shape.setText(from.getText)
    shape.setTextAutofit(from.getTextAutofit)
    shape.setTextDirection(from.getTextDirection)
    shape.setTextHorizontalOverflow(from.getTextHorizontalOverflow)
    shape.setTextVerticalOverflow(from.getTextVerticalOverflow)
    shape.setTopInset(from.getTopInset)
    shape.setVerticalAlignment(from.getVerticalAlignment)
    shape.setWordWrap(from.getWordWrap)
    shape.setLineWidth(from.getLineWidth)
    val lc = from.getLineStyleColor
    shape.setLineStyleColor(lc(0), lc(1), lc(2))
    val fc = from.getFillColor
    shape.setFillColor(fc(0), fc(1), fc(2))
  }
  // 12700 = 1 pt
  /** Line width in points (the OOXML model stores EMU-scaled units). */
  def getLineWidth() = {
    shape.getCTShape.getSpPr.getLn.getW.toDouble / 12700
  }
  /** RGB triple of the line colour; assumes a solid line fill is present. */
  def getLineStyleColor() = {
    solidColorToRGB(shape.getCTShape.getSpPr.getLn.getSolidFill)
  }
  /** RGB triple of the shape fill; assumes a solid fill is present. */
  def getFillColor() = {
    solidColorToRGB(shape.getCTShape.getSpPr.getSolidFill)
  }
}
trait ExcelShapeSheetExtra {
  // The wrapped POI sheet; supplied by the implicit-class constructor.
  val sheet: Sheet

  /** The sheet's drawing patriarch, or None if none has been created yet. */
  def getDrawingPatriarchOption(): Option[XSSFDrawing] =
    Option(sheet.asInstanceOf[XSSFSheet].getDrawingPatriarch)

  /** The sheet's drawing patriarch, created on demand when absent. */
  def drawingPatriarch(): XSSFDrawing =
    getDrawingPatriarchOption().getOrElse(
      sheet.asInstanceOf[XSSFSheet].createDrawingPatriarch())

  /** Every shape anchored on this sheet; empty when no drawing exists. */
  def getXSSFShapes(): List[XSSFShape] =
    getDrawingPatriarchOption()
      .map(_.getShapes.asScala.toList)
      .getOrElse(Nil)
}
trait ExcelShapeXSSFShapeExtra {
  // The wrapped POI shape; supplied by the implicit-class constructor.
  val shape: XSSFShape

  /**
   * The raw OOXML bean backing this shape.
   *
   * Fix: the original cast each match binding back to its own type
   * (`x.asInstanceOf[XSSFSimpleShape]` after `case x: XSSFSimpleShape`);
   * the typed pattern already narrows `x`, so the casts were redundant.
   * The match is intentionally non-exhaustive: an unknown XSSFShape
   * subtype fails fast with a MatchError, as before.
   */
  def toXmlObject(): XmlObject =
    shape match {
      case x: XSSFSimpleShape  => x.getCTShape
      case x: XSSFConnector    => x.getCTConnector
      case x: XSSFGraphicFrame => x.getCTGraphicalObjectFrame
      case x: XSSFPicture      => x.getCTPicture
      case x: XSSFShapeGroup   => x.getCTGroupShape
    }
}
|
wak109/exceler
|
jvm/src/main/scala/ExcelShape.scala
|
Scala
|
mit
| 4,245 |
/*
Copyright 2014 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import com.twitter.algebird.monad.Reader
import com.twitter.scalding.serialization.macros.impl.ordered_serialization.runtime_helpers.MacroEqualityOrderedSerialization
import com.twitter.scalding.serialization.OrderedSerialization
import java.nio.file.FileSystems
import java.nio.file.Path
import org.scalatest.{ Matchers, WordSpec }
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{ Await, Future, ExecutionContext => ConcurrentExecutionContext, Promise }
import scala.util.Random
import scala.util.{ Try, Success, Failure }
import ExecutionContext._
object ExecutionTestJobs {
  /** Classic word count over the text lines of `in`, written as (word, count) pairs to `out`. */
  def wordCount(in: String, out: String) =
    TypedPipe.from(TextLine(in))
      // Split on runs of whitespace. Was "\\\\s+" (the regex \\s+), which matches
      // a literal backslash followed by 's' and therefore never splits real input;
      // the "run" test below expects per-word counts from "a b b c c c".
      .flatMap(_.split("\\s+"))
      .map((_, 1L))
      .sumByKey
      .writeExecution(TypedTsv(out))

  /** Word count over an in-memory pipe, returned as an iterable of (word, count). */
  def wordCount2(in: TypedPipe[String]) =
    in
      .flatMap(_.split("\\s+")) // same whitespace-split fix as wordCount
      .map((_, 1L))
      .sumByKey
      .toIterableExecution

  /** Sum each input pipe independently and pair the two results with zip. */
  def zipped(in1: TypedPipe[Int], in2: TypedPipe[Int]) =
    in1.groupAll.sum.values.toIterableExecution
      .zip(in2.groupAll.sum.values.toIterableExecution)

  /** Fan one reduced pipe out into two mapped branches and merge them. */
  def mergeFanout(in: List[Int]): Execution[Iterable[(Int, Int)]] = {
    // Force a reduce, so no fancy optimizations kick in
    val source = TypedPipe.from(in).groupBy(_ % 3).head
    (source.mapValues(_ * 2) ++ (source.mapValues(_ * 3))).toIterableExecution
  }
}
// Wires ExecutionTestJobs.wordCount into scalding's ExecutionJob harness so it
// can be driven by JobTest with --input/--output arguments.
class WordCountEc(args: Args) extends ExecutionJob[Unit](args) {
def execution = ExecutionTestJobs.wordCount(args("input"), args("output"))
// In tests, classloader issues with sbt mean we should not
// really use threads, so we run immediately
override def concurrentExecutionContext = new scala.concurrent.ExecutionContext {
// Run each task synchronously on the caller's thread.
def execute(r: Runnable) = r.run
// Failures surface through the job result, so reporting is a no-op here.
def reportFailure(t: Throwable) = ()
}
}
// Key type used to exercise macro-derived OrderedSerialization in the tests.
case class MyCustomType(s: String)
class ExecutionTest extends WordSpec with Matchers {
implicit class ExecutionTestHelper[T](ex: Execution[T]) {
def shouldSucceed(): T = {
val r = ex.waitFor(Config.default, Local(true))
r match {
case Success(s) => s
case Failure(e) => fail(s"Failed running execution, exception:\\n$e")
}
}
def shouldFail(): Unit = {
val r = ex.waitFor(Config.default, Local(true))
assert(r.isFailure)
}
}
// Core Execution semantics: running, zipping, lifting failures to Try,
// fan-out merging, and Config isolation/caching behavior.
"An Execution" should {
"run" in {
ExecutionTestJobs.wordCount2(TypedPipe.from(List("a b b c c c", "d d d d")))
.waitFor(Config.default, Local(false)).get.toMap shouldBe Map("a" -> 1L, "b" -> 2L, "c" -> 3L, "d" -> 4L)
}
"run with zip" in {
(ExecutionTestJobs.zipped(TypedPipe.from(0 until 100), TypedPipe.from(100 until 200))
.shouldSucceed() match {
case (it1, it2) => (it1.head, it2.head)
}) shouldBe ((0 until 100).sum, (100 until 200).sum)
}
"lift to try" in {
val res = ExecutionTestJobs
.wordCount2(TypedPipe.from(List("a", "b")))
.liftToTry
.shouldSucceed()
assert(res.isSuccess)
}
"lift to try on exception" in {
val res = ExecutionTestJobs
.wordCount2(TypedPipe.from(List("a", "b")))
.map(_ => throw new RuntimeException("Something went wrong"))
.liftToTry
.shouldSucceed()
// liftToTry captures the thrown exception as a Failure instead of aborting.
assert(res.isFailure)
}
"merge fanouts without error" in {
// Order-insensitive equality: same size and same element set.
def unorderedEq[T](l: Iterable[T], r: Iterable[T]): Boolean =
(l.size == r.size) && (l.toSet == r.toSet)
// Reference model of mergeFanout computed directly on the List.
def correct(l: List[Int]): List[(Int, Int)] = {
val in = l.groupBy(_ % 3).mapValues(_.head)
in.mapValues(_ * 2).toList ++ in.mapValues(_ * 3)
}
val input = (0 to 100).toList
val result = ExecutionTestJobs.mergeFanout(input).waitFor(Config.default, Local(false)).get
val cres = correct(input)
unorderedEq(cres, result.toList) shouldBe true
}
"If either fails, zip fails, else we get success" in {
// A future that never completes: the zip must fail without waiting on it.
val neverHappens = Promise[Int]().future
Execution.fromFuture { _ => neverHappens }
.zip(Execution.failed(new Exception("oh no")))
.shouldFail()
Execution.failed(new Exception("oh no"))
.zip(Execution.fromFuture { _ => neverHappens })
.shouldFail()
// If both are good, we succeed:
Execution.from(1)
.zip(Execution.from("1"))
.shouldSucceed() shouldBe (1, "1")
}
"Config transformer will isolate Configs" in {
// NOTE(review): "\\n" in the message below is a literal backslash-n, not a
// newline -- looks double-escaped; confirm against upstream.
def doesNotHaveVariable(message: String) = Execution.getConfig.flatMap { cfg =>
if (cfg.get("test.cfg.variable").isDefined)
Execution.failed(new Exception(s"${message}\\n: var: ${cfg.get("test.cfg.variable")}"))
else
Execution.from(())
}
val hasVariable = Execution.getConfig.flatMap { cfg =>
if (cfg.get("test.cfg.variable").isEmpty)
Execution.failed(new Exception("Should see variable inside of transform"))
else
Execution.from(())
}
def addOption(cfg: Config) = cfg.+ ("test.cfg.variable", "dummyValue")
doesNotHaveVariable("Should not see variable before we've started transforming")
.flatMap{ _ => Execution.withConfig(hasVariable)(addOption) }
.flatMap(_ => doesNotHaveVariable("Should not see variable in flatMap's after the isolation"))
.map(_ => true)
.shouldSucceed() shouldBe true
}
"Config transformer will interact correctly with the cache" in {
var incrementIfDefined = 0
var totalEvals = 0
val incrementor = Execution.getConfig.flatMap { cfg =>
totalEvals += 1
if (cfg.get("test.cfg.variable").isDefined)
incrementIfDefined += 1
Execution.from(())
}
def addOption(cfg: Config) = cfg.+ ("test.cfg.variable", "dummyValue")
// Here we run without the option, with the option, and finally without again.
incrementor
.flatMap{ _ => Execution.withConfig(incrementor)(addOption) }
.flatMap(_ => incrementor)
.map(_ => true)
.shouldSucceed() shouldBe true
assert(incrementIfDefined === 1)
// We should evaluate once for the default config, and once for the modified config.
assert(totalEvals === 2)
}
"Config transformer will interact correctly with the cache when writing" in {
import java.io._
val srcF = File.createTempFile("tmpoutputLocation", ".tmp").getAbsolutePath
val sinkF = File.createTempFile("tmpoutputLocation2", ".tmp").getAbsolutePath
def writeNums(nums: List[Int]): Unit = {
val pw = new PrintWriter(new File(srcF))
// NOTE(review): "\\n" is a literal backslash-n separator, not a newline --
// likely double-escaped; confirm against upstream.
pw.write(nums.mkString("\\n"))
pw.close
}
writeNums(List(1, 2, 3))
val sink = TypedTsv[Int](sinkF)
val src = TypedTsv[Int](srcF)
val operationTP = (TypedPipe.from(src) ++ TypedPipe.from((1 until 100).toList)).writeExecution(sink).getCounters.map(_._2.toMap)
def addOption(cfg: Config) = cfg.+ ("test.cfg.variable", "dummyValue")
// Here we run without the option, with the option, and finally without again.
val (oldCounters, newCounters) = operationTP
.flatMap{ oc =>
// Mutate the source between runs so the second run cannot reuse the cache.
writeNums(List(1, 2, 3, 4, 5, 6, 7))
Execution.withConfig(operationTP)(addOption).map { nc => (oc, nc) }
}
.shouldSucceed()
assert(oldCounters != newCounters, "With new configs given the source changed we shouldn't cache so the counters should be different")
}
}
// Command-line parsing: -D pairs land in both the scalding Config and the
// Hadoop Configuration; bare --flags become boolean Args.
"ExecutionApp" should {
val parser = new ExecutionApp { def job = Execution.from(()) }
"parse hadoop args correctly" in {
val conf = parser.config(Array("-Dmapred.reduce.tasks=100", "--local"))._1
conf.get("mapred.reduce.tasks") should contain ("100")
conf.getArgs.boolean("local") shouldBe true
// --hdfs selects Hdfs mode; the -D value must also reach the Hadoop conf.
val (conf1, Hdfs(_, hconf)) = parser.config(Array("--test", "-Dmapred.reduce.tasks=110", "--hdfs"))
conf1.get("mapred.reduce.tasks") should contain ("110")
conf1.getArgs.boolean("test") shouldBe true
hconf.get("mapred.reduce.tasks") shouldBe "110"
}
}
// End-to-end: the Execution-backed job runs under JobTest in both local
// and Hadoop test modes and produces the expected word counts.
"An ExecutionJob" should {
"run correctly" in {
JobTest(new WordCountEc(_))
.arg("input", "in")
.arg("output", "out")
.source(TextLine("in"), List((0, "hello world"), (1, "goodbye world")))
.typedSink(TypedTsv[(String, Long)]("out")) { outBuf =>
outBuf.toMap shouldBe Map("hello" -> 1L, "world" -> 2L, "goodbye" -> 1L)
}
.run
.runHadoop
.finish
}
}
// Caching, sharing, parallelism, and equality/hashCode laws of Execution.
// These tests rely on mutable counters and on structural equality of
// reconstructed Executions, so statement order is load-bearing throughout.
"Executions" should {
"evaluate once per run" in {
var first = 0
var second = 0
var third = 0
val e1 = Execution.from({ first += 1; 42 })
val e2 = e1.flatMap { x =>
second += 1
Execution.from(2 * x)
}
val e3 = e1.map { x => third += 1; x * 3 }
/**
* Notice both e3 and e2 need to evaluate e1.
*/
val res = e3.zip(e2)
res.shouldSucceed()
// e1 ran once despite two consumers: results are cached within a run.
assert((first, second, third) == (1, 1, 1))
}
"Running a large loop won't exhaust boxed instances" in {
var timesEvaluated = 0
import com.twitter.scalding.serialization.macros.impl.BinaryOrdering._
// Attempt to use up 4 boxed classes for every execution
def baseExecution(idx: Int): Execution[Unit] = TypedPipe.from(0 until 1000).map(_.toShort).flatMap { i =>
timesEvaluated += 1
List((i, i), (i, i))
}.sumByKey.map {
case (k, v) =>
(k.toInt, v)
}.sumByKey.map {
case (k, v) =>
(k.toLong, v)
}.sumByKey.map {
case (k, v) =>
(k.toString, v)
}.sumByKey.map {
case (k, v) =>
(MyCustomType(k), v)
}.sumByKey.writeExecution(TypedTsv(s"/tmp/asdf_${idx}"))
// Sanity check: the macro-derived ordering is in effect for MyCustomType.
implicitly[OrderedSerialization[MyCustomType]] match {
case mos: MacroEqualityOrderedSerialization[_] => assert(mos.uniqueId == "com.twitter.scalding.MyCustomType")
case _ => sys.error("Ordered serialization should have been the MacroEqualityOrderedSerialization for this test")
}
def executionLoop(idx: Int): Execution[Unit] = {
if (idx > 0)
baseExecution(idx).flatMap(_ => executionLoop(idx - 1))
else
Execution.unit
}
executionLoop(55).shouldSucceed()
assert(timesEvaluated == 55 * 1000, "Should run the 55 execution loops for 1000 elements")
}
"evaluate shared portions just once, writeExecution" in {
var timesEvaluated = 0
val baseTp = TypedPipe.from(0 until 1000).flatMap { i =>
timesEvaluated += 1
List(i, i)
}.fork
val fde1 = baseTp.map{ _ * 3 }.writeExecution(TypedTsv("/tmp/asdf"))
val fde2 = baseTp.map{ _ * 5 }.writeExecution(TypedTsv("/tmp/asdf2"))
val res = fde1.zip(fde2)
res.shouldSucceed()
assert(timesEvaluated == 1000, "Should share the common sub section of the graph when we zip two write Executions")
}
"evaluate shared portions just once, forceToDiskExecution" in {
var timesEvaluated = 0
val baseTp = TypedPipe.from(0 until 1000).flatMap { i =>
timesEvaluated += 1
List(i, i)
}.fork
val fde1 = baseTp.map{ _ * 3 }.forceToDiskExecution
val fde2 = baseTp.map{ _ * 5 }.forceToDiskExecution
val res = fde1.zip(fde2)
res.shouldSucceed()
assert(timesEvaluated == 1000, "Should share the common sub section of the graph when we zip two write Executions")
}
"evaluate shared portions just once, forceToDiskExecution with execution cache" in {
var timesEvaluated = 0
val baseTp = TypedPipe.from(0 until 1000).flatMap { i =>
timesEvaluated += 1
List(i, i)
}.fork
val fde1 = baseTp.map{ _ * 3 }.forceToDiskExecution
val fde2 = baseTp.map{ _ * 5 }.forceToDiskExecution
// Re-running fde1 after the zip must hit the execution cache, not recompute.
val res = fde1.zip(fde2).flatMap{ _ => fde1 }.flatMap(_.toIterableExecution)
res.shouldSucceed()
assert(timesEvaluated == 1000, "Should share the common sub section of the graph when we zip two write Executions and then flatmap")
}
"Ability to do isolated caches so we don't exhaust memory" in {
// Each step allocates a 4M-element array; withNewCache must let earlier
// results be collected instead of pinning them all in one shared cache.
def memoryWastingExecutionGenerator(id: Int): Execution[Array[Long]] = Execution.withNewCache(Execution.from(id).flatMap{ idx =>
Execution.from(Array.fill(4000000)(idx.toLong))
})
def writeAll(numExecutions: Int): Execution[Unit] = {
if (numExecutions > 0) {
memoryWastingExecutionGenerator(numExecutions).flatMap { _ =>
writeAll(numExecutions - 1)
}
} else {
Execution.from(())
}
}
writeAll(400).shouldSucceed()
}
"handle failure" in {
val result = Execution.withParallelism(Seq(Execution.failed(new Exception("failed"))), 1)
result.shouldFail()
}
"handle an error running in parallel" in {
val executions = Execution.failed(new Exception("failed")) :: 0.to(10).map(i => Execution.from[Int](i)).toList
val result = Execution.withParallelism(executions, 3)
result.shouldFail()
}
"run in parallel" in {
val executions = 0.to(10).map(i => Execution.from[Int](i)).toList
val result = Execution.withParallelism(executions, 3)
assert(result.shouldSucceed() == 0.to(10).toSeq)
}
"block correctly" in {
// With parallelism 1 the executions must complete strictly in list order;
// the decreasing sleeps would reorder them if any concurrency leaked in.
var seen = 0
def updateSeen(idx: Int) {
assert(seen === idx)
seen += 1
}
val executions = 0.to(10).map{ i =>
Execution
.from[Int](i)
.map{ i => Thread.sleep(10 - i); i }
.onComplete(t => updateSeen(t.get))
}.toList.reverse
val result = Execution.withParallelism(executions, 1)
assert(result.shouldSucceed() == 0.to(10).reverse)
}
"can hashCode, compare, and run a long sequence" in {
val execution = Execution.sequence((1 to 100000).toList.map(Execution.from(_)))
assert(execution.hashCode == execution.hashCode)
assert(execution == execution)
assert(execution.shouldSucceed() == (1 to 100000).toList)
}
"caches a withId Execution computation" in {
var called = false
val execution = Execution.withId { id =>
assert(!called)
called = true
Execution.from("foobar")
}
val doubleExecution = execution.zip(execution)
assert(doubleExecution.shouldSucceed() == ("foobar", "foobar"))
assert(called)
}
"maintains equality and hashCode after reconstruction" when {
// Make two copies of these. Comparison by reference
// won't match between the two.
val futureF = { _: ConcurrentExecutionContext => Future.successful(10) }
val futureF2 = { _: ConcurrentExecutionContext => Future.successful(10) }
val fnF = { (_: Config, _: Mode) => null }
val fnF2 = { (_: Config, _: Mode) => null }
val withIdF = { _: UniqueID => Execution.unit }
val withIdF2 = { _: UniqueID => Execution.unit }
val mapF = { _: Int => 12 }
val mapF2 = { _: Int => 12 }
// An Execution rebuilt from the same function must equal itself (by-name
// `ex` is evaluated twice) but differ from one built from a distinct function.
def reconstructibleLaws[T](ex: => Execution[T], ex2: Execution[T]): Unit = {
assert(ex == ex)
assert(ex.hashCode == ex.hashCode)
assert(ex != ex2)
}
"Execution.fromFuture" in {
reconstructibleLaws(Execution.fromFuture(futureF), Execution.fromFuture(futureF2))
}
"Execution.fromFn" in {
reconstructibleLaws(Execution.fromFn(fnF), Execution.fromFn(fnF2))
}
"Execution.withId" in {
reconstructibleLaws(Execution.withId(withIdF), Execution.withId(withIdF2))
}
"Execution#map" in {
reconstructibleLaws(
Execution.fromFuture(futureF).map(mapF),
Execution.fromFuture(futureF).map(mapF2))
}
"Execution.zip" in {
reconstructibleLaws(
Execution.zip(Execution.fromFuture(futureF2), Execution.withId(withIdF)),
Execution.zip(Execution.fromFuture(futureF2), Execution.withId(withIdF2)))
}
"Execution.sequence" in {
reconstructibleLaws(
Execution.sequence(Seq(
Execution.fromFuture(futureF),
Execution.withId(withIdF),
Execution.fromFuture(futureF2).map(mapF))),
Execution.sequence(Seq(
Execution.fromFuture(futureF),
Execution.withId(withIdF),
Execution.fromFn(fnF))))
}
}
"Has consistent hashCode and equality for mutable" when {
// These cases are a bit convoluted, but we still
// want equality to be consistent
trait MutableX[T] {
protected var x: Int
def setX(newX: Int): Unit = { x = newX }
def makeExecution: Execution[T]
}
case class FromFutureMutable(var x: Int = 0) extends Function1[ConcurrentExecutionContext, Future[Int]] with MutableX[Int] {
def apply(context: ConcurrentExecutionContext) = Future.successful(x)
def makeExecution = Execution.fromFuture(this)
}
case class FromFnMutable(var x: Int = 0) extends Function2[Config, Mode, Null] with MutableX[Unit] {
def apply(config: Config, mode: Mode) = null
def makeExecution = Execution.fromFn(this)
}
case class WithIdMutable(var x: Int = 0) extends Function1[UniqueID, Execution[Int]] with MutableX[Int] {
def apply(id: UniqueID) = Execution.fromFuture(FromFutureMutable(x))
def makeExecution = Execution.withId(this)
}
val mapFunction = { x: Int => x * x }
case class MapMutable(var x: Int = 0) extends MutableX[Int] {
val m = FromFutureMutable(x)
override def setX(newX: Int) = {
x = newX
m.setX(x)
}
def makeExecution = m.makeExecution.map(mapFunction)
}
case class ZipMutable(var x: Int = 0) extends MutableX[(Int, Int)] {
val m1 = FromFutureMutable(x)
val m2 = WithIdMutable(x)
override def setX(newX: Int) = {
x = newX
m1.setX(x)
m2.setX(x + 20)
}
def makeExecution = m1.makeExecution.zip(m2.makeExecution)
}
case class SequenceMutable(var x: Int = 0) extends MutableX[Seq[Int]] {
val m1 = FromFutureMutable(x)
val m2 = WithIdMutable(x)
override def setX(newX: Int) = {
x = newX
m1.setX(x)
m2.setX(x * 3)
}
def makeExecution = Execution.sequence(Seq(m1.makeExecution, m2.makeExecution))
}
// Law: equal mutable states give equal Executions; diverging the state
// breaks equality; re-converging restores a consistent hashCode.
// expectedOpt, when present, also checks the computed value per seed.
def mutableLaws[T, U <: MutableX[T]](
mutableGen: => U,
expectedOpt: Option[Int => T] = None): Unit = {
expectedOpt.foreach { expected =>
require(expected(10) != expected(20))
}
def validate(ex: Execution[T], seed: Int): Unit = {
expectedOpt.foreach { expected =>
assert(ex.shouldSucceed() == expected(seed))
}
}
val mutable1 = mutableGen
mutable1.setX(10)
val ex1 = mutable1.makeExecution
val mutable2 = mutableGen
mutable2.setX(10)
val ex2 = mutable2.makeExecution
assert(ex1 == ex2)
assert(ex1.hashCode == ex2.hashCode)
validate(ex1, 10)
validate(ex2, 10)
mutable2.setX(20)
// We may have the same hashCode still, but we don't need to
assert(ex1 != ex2)
validate(ex2, 20)
val mutable3 = mutableGen
mutable3.setX(20)
val ex3 = mutable3.makeExecution
assert(ex1 != ex3)
validate(ex3, 20)
mutable3.setX(10)
if (ex1 == ex3) {
// If they are made equal again, the hashCodes must match
assert(ex1.hashCode == ex3.hashCode)
}
validate(ex3, 10)
}
"Execution.fromFuture" in {
mutableLaws(FromFutureMutable(), Some({ x: Int => x }))
}
"Execution.fromFn" in {
mutableLaws(FromFnMutable(), Option.empty[Int => Unit])
}
"Execution.withId" in {
mutableLaws(WithIdMutable(), Some({ x: Int => x }))
}
"Execution#map" in {
mutableLaws(MapMutable(), Some({ x: Int => x * x }))
}
"Execution#zip" in {
mutableLaws(ZipMutable(), Some({ x: Int => (x, x + 20) }))
}
"Execution.sequence" in {
mutableLaws(SequenceMutable(), Some({ x: Int => Seq(x, x * 3) }))
}
}
}
}
|
piyushnarang/scalding
|
scalding-core/src/test/scala/com/twitter/scalding/ExecutionTest.scala
|
Scala
|
apache-2.0
| 20,881 |
package com.twitter.finagle.httpx
import com.twitter.collection.RecordSchema
import com.twitter.finagle.httpx.netty.{HttpRequestProxy, Bijections}
import com.twitter.io.{Charsets, Reader}
import java.net.{InetAddress, InetSocketAddress}
import java.util.{AbstractMap, List => JList, Map => JMap, Set => JSet}
import org.jboss.netty.buffer.{ChannelBuffer, ChannelBuffers}
import org.jboss.netty.channel.Channel
import org.jboss.netty.handler.codec.embedder.{DecoderEmbedder, EncoderEmbedder}
import org.jboss.netty.handler.codec.http._
import scala.beans.BeanProperty
import scala.collection.JavaConverters._
import Bijections._
/**
* Rich HttpRequest.
*
* Use RequestProxy to create an even richer subclass.
*/
abstract class Request extends Message with HttpRequestProxy {

  /**
   * Arbitrary user-defined context associated with this request object.
   * [[com.twitter.collection.RecordSchema.Record RecordSchema.Record]] is
   * used here, rather than [[com.twitter.finagle.Context Context]] or similar
   * out-of-band mechanisms, to make the connection between the request and its
   * associated context explicit.
   */
  def ctx: Request.Schema.Record = _ctx
  private[this] val _ctx = Request.Schema.newRecord()

  def isRequest = true

  /** Decoded query-string/form parameters, built lazily on first access. */
  def params: ParamMap = _params
  private[this] lazy val _params: ParamMap = new RequestParamMap(this)

  def method: Method = from(getMethod)
  def method_=(method: Method) = setMethod(from(method))
  def uri: String = getUri()
  def uri_=(uri: String) { setUri(uri) }

  /** Path from URI: everything before the first '?', or the whole URI. */
  @BeanProperty
  def path: String = {
    val u = getUri
    u.indexOf('?') match {
      case -1 => u
      case n  => u.substring(0, n)
    }
  }

  /** File extension of the path's last segment, lowercased. Empty string if none. */
  @BeanProperty
  def fileExtension: String = {
    val p = path
    val leaf = p.lastIndexOf('/') match {
      case -1 => p
      case n  => p.substring(n + 1)
    }
    leaf.lastIndexOf('.') match {
      case -1 => ""
      case n  => leaf.substring(n + 1).toLowerCase
    }
  }

  /** Remote InetSocketAddress */
  @BeanProperty
  def remoteSocketAddress: InetSocketAddress

  /** Remote host - a dotted quad */
  @BeanProperty
  def remoteHost: String =
    remoteAddress.getHostAddress

  /** Remote InetAddress */
  @BeanProperty
  def remoteAddress: InetAddress =
    remoteSocketAddress.getAddress

  /** Remote port */
  @BeanProperty
  def remotePort: Int =
    remoteSocketAddress.getPort

  // The get*Param methods below are for Java compatibility. Note Scala default
  // arguments aren't compatible with Java, so we need two versions of each.

  /** Get parameter value. Returns value or null. */
  def getParam(name: String): String =
    params.get(name).orNull

  /** Get parameter value. Returns value or default. */
  def getParam(name: String, default: String): String =
    params.get(name).getOrElse(default)

  /** Get Short param. Returns value or 0. */
  def getShortParam(name: String): Short =
    params.getShortOrElse(name, 0)

  /** Get Short param. Returns value or default. */
  def getShortParam(name: String, default: Short): Short =
    params.getShortOrElse(name, default)

  /** Get Int param. Returns value or 0. */
  def getIntParam(name: String): Int =
    params.getIntOrElse(name, 0)

  /** Get Int param. Returns value or default. */
  def getIntParam(name: String, default: Int): Int =
    params.getIntOrElse(name, default)

  /** Get Long param. Returns value or 0. */
  def getLongParam(name: String): Long =
    params.getLongOrElse(name, 0L)

  /**
   * Get Long param. Returns value or default.
   *
   * Fix: dropped the stray `= 0L` default argument. Mixing overloading with
   * default arguments invites surprising resolution, it was inconsistent with
   * every sibling accessor, and the zero case is already served by the
   * one-argument overload, so removal is source-compatible for callers.
   */
  def getLongParam(name: String, default: Long): Long =
    params.getLongOrElse(name, default)

  /** Get Boolean param. Returns value or false. */
  def getBooleanParam(name: String): Boolean =
    params.getBooleanOrElse(name, false)

  /** Get Boolean param. Returns value or default. */
  def getBooleanParam(name: String, default: Boolean): Boolean =
    params.getBooleanOrElse(name, default)

  /** Get all values of parameter. Returns list of values. */
  def getParams(name: String): JList[String] =
    params.getAll(name).toList.asJava

  /** Get all parameters. */
  def getParams(): JList[JMap.Entry[String, String]] =
    (params.toList.map { case (k, v) =>
      // cast to appease asJava
      (new AbstractMap.SimpleImmutableEntry(k, v)).asInstanceOf[JMap.Entry[String, String]]
    }).asJava

  /** Check if parameter exists. */
  def containsParam(name: String): Boolean =
    params.contains(name)

  /** Get parameters names. */
  def getParamNames(): JSet[String] =
    params.keySet.asJava

  /** Response associated with request */
  lazy val response: Response = Response(this)

  /** Get response associated with request. */
  def getResponse(): Response = response

  /** Encode an HTTP message to String */
  def encodeString(): String = {
    new String(encodeBytes(), "UTF-8")
  }

  /** Encode an HTTP message to Array[Byte] */
  def encodeBytes(): Array[Byte] = {
    val encoder = new EncoderEmbedder[ChannelBuffer](new HttpRequestEncoder)
    encoder.offer(from[Request, HttpRequest](this))
    val buffer = encoder.poll()
    val bytes = new Array[Byte](buffer.readableBytes())
    buffer.readBytes(bytes)
    bytes
  }

  // Fix: the quote escapes were doubled (`"Request(\\""`), which does not even
  // tokenize as intended; restore the embedded quotes around the request line.
  override def toString =
    "Request(\"" + method + " " + uri + "\", from " + remoteSocketAddress + ")"
}
object Request {
/**
* [[com.twitter.collection.RecordSchema RecordSchema]] declaration, used
* to generate [[com.twitter.collection.RecordSchema.Record Record]] instances
* for Request.ctx.
*/
val Schema: RecordSchema = new RecordSchema
/** Decode a Request from a String (UTF-8 wire bytes). */
def decodeString(s: String): Request = {
decodeBytes(s.getBytes(Charsets.Utf8))
}
/** Decode a Request from Array[Byte] of raw HTTP wire data. */
def decodeBytes(b: Array[Byte]): Request = {
// Unbounded limits: decoding is trusted, caller-supplied data.
val decoder = new DecoderEmbedder(
new HttpRequestDecoder(Int.MaxValue, Int.MaxValue, Int.MaxValue))
decoder.offer(ChannelBuffers.wrappedBuffer(b))
val req = decoder.poll().asInstanceOf[HttpRequest]
assert(req ne null)
// No real peer for a decoded request; use the wildcard address.
new Request {
val httpRequest = req
lazy val remoteSocketAddress = new InetSocketAddress(0)
}
}
/**
* Create an HTTP/1.1 GET Request from query string parameters.
*
* @param params a list of key-value pairs representing the query string.
*/
def apply(params: Tuple2[String, String]*): Request =
apply("/", params:_*)
/**
* Create an HTTP/1.1 GET Request from URI and query string parameters.
*
* @param params a list of key-value pairs representing the query string.
*/
def apply(uri: String, params: Tuple2[String, String]*): Request = {
val encoder = new QueryStringEncoder(uri)
params.foreach { case (key, value) =>
encoder.addParam(key, value)
}
apply(Method.Get, encoder.toString)
}
/**
* Create an HTTP/1.1 GET Request from URI string.
* */
def apply(uri: String): Request =
apply(Method.Get, uri)
/**
* Create an HTTP/1.1 Request from the given method and URI string.
*/
def apply(method: Method, uri: String): Request =
apply(Version.Http11, method, uri)
/**
* Create a Request from version, method, and URI string.
*/
def apply(version: Version, method: Method, uri: String): Request = {
val reqIn = new DefaultHttpRequest(from(version), from(method), uri)
new Request {
val httpRequest = reqIn
lazy val remoteSocketAddress = new InetSocketAddress(0)
}
}
/**
* Create a Request from Version, Method, URI, and Reader.
*
* A [[com.twitter.io.Reader]] is a stream of bytes serialized to HTTP chunks.
* `Reader`s are useful for representing streaming data in the body of the
* request (e.g. a large file, or long lived computation that produces results
* incrementally).
*
* {{{
* val data = Reader.fromStream(File.open("data.txt"))
* val post = Request(Http11, Post, "/upload", data)
*
* client(post) onSuccess {
* case r if r.status == Ok => println("Success!")
* case _ => println("Something went wrong...")
* }
* }}}
*/
def apply(
version: Version,
method: Method,
uri: String,
reader: Reader
): Request = {
val httpReq = new DefaultHttpRequest(from(version), from(method), uri)
// Chunked transfer encoding: the body is streamed from the Reader.
httpReq.setChunked(true)
apply(httpReq, reader, new InetSocketAddress(0))
}
private[httpx] def apply(
reqIn: HttpRequest,
readerIn: Reader,
remoteAddr: InetSocketAddress
): Request = new Request {
override val reader = readerIn
val httpRequest = reqIn
lazy val remoteSocketAddress = remoteAddr
}
/** Create Request from HttpRequest and Channel. Used by Codec. */
private[finagle] def apply(httpRequestArg: HttpRequest, channel: Channel): Request =
new Request {
val httpRequest = httpRequestArg
lazy val remoteSocketAddress = channel.getRemoteAddress.asInstanceOf[InetSocketAddress]
}
/** Create a query string from URI and parameters. */
def queryString(uri: String, params: Tuple2[String, String]*): String = {
val encoder = new QueryStringEncoder(uri)
params.foreach { case (key, value) =>
encoder.addParam(key, value)
}
encoder.toString
}
/**
* Create a query string from parameters. The results begins with "?" only if
* params is non-empty.
*/
def queryString(params: Tuple2[String, String]*): String =
queryString("", params: _*)
/** Create a query string from URI and parameters. */
def queryString(uri: String, params: Map[String, String]): String =
queryString(uri, params.toSeq: _*)
/**
* Create a query string from parameters. The results begins with "?" only if
* params is non-empty.
*/
def queryString(params: Map[String, String]): String =
queryString("", params.toSeq: _*)
}
|
travisbrown/finagle
|
finagle-httpx/src/main/scala/com/twitter/finagle/httpx/Request.scala
|
Scala
|
apache-2.0
| 9,929 |
package org.jetbrains .plugins.scala
package refactoring.rename3
/**
* Nikolay.Tropin
* 9/13/13
*/
// Data-driven rename-refactoring tests: each test resolves its fixture from
// the test-method name; doTest() renames to the base's default new name,
// doTest(s) renames to the explicit name `s`.
class ScalaRenameTest extends ScalaRenameTestBase {
def testObjectAndTraitToOpChars() = doTest("+++")
def testObjectAndTrait() = doTest()
def testObjectAndClass() = doTest()
def testObjectAndClassToOpChars() = doTest("+++")
def testObjectAndClassToBackticked() = doTest("`a`")
def testValInClass() = doTest()
def testValInTrait() = doTest()
def testVarAndSetters() = doTest()
// Renaming a setter-only member may legitimately be refused; accept the
// specific "is not an identifier." refusal and propagate anything else.
// NOTE(review): if doTest succeeds outright the test silently passes --
// confirm that both outcomes are intended.
def testSettersWithoutVar() = {
try {doTest()}
catch {
case e: RuntimeException if e.getMessage endsWith "is not an identifier." =>
}
}
// Same as above, but renaming directly to an explicit `_=` setter name.
def testSettersWithoutVar2() = {
try {doTest("NameAfterRename_=")}
catch {
case e: RuntimeException if e.getMessage endsWith "is not an identifier." =>
}
}
def testOverridenVal() = doTest()
def testOverridenClassParameter() = doTest()
def testOverrideDef() = doTest()
def testMethodArgument() = doTest()
def testMultipleBaseMembers() = doTest()
def testTypeAlias() = doTest()
def testOverridenFromJava() = doTest()
def testMethodSameAsJavaKeyword() = doTest()
def testParamSameAsJavaKeyword() = doTest()
def testObjectImport() = doTest()
}
|
ilinum/intellij-scala
|
test/org/jetbrains/plugins/scala/refactoring/rename3/ScalaRenameTest.scala
|
Scala
|
apache-2.0
| 1,267 |
package com.arcusys.valamis.web.servlet.public.policy
import com.arcusys.learn.liferay.util.PortletName
import com.arcusys.valamis.web.portlet.base.{ModifyPermission, ViewPermission}
import com.arcusys.valamis.web.servlet.base.{PermissionSupport, PermissionUtil}
import com.arcusys.valamis.web.servlet.public.LessonServlet
/**
* Created by pkornilov on 1/30/17.
*/
// Access policy for LessonServlet routes: GET requires the viewer portlet's
// View permission; mutating verbs require the manager portlet's Modify
// permission. Enforced via Scalatra-style before-filters.
trait LessonPolicy extends PermissionSupport { self: LessonServlet =>
// HTTP verbs that mutate lesson state.
val editMethods = Set("DELETE", "POST", "PUT")
// NOTE(review): verbs matching neither filter (e.g. HEAD, OPTIONS) are not
// guarded here -- confirm they are handled or rejected elsewhere.
before(request.getMethod == "GET") {
requirePortletPermission(ViewPermission, PortletName.LessonViewer)
}
before(editMethods.contains(request.getMethod)) {
requirePortletPermission(ModifyPermission, PortletName.LessonManager)
}
}
|
arcusys/Valamis
|
valamis-portlets/src/main/scala/com/arcusys/valamis/web/servlet/public/policy/LessonPolicy.scala
|
Scala
|
gpl-3.0
| 740 |
/*-------------------------------------------------------------------------*\
** ScalaCheck **
** Copyright (c) 2007-2011 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
\*-------------------------------------------------------------------------*/
package org.scalacheck
import util.{FreqMap,Buildable}
// Type class carrying the default generator used to produce arbitrary T
// values; instances are created via the Arbitrary(...) factory below.
sealed abstract class Arbitrary[T] {
val arbitrary: Gen[T]
}
/** Defines implicit <code>Arbitrary</code> instances for common types.
* <p>
* ScalaCheck
* uses implicit <code>Arbitrary</code> instances when creating properties
* out of functions with the <code>Prop.property</code> method, and when
* the <code>Arbitrary.arbitrary</code> method is used. For example, the
* following code requires that there exists an implicit
* <code>Arbitrary[MyClass]</code> instance:
* </p>
*
* <p>
* <code>
* val myProp = Prop.forAll { myClass: MyClass =><br />
* ...<br />
* }<br />
*
* val myGen = Arbitrary.arbitrary[MyClass]
* </code>
* </p>
*
* <p>
* The required implicit definition could look like this:
* </p>
*
* <p>
* <code>
* implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
* </code>
* </p>
*
* <p>
* The factory method <code>Arbitrary(...)</code> takes a generator of type
* <code>Gen[T]</code> and returns an instance of <code>Arbitrary[T]</code>.
* </p>
*
* <p>
* The <code>Arbitrary</code> module defines implicit <code>Arbitrary</code>
* instances for common types, for convenient use in your properties and
* generators.
* </p>
*/
object Arbitrary {
  import Gen.{value, choose, sized, listOf, listOf1,
    frequency, oneOf, containerOf, resize}
  import util.StdRand
  import scala.collection.{immutable, mutable}
  import java.util.Date
  /** Creates an Arbitrary instance */
  def apply[T](g: => Gen[T]): Arbitrary[T] = new Arbitrary[T] {
    lazy val arbitrary = g
  }
  /** Returns an arbitrary generator for the type T. */
  def arbitrary[T](implicit a: Arbitrary[T]): Gen[T] = a.arbitrary
  /**** Arbitrary instances for each AnyVal ****/
  /** Arbitrary AnyVal */
  implicit lazy val arbAnyVal: Arbitrary[AnyVal] = Arbitrary(oneOf(
    arbitrary[Unit], arbitrary[Boolean], arbitrary[Char], arbitrary[Byte],
    arbitrary[Short], arbitrary[Int], arbitrary[Long], arbitrary[Float],
    arbitrary[Double]
  ))
  /** Arbitrary instance of Boolean */
  implicit lazy val arbBool: Arbitrary[Boolean] =
    Arbitrary(oneOf(true, false))
  /** Arbitrary instance of Int */
  implicit lazy val arbInt: Arbitrary[Int] = Arbitrary(
    Gen.chooseNum(Int.MinValue, Int.MaxValue)
  )
  /** Arbitrary instance of Long */
  // Halved range: keeps sums/differences of two generated Longs from overflowing.
  implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
    Gen.chooseNum(Long.MinValue / 2, Long.MaxValue / 2)
  )
  /** Arbitrary instance of Float */
  implicit lazy val arbFloat: Arbitrary[Float] = Arbitrary(
    Gen.chooseNum(
      Float.MinValue, Float.MaxValue
      // I find that including these by default is a little TOO testy.
      // Float.Epsilon, Float.NaN, Float.PositiveInfinity, Float.NegativeInfinity
    )
  )
  /** Arbitrary instance of Double */
  implicit lazy val arbDouble: Arbitrary[Double] = Arbitrary(
    Gen.chooseNum(
      Double.MinValue / 2, Double.MaxValue / 2
      // As above. Perhaps behind some option?
      // Double.Epsilon, Double.NaN, Double.PositiveInfinity, Double.NegativeInfinity
    )
  )
  /** Arbitrary instance of Char */
  // The two weighted ranges skip the surrogate block 0xD800..0xDFFF, so every
  // generated Char is a valid standalone code point; weights keep the
  // distribution uniform over the remaining range.
  implicit lazy val arbChar: Arbitrary[Char] = Arbitrary(
    Gen.frequency(
      (0xD800-Char.MinValue, Gen.choose(Char.MinValue,0xD800-1)),
      (Char.MaxValue-0xDFFF, Gen.choose(0xDFFF+1,Char.MaxValue))
    )
  )
  /** Arbitrary instance of Byte */
  implicit lazy val arbByte: Arbitrary[Byte] = Arbitrary(
    Gen.chooseNum(Byte.MinValue, Byte.MaxValue)
  )
  /** Arbitrary instance of Short */
  implicit lazy val arbShort: Arbitrary[Short] = Arbitrary(
    Gen.chooseNum(Short.MinValue, Short.MaxValue)
  )
  /** Absolutely, totally, 100% arbitrarily chosen Unit. */
  implicit lazy val arbUnit: Arbitrary[Unit] = Arbitrary(value(()))
  /**** Arbitrary instances of other common types ****/
  /** Arbitrary instance of String */
  implicit lazy val arbString: Arbitrary[String] =
    Arbitrary(arbitrary[List[Char]] map (_.mkString))
  /** Arbitrary instance of Date */
  // Generated as "now" plus an arbitrary Long offset in milliseconds.
  implicit lazy val arbDate: Arbitrary[Date] = Arbitrary(for {
    l <- arbitrary[Long]
    d = new Date
  } yield new Date(d.getTime + l))
  /** Arbitrary instance of Throwable */
  implicit lazy val arbThrowable: Arbitrary[Throwable] =
    Arbitrary(value(new Exception))
  /** Arbitrary BigInt */
  // Mixes small sized values, shifted (really big) values, and explicit
  // boundary cases around Int/Long min/max.
  implicit lazy val arbBigInt: Arbitrary[BigInt] = {
    def chooseBigInt: Gen[BigInt] = sized((s: Int) => choose(-s, s)) map (x => BigInt(x))
    def chooseReallyBigInt = chooseBigInt.combine(choose(32, 128))((x, y) => Some(x.get << y.get))
    Arbitrary(
      frequency(
        (5, chooseBigInt),
        (10, chooseReallyBigInt),
        (1, BigInt(0)),
        (1, BigInt(1)),
        (1, BigInt(-1)),
        (1, BigInt(Int.MaxValue) + 1),
        (1, BigInt(Int.MinValue) - 1),
        (1, BigInt(Long.MaxValue)),
        (1, BigInt(Long.MinValue)),
        (1, BigInt(Long.MaxValue) + 1),
        (1, BigInt(Long.MinValue) - 1)
      )
    )
  }
  /** Arbitrary BigDecimal */
  implicit lazy val arbBigDecimal: Arbitrary[BigDecimal] = {
    import java.math.MathContext._
    val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
    val bdGen = for {
      mc <- mcGen
      scale <- arbInt.arbitrary
      x <- arbBigInt.arbitrary
    } yield BigDecimal(x, scale, mc)
    Arbitrary(bdGen)
  }
  /** Arbitrary java.lang.Number */
  implicit lazy val arbNumber: Arbitrary[Number] = {
    val gen = Gen.oneOf(
      arbitrary[Byte], arbitrary[Short], arbitrary[Int], arbitrary[Long],
      arbitrary[Float], arbitrary[Double]
    )
    Arbitrary(gen map (_.asInstanceOf[Number]))
    // XXX TODO - restore BigInt and BigDecimal
    // Arbitrary(oneOf(arbBigInt.arbitrary :: (arbs map (_.arbitrary) map toNumber) : _*))
  }
  /** Generates an arbitrary property */
  implicit lazy val arbProp: Arbitrary[Prop] =
    Arbitrary(frequency(
      (5, Prop.proved),
      (4, Prop.falsified),
      (2, Prop.undecided),
      (1, Prop.exception(null))
    ))
  /** Arbitrary instance of test params */
  implicit lazy val arbTestParams: Arbitrary[Test.Params] =
    Arbitrary(for {
      minSuccTests <- choose(10,150)
      maxDiscTests <- choose(100,500)
      minSize <- choose(0,500)
      sizeDiff <- choose(0,500)
      maxSize <- choose(minSize, minSize + sizeDiff)
      ws <- choose(1,4)
    } yield Test.Params(minSuccTests,maxDiscTests,minSize,maxSize,workers = ws))
  /** Arbitrary instance of gen params */
  implicit lazy val arbGenParams: Arbitrary[Gen.Params] =
    Arbitrary(for {
      size <- arbitrary[Int] suchThat (_ >= 0)
    } yield Gen.Params(size, StdRand))
  /** Arbitrary instance of prop params */
  implicit lazy val arbPropParams: Arbitrary[Prop.Params] =
    Arbitrary(for {
      genPrms <- arbitrary[Gen.Params]
    } yield Prop.Params(genPrms, FreqMap.empty[immutable.Set[Any]]))
  // Higher-order types //
  /** Arbitrary instance of Gen */
  implicit def arbGen[T](implicit a: Arbitrary[T]): Arbitrary[Gen[T]] =
    Arbitrary(frequency(
      (5, arbitrary[T] map (value(_))),
      (1, Gen.fail)
    ))
  /** Arbitrary instance of option type */
  // Size 0 always yields None; otherwise defined values are generated at size n-1.
  implicit def arbOption[T](implicit a: Arbitrary[T]): Arbitrary[Option[T]] =
    Arbitrary(sized(n => if(n == 0) value(None) else resize(n - 1, arbitrary[T]).map(Some(_))))
  /** Arbitrary instance of Either, choosing Left and Right with equal probability. */
  implicit def arbEither[T, U](implicit at: Arbitrary[T], au: Arbitrary[U]): Arbitrary[Either[T, U]] =
    Arbitrary(oneOf(arbitrary[T].map(Left(_)), arbitrary[U].map(Right(_))))
  /** Arbitrary instance of immutable map */
  implicit def arbImmutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
  ): Arbitrary[immutable.Map[T,U]] = Arbitrary(
    for(seq <- arbitrary[Stream[(T,U)]]) yield immutable.Map(seq: _*)
  )
  /** Arbitrary instance of mutable map */
  implicit def arbMutableMap[T,U](implicit at: Arbitrary[T], au: Arbitrary[U]
  ): Arbitrary[mutable.Map[T,U]] = Arbitrary(
    for(seq <- arbitrary[Stream[(T,U)]]) yield mutable.Map(seq: _*)
  )
  /** Arbitrary instance of any buildable container (such as lists, arrays,
   *  streams, etc). The maximum size of the container depends on the size
   *  generation parameter. */
  implicit def arbContainer[C[_],T](implicit a: Arbitrary[T], b: Buildable[T,C]
  ): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T]))
  /** Arbitrary instance of any array. */
  implicit def arbArray[T](implicit a: Arbitrary[T], c: ClassManifest[T]
  ): Arbitrary[Array[T]] = Arbitrary(containerOf[Array,T](arbitrary[T]))
  // Functions //
  // NOTE(review): all arbFunctionN instances produce constant functions — a
  // single result value is generated and returned regardless of the arguments.
  /** Arbitrary instance of Function1 */
  implicit def arbFunction1[T1,R](implicit a: Arbitrary[R]
  ): Arbitrary[T1 => R] = Arbitrary(
    for(r <- arbitrary[R]) yield (t1: T1) => r
  )
  /** Arbitrary instance of Function2 */
  implicit def arbFunction2[T1,T2,R](implicit a: Arbitrary[R]
  ): Arbitrary[(T1,T2) => R] = Arbitrary(
    for(r <- arbitrary[R]) yield (t1: T1, t2: T2) => r
  )
  /** Arbitrary instance of Function3 */
  implicit def arbFunction3[T1,T2,T3,R](implicit a: Arbitrary[R]
  ): Arbitrary[(T1,T2,T3) => R] = Arbitrary(
    for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3) => r
  )
  /** Arbitrary instance of Function4 */
  implicit def arbFunction4[T1,T2,T3,T4,R](implicit a: Arbitrary[R]
  ): Arbitrary[(T1,T2,T3,T4) => R] = Arbitrary(
    for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4) => r
  )
  /** Arbitrary instance of Function5 */
  implicit def arbFunction5[T1,T2,T3,T4,T5,R](implicit a: Arbitrary[R]
  ): Arbitrary[(T1,T2,T3,T4,T5) => R] = Arbitrary(
    for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) => r
  )
  // Tuples //
  /** Arbitrary instance of 2-tuple */
  implicit def arbTuple2[T1,T2](implicit
    a1: Arbitrary[T1], a2: Arbitrary[T2]
  ): Arbitrary[(T1,T2)] =
    Arbitrary(for {
      t1 <- arbitrary[T1]
      t2 <- arbitrary[T2]
    } yield (t1,t2))
  /** Arbitrary instance of 3-tuple */
  implicit def arbTuple3[T1,T2,T3](implicit
    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3]
  ): Arbitrary[(T1,T2,T3)] =
    Arbitrary(for {
      t1 <- arbitrary[T1]
      t2 <- arbitrary[T2]
      t3 <- arbitrary[T3]
    } yield (t1,t2,t3))
  /** Arbitrary instance of 4-tuple */
  implicit def arbTuple4[T1,T2,T3,T4](implicit
    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4]
  ): Arbitrary[(T1,T2,T3,T4)] =
    Arbitrary(for {
      t1 <- arbitrary[T1]
      t2 <- arbitrary[T2]
      t3 <- arbitrary[T3]
      t4 <- arbitrary[T4]
    } yield (t1,t2,t3,t4))
  /** Arbitrary instance of 5-tuple */
  implicit def arbTuple5[T1,T2,T3,T4,T5](implicit
    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
    a5: Arbitrary[T5]
  ): Arbitrary[(T1,T2,T3,T4,T5)] =
    Arbitrary(for {
      t1 <- arbitrary[T1]
      t2 <- arbitrary[T2]
      t3 <- arbitrary[T3]
      t4 <- arbitrary[T4]
      t5 <- arbitrary[T5]
    } yield (t1,t2,t3,t4,t5))
  /** Arbitrary instance of 6-tuple */
  implicit def arbTuple6[T1,T2,T3,T4,T5,T6](implicit
    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
    a5: Arbitrary[T5], a6: Arbitrary[T6]
  ): Arbitrary[(T1,T2,T3,T4,T5,T6)] =
    Arbitrary(for {
      t1 <- arbitrary[T1]
      t2 <- arbitrary[T2]
      t3 <- arbitrary[T3]
      t4 <- arbitrary[T4]
      t5 <- arbitrary[T5]
      t6 <- arbitrary[T6]
    } yield (t1,t2,t3,t4,t5,t6))
  /** Arbitrary instance of 7-tuple */
  implicit def arbTuple7[T1,T2,T3,T4,T5,T6,T7](implicit
    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
    a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7]
  ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7)] =
    Arbitrary(for {
      t1 <- arbitrary[T1]
      t2 <- arbitrary[T2]
      t3 <- arbitrary[T3]
      t4 <- arbitrary[T4]
      t5 <- arbitrary[T5]
      t6 <- arbitrary[T6]
      t7 <- arbitrary[T7]
    } yield (t1,t2,t3,t4,t5,t6,t7))
  /** Arbitrary instance of 8-tuple */
  implicit def arbTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit
    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
    a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8]
  ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8)] =
    Arbitrary(for {
      t1 <- arbitrary[T1]
      t2 <- arbitrary[T2]
      t3 <- arbitrary[T3]
      t4 <- arbitrary[T4]
      t5 <- arbitrary[T5]
      t6 <- arbitrary[T6]
      t7 <- arbitrary[T7]
      t8 <- arbitrary[T8]
    } yield (t1,t2,t3,t4,t5,t6,t7,t8))
  /** Arbitrary instance of 9-tuple */
  implicit def arbTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit
    a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4],
    a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8],
    a9: Arbitrary[T9]
  ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] =
    Arbitrary(for {
      t1 <- arbitrary[T1]
      t2 <- arbitrary[T2]
      t3 <- arbitrary[T3]
      t4 <- arbitrary[T4]
      t5 <- arbitrary[T5]
      t6 <- arbitrary[T6]
      t7 <- arbitrary[T7]
      t8 <- arbitrary[T8]
      t9 <- arbitrary[T9]
    } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9))
}
|
tonymorris/scalacheck
|
src/main/scala/org/scalacheck/Arbitrary.scala
|
Scala
|
bsd-3-clause
| 13,762 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.rdd.{RDDOperationScope, RDD}
import org.apache.spark.util.{CallSite, Utils}
@DeveloperApi
class RDDInfo(
    val id: Int,
    val name: String,
    val numPartitions: Int,
    var storageLevel: StorageLevel,
    val parentIds: Seq[Int],
    val callSite: String = "",
    val scope: Option[RDDOperationScope] = None)
  extends Ordered[RDDInfo] {
  // Runtime storage statistics; mutated externally as blocks are (un)cached.
  var numCachedPartitions = 0
  var memSize = 0L
  var diskSize = 0L
  var externalBlockStoreSize = 0L
  /** True when at least one partition is cached and some storage is actually in use. */
  def isCached: Boolean =
    (memSize + diskSize + externalBlockStoreSize > 0) && numCachedPartitions > 0
  override def toString: String = {
    import Utils.bytesToString
    ("RDD \\"%s\\" (%d) StorageLevel: %s; CachedPartitions: %d; TotalPartitions: %d; " +
      "MemorySize: %s; ExternalBlockStoreSize: %s; DiskSize: %s").format(
      name, id, storageLevel.toString, numCachedPartitions, numPartitions,
      bytesToString(memSize), bytesToString(externalBlockStoreSize), bytesToString(diskSize))
  }
  /** Orders RDDInfos by id.
   *
   *  Fixed: the previous `this.id - that.id` can overflow Int (e.g. a large
   *  positive id minus a large negative id), producing a wrong sign and
   *  violating the `Ordered` contract. `Integer.compare` is overflow-safe.
   */
  override def compare(that: RDDInfo): Int = {
    Integer.compare(this.id, that.id)
  }
}
private[spark] object RDDInfo {
  /** Builds an [[RDDInfo]] snapshot from a live RDD, falling back to the
   *  formatted class name when the RDD has no explicit name. */
  def fromRdd(rdd: RDD[_]): RDDInfo = {
    val displayName = Option(rdd.name).getOrElse(Utils.getFormattedClassName(rdd))
    val parents = rdd.dependencies.map(dep => dep.rdd.id)
    new RDDInfo(
      rdd.id,
      displayName,
      rdd.partitions.length,
      rdd.getStorageLevel,
      parents,
      rdd.creationSite.shortForm,
      rdd.scope)
  }
}
|
chenc10/Spark-PAF
|
core/src/main/scala/org/apache/spark/storage/RDDInfo.scala
|
Scala
|
apache-2.0
| 2,291 |
package io.getquill.quotation
import io.getquill._
import io.getquill.dsl.DynamicQueryDsl
/** Verifies that the dynamic-query DSL produces the same compiled query
  * strings (via the mirror context) as the equivalent statically quoted
  * expressions, across queries, entity queries, and actions. */
class CompatibleDynamicQuerySpec extends Spec {
  object testContext extends MirrorContext(MirrorIdiom, Literal) with TestEntities with DynamicQueryDsl
  import testContext._
  // Checks that `.dynamic` wraps each quoted type in the matching Dynamic* wrapper
  // while preserving the underlying quotation.
  "implicit classes" - {
    "query" in {
      val q: Quoted[Query[TestEntity]] = qr1
      val d = {
        val d = q.dynamic
        (d: DynamicQuery[TestEntity])
      }
      d.q mustEqual q
    }
    "entity query" in {
      val q: Quoted[EntityQuery[TestEntity]] = qr1
      val d = {
        val d = q.dynamic
        (d: DynamicEntityQuery[TestEntity])
      }
      d.q mustEqual q
    }
    "action" in {
      val q: Quoted[Action[TestEntity]] = qr1.insert(_.i -> 1)
      val d = {
        val d = q.dynamic
        (d: DynamicAction[Action[TestEntity]])
      }
      d.q mustEqual q
    }
    "insert" in {
      val q: Quoted[Insert[TestEntity]] = qr1.insert(_.i -> 1)
      val d = {
        val d = q.dynamic
        (d: DynamicInsert[TestEntity])
      }
      d.q mustEqual q
    }
    "update" in {
      val q: Quoted[Update[TestEntity]] = qr1.update(_.i -> 1)
      val d = {
        val d = q.dynamic
        (d: DynamicUpdate[TestEntity])
      }
      d.q mustEqual q
    }
    "action returning" in {
      val q: Quoted[ActionReturning[TestEntity, Int]] = quote {
        qr1.insert(_.i -> 1).returningGenerated(_.i)
      }
      val d = {
        val d = q.dynamic
        (d: DynamicActionReturning[TestEntity, Int])
      }
      d.q mustEqual q
    }
  }
  // Need to put these here so a TypeTag can be summoned for them
  case class S(v: String) extends Embedded
  case class E(s: S)
  "query" - {
    // Asserts the dynamic query `d` compiles to the same string as the static query `s`.
    def test[T: QueryMeta](d: Quoted[Query[T]], s: Quoted[Query[T]]) =
      testContext.run(d).string mustEqual testContext.run(s).string
    "dynamicQuery" in {
      test(
        quote(query[TestEntity]).dynamic,
        query[TestEntity]
      )
    }
    "map" - {
      "simple" in {
        test(
          quote(query[TestEntity]).dynamic.map(v0 => quote(v0.i)),
          query[TestEntity].map(v0 => v0.i)
        )
      }
      "dynamic" in {
        var cond = true
        test(
          quote(query[TestEntity]).dynamic.map(v0 => if (cond) quote(v0.i) else quote(1)),
          query[TestEntity].map(v0 => v0.i)
        )
        cond = false
        test(
          quote(query[TestEntity]).dynamic.map(v0 => if (cond) quote(v0.i) else quote(1)),
          query[TestEntity].map(v0 => 1)
        )
      }
    }
    "flatMap" - {
      "simple" in {
        test(
          quote(query[TestEntity]).dynamic.flatMap(v0 => quote(query[TestEntity]).dynamic),
          query[TestEntity].flatMap(v0 => query[TestEntity])
        )
      }
      "mixed with static" in {
        test(
          quote(query[TestEntity]).dynamic.flatMap(v0 => query[TestEntity]),
          query[TestEntity].flatMap(v0 => query[TestEntity])
        )
        test(
          query[TestEntity].flatMap(v0 => quote(query[TestEntity]).dynamic),
          query[TestEntity].flatMap(v0 => query[TestEntity])
        )
      }
      "with map" in {
        test(
          quote(query[TestEntity]).dynamic.flatMap(v0 => quote(query[TestEntity]).dynamic.map(v1 => quote((unquote(v0), unquote(v1))))),
          query[TestEntity].flatMap(v0 => query[TestEntity].map(v1 => (v0, v1)))
        )
      }
      "for comprehension" in {
        test(
          for {
            v0 <- quote(query[TestEntity]).dynamic
            v1 <- quote(query[TestEntity]).dynamic
          } yield (unquote(v0), unquote(v1)),
          for {
            v0 <- query[TestEntity]
            v1 <- query[TestEntity]
          } yield (v0, v1)
        )
      }
    }
    "filter" in {
      test(
        quote(query[TestEntity]).dynamic.filter(v0 => quote(v0.i == 1)),
        query[TestEntity].filter(v0 => v0.i == 1)
      )
    }
    "withFilter" in {
      test(
        quote(query[TestEntity]).dynamic.withFilter(v0 => quote(v0.i == 1)),
        query[TestEntity].withFilter(v0 => v0.i == 1)
      )
    }
    // filterOpt applies the predicate only when the Option is defined.
    "filterOpt" - {
      "defined" in {
        val o = Some(1)
        test(
          quote(query[TestEntity]).dynamic.filterOpt(o)((v0, i) => quote(v0.i == i)),
          query[TestEntity].filter(v0 => v0.i == lift(1))
        )
      }
      "empty" in {
        val o: Option[Int] = None
        test(
          quote(query[TestEntity]).dynamic.filterOpt(o)((v0, i) => quote(v0.i == i)),
          query[TestEntity]
        )
      }
    }
    // filterIf applies the predicate only when the boolean condition holds.
    "filterIf" - {
      "true" in {
        val ids = Seq(1)
        test(
          quote(query[TestEntity]).dynamic.filterIf(ids.nonEmpty)(v0 => quote(liftQuery(ids).contains(v0.i))),
          query[TestEntity].filter(v0 => quote(liftQuery(ids).contains(v0.i)))
        )
      }
      "false" in {
        val ids = Seq.empty[Int]
        test(
          quote(query[TestEntity]).dynamic.filterIf(ids.nonEmpty)(v0 => quote(liftQuery(ids).contains(v0.i))),
          query[TestEntity]
        )
      }
    }
    "concatMap" in {
      test(
        quote(query[TestEntity]).dynamic.concatMap[String, Array[String]](v0 => quote(v0.s.split(" "))),
        query[TestEntity].concatMap[String, Array[String]](v0 => v0.s.split(" "))
      )
    }
    "sortBy" in {
      val o = Ord.desc[Int]
      test(
        quote(query[TestEntity]).dynamic.sortBy(v0 => quote(v0.i))(o),
        query[TestEntity].sortBy(v0 => v0.i)(Ord.desc)
      )
    }
    // Note: plain-value take/drop arguments are lifted, quoted ones are inlined.
    "take" - {
      "quoted" in {
        test(
          quote(query[TestEntity]).dynamic.take(quote(1)),
          query[TestEntity].take(1)
        )
      }
      "int" in {
        test(
          quote(query[TestEntity]).dynamic.take(1),
          query[TestEntity].take(lift(1))
        )
      }
      "opt" - {
        "defined" in {
          test(
            quote(query[TestEntity]).dynamic.takeOpt(Some(1)),
            query[TestEntity].take(lift(1))
          )
        }
        "empty" in {
          test(
            quote(query[TestEntity]).dynamic.takeOpt(None),
            query[TestEntity]
          )
        }
      }
    }
    "drop" - {
      "quoted" in {
        test(
          quote(query[TestEntity]).dynamic.drop(quote(1)),
          query[TestEntity].drop(1)
        )
      }
      "int" in {
        test(
          quote(query[TestEntity]).dynamic.drop(1),
          query[TestEntity].drop(lift(1))
        )
      }
      "opt" - {
        "defined" in {
          test(
            quote(query[TestEntity]).dynamic.dropOpt(Some(1)),
            query[TestEntity].drop(lift(1))
          )
        }
        "empty" in {
          test(
            quote(query[TestEntity]).dynamic.dropOpt(None),
            query[TestEntity]
          )
        }
      }
    }
    "++" in {
      test(
        quote(query[TestEntity]).dynamic ++ quote(query[TestEntity]).dynamic.filter(v0 => v0.i == 1),
        query[TestEntity] ++ query[TestEntity].filter(v0 => v0.i == 1)
      )
    }
    "unionAll" in {
      test(
        quote(query[TestEntity]).dynamic.unionAll(quote(query[TestEntity]).dynamic.filter(v0 => v0.i == 1)),
        query[TestEntity].unionAll(query[TestEntity].filter(v0 => v0.i == 1))
      )
    }
    "union" in {
      test(
        quote(query[TestEntity]).dynamic.union(quote(query[TestEntity]).dynamic.filter(v0 => v0.i == 1)),
        query[TestEntity].union(query[TestEntity].filter(v0 => v0.i == 1))
      )
    }
    "groupBy" in {
      test(
        quote(query[TestEntity]).dynamic.groupBy(v0 => v0.i).map(v1 => v1._1),
        query[TestEntity].groupBy(v0 => v0.i).map(v1 => v1._1)
      )
    }
    // Aggregations are exercised inside a map over a subquery.
    "min" in {
      test(
        quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.map(v1 => v1.i).min.contains(v0.i)),
        query[TestEntity].map(v0 => query[TestEntity].map(v1 => v1.i).min.contains(v0.i))
      )
    }
    "max" in {
      test(
        quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.map(v1 => v1.i).max.contains(v0.i)),
        query[TestEntity].map(v0 => query[TestEntity].map(v1 => v1.i).max.contains(v0.i))
      )
    }
    "avg" in {
      test(
        quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.map(v1 => v1.i).avg.contains(v0.i)),
        query[TestEntity].map(v0 => query[TestEntity].map(v1 => v1.i).avg.contains(v0.i))
      )
    }
    "sum" in {
      test(
        quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.map(v1 => v1.i).sum.contains(v0.i)),
        query[TestEntity].map(v0 => query[TestEntity].map(v1 => v1.i).sum.contains(v0.i))
      )
    }
    "size" in {
      test(
        quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.size),
        query[TestEntity].map(v0 => query[TestEntity].size)
      )
    }
    "regular joins" - {
      "join" in {
        test(
          quote(query[TestEntity]).dynamic.join(quote(query[TestEntity]).dynamic).on((v0, v1) => v0.i == v1.i),
          query[TestEntity].join(query[TestEntity]).on((v0, v1) => v0.i == v1.i)
        )
      }
      "leftJoin" in {
        test(
          quote(query[TestEntity]).dynamic.leftJoin(quote(query[TestEntity2]).dynamic).on((v0, v1) => v0.i == v1.i),
          query[TestEntity].leftJoin(query[TestEntity2]).on((v0, v1) => v0.i == v1.i)
        )
      }
      "rightJoin" in {
        test(
          quote(query[TestEntity]).dynamic.rightJoin(quote(query[TestEntity2]).dynamic).on((v0, v1) => v0.i == v1.i),
          query[TestEntity].rightJoin(query[TestEntity2]).on((v0, v1) => v0.i == v1.i)
        )
      }
      "fullJoin" in {
        test(
          quote(query[TestEntity]).dynamic.fullJoin(quote(query[TestEntity2]).dynamic).on((v0, v1) => v0.i == v1.i),
          query[TestEntity].fullJoin(query[TestEntity2]).on((v0, v1) => v0.i == v1.i)
        )
      }
    }
    "flat joins" - {
      "join" in {
        test(
          for {
            v0 <- quote(query[TestEntity]).dynamic
            v1 <- quote(query[TestEntity2]).dynamic.join(v1 => v0.i == v1.i)
          } yield (unquote(v0), unquote(v1)),
          for {
            v0 <- query[TestEntity]
            v1 <- query[TestEntity2].join(v1 => v0.i == v1.i)
          } yield (unquote(v0), unquote(v1))
        )
      }
      "leftJoin" in {
        test(
          for {
            v0 <- quote(query[TestEntity]).dynamic
            v1 <- quote(query[TestEntity2]).dynamic.leftJoin(v1 => v0.i == v1.i)
          } yield (unquote(v0), unquote(v1)),
          for {
            v0 <- query[TestEntity]
            v1 <- query[TestEntity2].leftJoin(v1 => v0.i == v1.i)
          } yield (unquote(v0), unquote(v1))
        )
      }
      "rightJoin" in {
        test(
          for {
            v0 <- quote(query[TestEntity]).dynamic
            v1 <- quote(query[TestEntity2]).dynamic.rightJoin(v1 => v0.i == v1.i)
          } yield (unquote(v0), unquote(v1)),
          for {
            v0 <- query[TestEntity]
            v1 <- query[TestEntity2].rightJoin(v1 => v0.i == v1.i)
          } yield (unquote(v0), unquote(v1))
        )
      }
    }
    "nonEmpty" in {
      test(
        quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.nonEmpty),
        query[TestEntity].map(v0 => query[TestEntity].nonEmpty)
      )
    }
    "isEmpty" in {
      test(
        quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.isEmpty),
        query[TestEntity].map(v0 => query[TestEntity].isEmpty)
      )
    }
    "contains" - {
      "quoted" in {
        test(
          quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.map(v1 => v1.i).contains(quote(v0.i))),
          query[TestEntity].map(v0 => query[TestEntity].map(v1 => v1.i).contains(v0.i))
        )
      }
      "value" in {
        test(
          quote(query[TestEntity]).dynamic.map(v0 => quote(query[TestEntity]).dynamic.map(v1 => v1.i).contains(1)),
          query[TestEntity].map(v0 => query[TestEntity].map(v1 => v1.i).contains(lift(1)))
        )
      }
    }
    "distinct" in {
      test(
        quote(query[TestEntity]).dynamic.distinct,
        query[TestEntity].distinct
      )
    }
    "nested" in {
      test(
        quote(query[TestEntity]).dynamic.nested.map(v0 => v0.i),
        query[TestEntity].nested.map(v0 => v0.i)
      )
    }
  }
  "entityQuery" - {
    // Same comparison helper as above, but for EntityQuery (keeps insert/update available).
    def test[T: QueryMeta](d: Quoted[EntityQuery[T]], s: Quoted[EntityQuery[T]]) =
      testContext.run(d).string mustEqual testContext.run(s).string
    "filter" in {
      test(
        quote(query[TestEntity]).dynamic.filter(v0 => v0.i == 1),
        query[TestEntity].filter(v0 => v0.i == 1)
      )
    }
    "withFilter" in {
      test(
        quote(query[TestEntity]).dynamic.withFilter(v0 => v0.i == 1),
        query[TestEntity].withFilter(v0 => v0.i == 1)
      )
    }
    "filterOpt" - {
      "defined" in {
        val o = Some(1)
        test(
          quote(query[TestEntity]).dynamic.filterOpt(o)((v0, i) => v0.i == i),
          query[TestEntity].filter(v0 => v0.i == lift(1))
        )
      }
      "empty" in {
        val o: Option[Int] = None
        test(
          quote(query[TestEntity]).dynamic.filterOpt(o)((v0, i) => v0.i == i),
          query[TestEntity]
        )
      }
    }
    "map" in {
      test(
        quote(query[TestEntity]).dynamic.map(v0 => v0.i),
        query[TestEntity].map(v0 => v0.i)
      )
    }
  }
  "actions" - {
    // Comparison helper for insert/update/delete actions.
    def test[T](d: Quoted[Action[T]], s: Quoted[Action[T]]) =
      testContext.run(d).string mustEqual testContext.run(s).string
    val t = TestEntity("s", 1, 2L, Some(3), true)
    "insertValue" in {
      test(
        quote(query[TestEntity]).dynamic.insertValue(t),
        query[TestEntity].insert(lift(t))
      )
    }
    "updateValue" in {
      test(
        quote(query[TestEntity]).dynamic.updateValue(t),
        query[TestEntity].update(lift(t))
      )
    }
    "insert" - {
      "one column" in {
        test(
          quote(query[TestEntity]).dynamic.insert(set(_.i, 1)),
          query[TestEntity].insert(v => v.i -> 1)
        )
      }
      "multiple columns" in {
        test(
          quote(query[TestEntity]).dynamic.insert(set(_.i, 1), set(_.l, 2L)),
          query[TestEntity].insert(v => v.i -> 1, v => v.l -> 2L)
        )
      }
      "setOpt" in {
        test(
          quote(query[TestEntity]).dynamic.insert(setOpt(_.i, None), setOpt(_.l, Some(2L))),
          query[TestEntity].insert(v => v.l -> lift(2L))
        )
      }
      "string column name" in {
        test(
          quote(query[TestEntity]).dynamic.insert(set("i", 1), set("l", 2L)),
          query[TestEntity].insert(v => v.i -> 1, v => v.l -> 2L)
        )
      }
      "returning" in {
        test(
          quote(query[TestEntity]).dynamic.insert(set(_.i, 1)).returningGenerated(v0 => v0.l),
          quote {
            query[TestEntity].insert(v => v.i -> 1).returningGenerated(v0 => v0.l)
          }
        )
      }
      "returning non quoted" in {
        test(
          quote(query[TestEntity]).dynamic.insert(set(_.i, 1)).returningGenerated(v0 => v0.l),
          query[TestEntity].insert(v => v.i -> 1).returningGenerated((v0: TestEntity) => v0.l)
        )
      }
      "onConflictIgnore" - {
        "simple" in {
          test(
            quote(query[TestEntity]).dynamic.insert(set(_.i, 1)).onConflictIgnore,
            query[TestEntity].insert(v => v.i -> 1).onConflictIgnore
          )
        }
        "with targets" in {
          test(
            quote(query[TestEntity]).dynamic.insert(set(_.i, 1)).onConflictIgnore(_.i),
            query[TestEntity].insert(v => v.i -> 1).onConflictIgnore(_.i)
          )
        }
      }
    }
    "update" - {
      "one column" in {
        test(
          quote(query[TestEntity]).dynamic.update(set(_.i, 1)),
          query[TestEntity].update(v => v.i -> 1)
        )
      }
      "multiple columns" in {
        test(
          quote(query[TestEntity]).dynamic.update(set(_.i, 1), set(_.l, 2L)),
          query[TestEntity].update(v => v.i -> 1, v => v.l -> 2L)
        )
      }
      "string column name" in {
        test(
          quote(query[TestEntity]).dynamic.update(set("i", 1), set("l", 2L)),
          query[TestEntity].update(v => v.i -> 1, v => v.l -> 2L)
        )
      }
    }
    "delete" in {
      test(
        quote(query[TestEntity]).dynamic.delete,
        query[TestEntity].delete
      )
    }
  }
}
|
getquill/quill
|
quill-core/src/test/scala/io/getquill/quotation/CompatibleDynamicQuerySpec.scala
|
Scala
|
apache-2.0
| 16,540 |
package controllers
import org.asyncmongo.bson._
import play.api.libs.json._
import play.api.libs.json.Constraints._
import play.api.mvc._
import play.modules.mongodb._
import play.api.Play.current
import org.asyncmongo.protocol.messages._
import play.modules.mongodb.PlayBsonImplicits._
import org.asyncmongo.handlers.DefaultBSONHandlers._
import org.asyncmongo.bson._
import play.api.libs.concurrent._
/** Play controller demonstrating JSON validation and async Mongo inserts. */
object User extends Controller {
  // The Mongo collection backing user documents.
  val users = MongoAsyncPlugin.collection("users")
  //case class User(name: String, email: String, password: String, age: Option[Int])
  // Tuple format: validates name/email/password/age on read; `pruned` strips
  // the password on write so it is never echoed back.
  implicit val userFormat: Format[(String, String, String, Option[Int])] = JsTupler(
    JsPath \\ 'name -> in( required[String] ),
    JsPath \\ 'email -> in( required[String] and email ),
    JsPath \\ 'password -> (in( required[String] ) ~ out( pruned[String] )),
    JsPath \\ 'age -> in( optional[Int] )
  )
  /** POST endpoint: validates the JSON body and inserts it asynchronously.
   *  Responds 200 on success, 400 on validation failure or non-JSON body. */
  def register = Action { implicit request => Async {
    request.body.asJson.map { json =>
      json.validate[(String, String, String, Option[Int])].fold(
        valid = { user =>
          // Insert the raw JSON document; GetLastError makes the write acknowledged.
          val promise = users.insert(json, GetLastError(MongoAsyncPlugin.dbName)).asPromise
          promise.map { le => Ok("Created user " + Json.toJson(user)) }
        },
        invalid = jserror => Promise.pure(BadRequest("validation error:%s".format(jserror.toString)))
      )
    }.getOrElse {
      Promise.pure(BadRequest("Expecting Json data"))
    }
  }}
  // Nested-object formats used by register2 (address embedded in user).
  implicit val addressFormat: Format[(String, Int, String)] = JsTupler(
    JsPath \\ 'street -> in(required[String]),
    JsPath \\ 'nb -> in(required[Int]),
    JsPath \\ 'town -> in[String]
  )
  implicit val user2Format: Format[(String, String, (String, Int, String))] = JsTupler(
    JsPath \\ 'name -> in( required[String] ),
    JsPath \\ 'email -> in( required[String] and email ),
    JsPath \\ 'address -> addressFormat
  )
  /** Synchronous variant: validates a nested user+address document and echoes
   *  it back without persisting anything. */
  def register2 = Action { implicit request =>
    request.body.asJson.map { json =>
      json.validate[(String, String, (String, Int, String))].fold(
        valid = user =>
          Ok("Hello " + Json.toJson(user)),
        invalid = jserror => BadRequest("validation error:%s".format(jserror.toString))
      )
    }.getOrElse {
      BadRequest("Expecting Json data")
    }
  }
}
|
mandubian/play2-mongodb-async
|
samples/play2-mongodb-async-app/app/controllers/User.scala
|
Scala
|
apache-2.0
| 2,255 |
package slick.jdbc
import scala.language.{existentials, implicitConversions, higherKinds}
import scala.collection.mutable.HashMap
import slick.SlickException
import slick.ast._
import slick.ast.Util.nodeToNodeOps
import slick.ast.TypeUtil._
import slick.compiler.{RewriteBooleans, CodeGen, Phase, CompilerState, QueryCompiler}
import slick.lifted._
import slick.relational.{RelationalProfile, RelationalCapabilities, ResultConverter, CompiledMapping}
import slick.sql.SqlProfile
import slick.util._
import slick.util.MacroSupport.macroSupportInterpolation
import slick.util.SQLBuilder.Result
trait JdbcStatementBuilderComponent { self: JdbcProfile =>
  // Create the different builders -- these methods should be overridden by profiles as needed.
  // Each factory returns the default builder; database-specific profiles override them to
  // emit dialect-specific SQL.
  def createQueryBuilder(n: Node, state: CompilerState): QueryBuilder = new QueryBuilder(n, state)
  def createInsertBuilder(node: Insert): InsertBuilder = new InsertBuilder(node)
  def createUpsertBuilder(node: Insert): InsertBuilder = new UpsertBuilder(node)
  def createCheckInsertBuilder(node: Insert): InsertBuilder = new CheckInsertBuilder(node)
  def createUpdateInsertBuilder(node: Insert): InsertBuilder = new UpdateInsertBuilder(node)
  def createTableDDLBuilder(table: Table[_]): TableDDLBuilder = new TableDDLBuilder(table)
  def createColumnDDLBuilder(column: FieldSymbol, table: Table[_]): ColumnDDLBuilder = new ColumnDDLBuilder(column)
  def createSequenceDDLBuilder(seq: Sequence[_]): SequenceDDLBuilder = new SequenceDDLBuilder(seq)
  /** Lazily compiles the insert AST `source` into the SQL/converter artifacts
   *  needed for the various insert flavors (standard, forced, upsert, check, update). */
  class JdbcCompiledInsert(source: Node) {
    /** Bundle of a compiled tree, its result converter, and the builder result
     *  (table, SQL string, field list) produced by one compiler pipeline. */
    class Artifacts(val compiled: Node, val converter: ResultConverter[JdbcResultConverterDomain, Any], val ibr: InsertBuilderResult) {
      def table: TableNode = ibr.table
      def sql: String = ibr.sql
      def fields: ConstArray[FieldSymbol] = ibr.fields
    }
    // Runs the given compiler over `source` and destructures the expected
    // ResultSetMapping shape; an unexpected tree shape fails with a MatchError.
    protected[this] def compile(compiler: QueryCompiler): Artifacts = {
      val compiled = compiler.run(source).tree
      val ResultSetMapping(_, CompiledStatement(sql, ibr: InsertBuilderResult, _), CompiledMapping(conv, _)) = compiled
      new Artifacts(compiled, conv.asInstanceOf[ResultConverter[JdbcResultConverterDomain, Any]], ibr)
    }
    /** The compiled artifacts for standard insert statements. */
    lazy val standardInsert = compile(insertCompiler)
    /** The compiled artifacts for forced insert statements. */
    lazy val forceInsert = compile(forceInsertCompiler)
    /** The compiled artifacts for upsert statements. */
    lazy val upsert = compile(upsertCompiler)
    /** The compiled artifacts for 'check insert' statements. */
    lazy val checkInsert = compile(checkInsertCompiler)
    /** The compiled artifacts for 'update insert' statements. */
    lazy val updateInsert = compile(updateInsertCompiler)
    /** Build a list of columns and a matching `ResultConverter` for retrieving keys of inserted rows.
     *  Throws SlickException when the DBMS lacks the required return-key capabilities
     *  or when the returned columns come from a different table than the insert. */
    def buildReturnColumns(node: Node): (ConstArray[String], ResultConverter[JdbcResultConverterDomain, _], Boolean) = {
      if(!capabilities.contains(JdbcCapabilities.returnInsertKey))
        throw new SlickException("This DBMS does not allow returning columns from INSERT statements")
      val ResultSetMapping(_, CompiledStatement(_, ibr: InsertBuilderResult, _), CompiledMapping(rconv, _)) =
        forceInsertCompiler.run(node).tree
      if(ibr.table.baseIdentity != standardInsert.table.baseIdentity)
        throw new SlickException("Returned key columns must be from same table as inserted columns ("+
          ibr.table.baseIdentity+" != "+standardInsert.table.baseIdentity+")")
      // "Other" means anything beyond a single AutoInc column; some DBMSs only
      // support returning that single generated key.
      val returnOther = ibr.fields.length > 1 || !ibr.fields.head.options.contains(ColumnOption.AutoInc)
      if(!capabilities.contains(JdbcCapabilities.returnInsertOther) && returnOther)
        throw new SlickException("This DBMS allows only a single column to be returned from an INSERT, and that column must be an AutoInc column.")
      (ibr.fields.map(_.name), rconv.asInstanceOf[ResultConverter[JdbcResultConverterDomain, _]], returnOther)
    }
  }
  /** Marker for which clause of the statement is currently being generated;
   *  tracked by QueryBuilder so expression emission can vary by clause.
   *  NOTE(review): not sealed — profiles in other files may define additional parts. */
  abstract class StatementPart
  case object SelectPart extends StatementPart
  case object FromPart extends StatementPart
  case object WherePart extends StatementPart
  case object HavingPart extends StatementPart
  case object OtherPart extends StatementPart
/** Create a SQL representation of a literal value.
  * For an `Option` type, `None` is rendered as SQL `null`; otherwise the value is
  * delegated to the column type's own literal rendering. */
def valueToSQLLiteral(v: Any, tpe: Type): String = {
  val JdbcType(ti, option) = tpe
  if(option) v.asInstanceOf[Option[Any]].fold("null")(ti.valueToSQLLiteral)
  else ti.valueToSQLLiteral(v)
}
// Immutable config options (to be overridden by subclasses)
/** The table name for scalar selects (e.g. "select 42 from DUAL;"), or `None` for
  * scalar selects without a FROM clause ("select 42;").
  * Also used by `UpsertBuilder` when building the parameter-select part of a MERGE. */
val scalarFrom: Option[String] = None
/** Builder for SELECT and UPDATE statements.
  *
  * Renders a compiled query `tree` into SQL by appending to a single mutable
  * `SQLBuilder` (`b`). Generation is therefore strictly order-dependent: every
  * `buildXxx` method appends its clause in sequence. Subclasses override the
  * config vals and individual clause builders to adapt to a concrete DBMS dialect. */
class QueryBuilder(val tree: Node, val state: CompilerState) { queryBuilder =>
    // Immutable config options (to be overridden by subclasses)
    protected val supportsTuples = true
    protected val supportsCast = true
    protected val concatOperator: Option[String] = None
    protected val hasPiFunction = true
    protected val hasRadDegConversion = true
    protected val parenthesizeNestedRHSJoin = false
    protected val pi = "3.1415926535897932384626433832795"
    protected val alwaysAliasSubqueries = true
    protected val supportsLiteralGroupBy = false
    protected val quotedJdbcFns: Option[Seq[Library.JdbcFunction]] = None // quote all by default
    // Mutable state accessible to subclasses
    protected val b = new SQLBuilder
    protected var currentPart: StatementPart = OtherPart
    protected val symbolName = new QuotingSymbolNamer(Some(state.symbolNamer))
    // Maps generator symbols to their Join nodes (filled by scanJoins, read by expr for Path resolution).
    protected val joins = new HashMap[TermSymbol, Join]
    // When set, the single FROM source whose alias prefix can be omitted in column references.
    protected var currentUniqueFrom: Option[TermSymbol] = None
    def sqlBuilder = b
    /** Render the whole tree as a SELECT statement and return the finished SQL + setter. */
    final def buildSelect(): SQLBuilder.Result = {
      expr(tree, true)
      b.build
    }
    /** Run `f` with `currentPart` temporarily set to `p`, restoring the old part afterwards. */
    @inline protected final def building(p: StatementPart)(f: => Unit): Unit = {
      val oldPart = currentPart
      currentPart = p
      f
      currentPart = oldPart
    }
    /** Render a full comprehension: SELECT, FROM, WHERE, GROUP BY, HAVING, ORDER BY,
      * FETCH/OFFSET and FOR UPDATE clauses, in that order. */
    protected def buildComprehension(c: Comprehension): Unit = {
      // `.take(0)` is rendered as "where 1=0" instead of a fetch clause.
      val limit0 = c.fetch match {
        case Some(LiteralNode(0L)) => true
        case _ => false
      }
      scanJoins(ConstArray((c.sym, c.from)))
      val (from, on) = flattenJoins(c.sym, c.from)
      val oldUniqueFrom = currentUniqueFrom
      def containsSymbolInSubquery(s: TermSymbol) =
        c.children.iterator.drop(1).flatMap(_.collect { case c: Comprehension => c }.toSeq.flatMap(_.findNode(_ == Ref(s)))).nonEmpty
      // Only drop the table alias when there is exactly one source and no subquery refers to it.
      currentUniqueFrom = from match {
        case Seq((s, _: TableNode)) if !containsSymbolInSubquery(s) => Some(s)
        case Seq((s, _)) if !alwaysAliasSubqueries && !containsSymbolInSubquery(s) => Some(s)
        case _ => None
      }
      buildSelectClause(c)
      buildFromClause(from)
      if(limit0) b"\\nwhere 1=0"
      else buildWhereClause(and(c.where.toSeq ++ on))
      buildGroupByClause(c.groupBy)
      buildHavingClause(c.having)
      buildOrderByClause(c.orderBy)
      if(!limit0) buildFetchOffsetClause(c.fetch, c.offset)
      buildForUpdateClause(c.forUpdate)
      currentUniqueFrom = oldUniqueFrom
    }
    // Combine predicates with AND, or None for an empty sequence.
    private[this] def and(ns: Seq[Node]): Option[Node] =
      if(ns.isEmpty) None else Some(ns.reduceLeft((p1, p2) => Library.And.typed[Boolean](p1, p2)))
    /** Flatten nested inner joins into a flat list of sources plus collected ON predicates.
      * Non-inner joins stop the flattening and are kept as a single source. */
    protected def flattenJoins(s: TermSymbol, n: Node): (Seq[(TermSymbol, Node)], Seq[Node]) = {
      def f(s: TermSymbol, n: Node): Option[(Seq[(TermSymbol, Node)], Seq[Node])] = n match {
        case Join(ls, rs, l, r, JoinType.Inner, on) =>
          for {
            (defs1, on1) <- f(ls, l)
            (defs2, on2) <- f(rs, r)
          } yield (defs1 ++ defs2, on match {
            case LiteralNode(true) => on1 ++ on2
            case on => on1 ++ on2 :+ on
          })
        case _: Join => None
        case n => Some((Seq((s, n)), Nil))
      }
      f(s, n).getOrElse((Seq((s, n)), Nil))
    }
    /** Render "select ..." with modifiers and the projection; empty projections become "select 1". */
    protected def buildSelectClause(c: Comprehension) = building(SelectPart) {
      b"select "
      buildSelectModifiers(c)
      c.select match {
        case Pure(StructNode(ch), _) =>
          b.sep(ch, ", ") { case (sym, n) =>
            buildSelectPart(n)
            b" as `$sym"
          }
          if(ch.isEmpty) b"1"
        case Pure(ProductNode(ch), _) =>
          b.sep(ch, ", ")(buildSelectPart)
          if(ch.isEmpty) b"1"
        case Pure(n, _) => buildSelectPart(n)
      }
    }
    /** Render DISTINCT / DISTINCT ON modifiers, if any. */
    protected def buildSelectModifiers(c: Comprehension): Unit = {
      c.distinct.foreach {
        case ProductNode(ch) if ch.isEmpty => b"distinct "
        case n => b"distinct on (!$n) "
      }
    }
    // Recursively record all join generators so Path resolution in expr can find them.
    protected def scanJoins(from: ConstArray[(TermSymbol, Node)]) {
      for((sym, j: Join) <- from) {
        joins += sym -> j
        scanJoins(j.generators)
      }
    }
    /** Render the FROM clause; an empty source list falls back to `scalarFrom` (if any). */
    protected def buildFromClause(from: Seq[(TermSymbol, Node)]) = building(FromPart) {
      from match {
        case Nil | Seq((_, Pure(ProductNode(ConstArray()), _))) => scalarFrom.foreach { s => b"\\nfrom $s" }
        case from =>
          b"\\nfrom "
          b.sep(from, ", ") { case (sym, n) => buildFrom(n, if(Some(sym) == currentUniqueFrom) None else Some(sym)) }
      }
    }
    protected def buildWhereClause(where: Option[Node]) =
      building(WherePart)(where.foreach(p => b"\\nwhere !$p"))
    protected def buildGroupByClause(groupBy: Option[Node]) = building(OtherPart)(groupBy.foreach { n =>
      b"\\ngroup by "
      n match {
        case ProductNode(es) => b.sep(es, ", ")(buildGroupByColumn)
        case e => buildGroupByColumn(e)
      }
    })
    protected def buildGroupByColumn(by: Node) = by match {
      // Some database systems assign special meaning to literal values in GROUP BY, so we replace
      // them by a constant non-literal expression unless it is known to be safe.
      case LiteralNode(_) if !supportsLiteralGroupBy => b"0+0"
      case e => b"!$e"
    }
    protected def buildHavingClause(having: Option[Node]) =
      building(HavingPart)(having.foreach(p => b"\\nhaving !$p"))
    protected def buildOrderByClause(order: ConstArray[(Node, Ordering)]) = building(OtherPart) {
      if(!order.isEmpty) {
        b"\\norder by "
        b.sep(order, ", "){ case (n, o) => buildOrdering(n, o) }
      }
    }
    /** Render LIMIT/OFFSET using SQL:2008 OFFSET/FETCH syntax (override for other dialects). */
    protected def buildFetchOffsetClause(fetch: Option[Node], offset: Option[Node]) = building(OtherPart) {
      (fetch, offset) match {
        /* SQL:2008 syntax */
        case (Some(t), Some(d)) => b"\\noffset $d row fetch next $t row only"
        case (Some(t), None) => b"\\nfetch next $t row only"
        case (None, Some(d)) => b"\\noffset $d row"
        case _ =>
      }
    }
    protected def buildForUpdateClause(forUpdate: Boolean) = building(OtherPart) {
      if(forUpdate) {
        b"\\nfor update "
      }
    }
    // A comprehension in the projection becomes a parenthesized scalar subquery.
    protected def buildSelectPart(n: Node): Unit = n match {
      case c: Comprehension =>
        b"\\["
        buildComprehension(c)
        b"\\]"
      case n =>
        expr(n, true)
    }
    /** Render one FROM source (table, join or subquery expression) with an optional alias. */
    protected def buildFrom(n: Node, alias: Option[TermSymbol], skipParens: Boolean = false): Unit = building(FromPart) {
      def addAlias = alias foreach { s => b += ' ' += symbolName(s) }
      n match {
        case t: TableNode =>
          b += quoteTableName(t)
          addAlias
        case j: Join =>
          buildJoin(j)
        case n =>
          expr(n, skipParens)
          addAlias
      }
    }
    /** Render a join; an inner join with a trivial ON condition becomes a CROSS JOIN. */
    protected def buildJoin(j: Join): Unit = {
      buildFrom(j.left, Some(j.leftGen))
      val op = j.on match {
        case LiteralNode(true) if j.jt == JoinType.Inner => "cross"
        case _ => j.jt.sqlName
      }
      b"\\n$op join "
      if(j.right.isInstanceOf[Join] && parenthesizeNestedRHSJoin) {
        b"\\["
        buildFrom(j.right, Some(j.rightGen))
        b"\\]"
      } else buildFrom(j.right, Some(j.rightGen))
      if(op != "cross") j.on match {
        case LiteralNode(true) => b"\\non 1=1"
        case on => b"\\non !$on"
      }
    }
    /** Render an arbitrary expression node. `skipParens` suppresses the outer
      * parentheses/brackets where the context makes them unnecessary. */
    def expr(n: Node, skipParens: Boolean = false): Unit = n match {
      // Column/path references: resolve through recorded joins, drop the alias
      // prefix when the path refers to the unique unaliased FROM source.
      case p @ Path(path) =>
        val (base, rest) = path.foldRight[(Option[TermSymbol], List[TermSymbol])]((None, Nil)) {
          case (ElementSymbol(idx), (Some(b), Nil)) => (Some(joins(b).generators(idx-1)._1), Nil)
          case (s, (None, Nil)) => (Some(s), Nil)
          case (s, (b, r)) => (b, s :: r)
        }
        if(base != currentUniqueFrom) b += symbolName(base.get) += '.'
        rest match {
          case Nil => b += '*'
          case field :: Nil => b += symbolName(field)
          case _ => throw new SlickException("Cannot resolve "+p+" as field or view")
        }
      // Literals: inline as SQL text when possible, otherwise bind as a parameter.
      case (n @ LiteralNode(v)) :@ JdbcType(ti, option) =>
        if(n.volatileHint || !ti.hasLiteralForm) b +?= { (p, idx, param) =>
          if(option) ti.setOption(v.asInstanceOf[Option[Any]], p, idx)
          else ti.setValue(v, p, idx)
        } else b += valueToSQLLiteral(v, n.nodeType)
      case ProductNode(ch) =>
        b"\\("
        b.sep(ch, ", ")(expr(_))
        b"\\)"
      // Function/operator applications, with special cases for common rewrites.
      case n: Apply => n match {
        case Library.Not(Library.==(l, LiteralNode(null))) =>
          b"\\($l is not null\\)"
        case Library.==(l, LiteralNode(null)) =>
          b"\\($l is null\\)"
        case Library.==(left: ProductNode, right: ProductNode) =>
          b"\\("
          if(supportsTuples) b"$left = $right"
          else {
            val cols = left.children.zip(right.children).force
            b.sep(cols, " and "){ case (l,r) => expr(l); b += "="; expr(r) }
          }
          b"\\)"
        case RewriteBooleans.ToFakeBoolean(ch) =>
          expr(IfThenElse(ConstArray(ch, LiteralNode(1).infer(), LiteralNode(0).infer())), skipParens)
        case RewriteBooleans.ToRealBoolean(ch) =>
          expr(Library.==.typed[Boolean](ch, LiteralNode(true).infer()), skipParens)
        case Library.Exists(c: Comprehension) =>
          /* If tuples are not supported, selecting multiple individual columns
           * in exists(select ...) is probably not supported, either, so we rewrite
           * such sub-queries to "select 1". */
          b"exists\\[!${(if(supportsTuples) c else c.copy(select = Pure(LiteralNode(1))).infer()): Node}\\]"
        case Library.Concat(l, r) if concatOperator.isDefined =>
          b"\\($l${concatOperator.get}$r\\)"
        case Library.User() if !capabilities.contains(RelationalCapabilities.functionUser) =>
          b += "''"
        case Library.Database() if !capabilities.contains(RelationalCapabilities.functionDatabase) =>
          b += "''"
        case Library.Pi() if !hasPiFunction => b += pi
        case Library.Degrees(ch) if !hasRadDegConversion => b"(180.0/!${Library.Pi.typed(columnTypes.bigDecimalJdbcType)}*$ch)"
        case Library.Radians(ch) if!hasRadDegConversion => b"(!${Library.Pi.typed(columnTypes.bigDecimalJdbcType)}/180.0*$ch)"
        case Library.Between(left, start, end) => b"$left between $start and $end"
        case Library.CountDistinct(e) => b"count(distinct $e)"
        case Library.CountAll(e) => b"count($e)"
        case Library.Like(l, r) => b"\\($l like $r\\)"
        case Library.Like(l, r, LiteralNode(esc: Char)) =>
          if(esc == '\\'' || esc == '%' || esc == '_') throw new SlickException("Illegal escape character '"+esc+"' for LIKE expression")
          // JDBC defines an {escape } syntax but the unescaped version is understood by more DBs/drivers
          b"\\($l like $r escape '$esc'\\)"
        case Library.StartsWith(n, LiteralNode(s: String)) =>
          b"\\($n like ${valueToSQLLiteral(likeEncode(s)+'%', ScalaBaseType.stringType)} escape '^'\\)"
        case Library.EndsWith(n, LiteralNode(s: String)) =>
          b"\\($n like ${valueToSQLLiteral("%"+likeEncode(s), ScalaBaseType.stringType)} escape '^'\\)"
        case Library.Trim(n) =>
          expr(Library.LTrim.typed[String](Library.RTrim.typed[String](n)), skipParens)
        case Library.Substring(n, start, end) =>
          b"\\({fn substring($n, ${QueryParameter.constOp[Int]("+")(_ + _)(start, LiteralNode(1).infer())}, ${QueryParameter.constOp[Int]("-")(_ - _)(end, start)})}\\)"
        case Library.Substring(n, start) =>
          b"\\({fn substring($n, ${QueryParameter.constOp[Int]("+")(_ + _)(start, LiteralNode(1).infer())})}\\)"
        case Library.IndexOf(n, str) => b"\\({fn locate($str, $n)} - 1\\)"
        case Library.Cast(ch @ _*) =>
          val tn =
            if(ch.length == 2) ch(1).asInstanceOf[LiteralNode].value.asInstanceOf[String]
            else jdbcTypeFor(n.nodeType).sqlTypeName(None)
          if(supportsCast) b"cast(${ch(0)} as $tn)"
          else b"{fn convert(!${ch(0)},$tn)}"
        case Library.SilentCast(ch) => b"$ch"
        case Apply(sym: Library.SqlOperator, ch) =>
          b"\\("
          if(ch.length == 1) {
            b"${sym.name} ${ch.head}"
          } else b.sep(ch, " " + sym.name + " ")(expr(_))
          b"\\)"
        case Apply(sym: Library.JdbcFunction, ch) =>
          val quote = quotedJdbcFns.map(_.contains(sym)).getOrElse(true)
          if(quote) b"{fn "
          b"${sym.name}("
          b.sep(ch, ",")(expr(_, true))
          b")"
          if(quote) b"}"
        case Apply(sym: Library.SqlFunction, ch) =>
          b"${sym.name}("
          b.sep(ch, ",")(expr(_, true))
          b")"
        case n => throw new SlickException("Unexpected function call "+n+" -- SQL prefix: "+b.build.sql)
      }
      case c: IfThenElse =>
        b"(case"
        c.ifThenClauses.foreach { case (l, r) => b" when $l then $r" }
        c.elseClause match {
          case LiteralNode(null) =>
          case n => b" else $n"
        }
        b" end)"
      case OptionApply(ch) => expr(ch, skipParens)
      case QueryParameter(extractor, JdbcType(ti, option), _) =>
        b +?= { (p, idx, param) =>
          if(option) ti.setOption(extractor(param).asInstanceOf[Option[Any]], p, idx)
          else ti.setValue(extractor(param), p, idx)
        }
      case s: SimpleFunction =>
        if(s.scalar) b"{fn "
        b"${s.name}("
        b.sep(s.children, ",")(expr(_, true))
        b")"
        if(s.scalar) b += '}'
      case RowNumber(by) =>
        b"row_number() over(order by "
        if(by.isEmpty) b"(select 1)"
        else b.sep(by, ", "){ case (n, o) => buildOrdering(n, o) }
        b")"
      case c: Comprehension =>
        b"\\{"
        buildComprehension(c)
        b"\\}"
      case Union(left, right, all) =>
        b"\\{"
        buildFrom(left, None, true)
        if(all) b"\\nunion all " else b"\\nunion "
        buildFrom(right, None, true)
        b"\\}"
      case SimpleLiteral(w) => b += w
      case s: SimpleExpression => s.toSQL(this)
      case s: SimpleBinaryOperator => b"\\(${s.left} ${s.name} ${s.right}\\)"
      case n => throw new SlickException("Unexpected node "+n+" -- SQL prefix: "+b.build.sql)
    }
    // Render one ORDER BY element with direction and NULLS FIRST/LAST handling.
    protected def buildOrdering(n: Node, o: Ordering) {
      expr(n)
      if(o.direction.desc) b" desc"
      if(o.nulls.first) b" nulls first"
      else if(o.nulls.last) b" nulls last"
    }
    /** Render the tree as an UPDATE statement. The query must be a single-table
      * comprehension selecting only plain table columns. */
    def buildUpdate: SQLBuilder.Result = {
      val (gen, from, where, select) = tree match {
        case Comprehension(sym, from: TableNode, Pure(select, _), where, None, _, None, None, None, None, false) => select match {
          case f @ Select(Ref(struct), _) if struct == sym => (sym, from, where, ConstArray(f.field))
          case ProductNode(ch) if ch.forall{ case Select(Ref(struct), _) if struct == sym => true; case _ => false} =>
            (sym, from, where, ch.map{ case Select(Ref(_), field) => field })
          case _ => throw new SlickException("A query for an UPDATE statement must select table columns only -- Unsupported shape: "+select)
        }
        case o => throw new SlickException("A query for an UPDATE statement must resolve to a comprehension with a single table -- Unsupported shape: "+o)
      }
      val qtn = quoteTableName(from)
      symbolName(gen) = qtn // Alias table to itself because UPDATE does not support aliases
      b"update $qtn set "
      b.sep(select, ", ")(field => b += symbolName(field) += " = ?")
      if(!where.isEmpty) {
        b" where "
        expr(where.reduceLeft((a, b) => Library.And.typed[Boolean](a, b)), true)
      }
      b.build
    }
    // Hook for dialects that need a different DELETE FROM spelling.
    protected def buildDeleteFrom(tableName: String): Unit = {
      b"delete from $tableName"
    }
    /** Render the tree as a DELETE statement. The query must be a plain single-table
      * comprehension without take/drop/distinct/forUpdate. */
    def buildDelete: SQLBuilder.Result = {
      def fail(msg: String) =
        throw new SlickException("Invalid query for DELETE statement: " + msg)
      val (gen, from, where) = tree match {
        case Comprehension(sym, from, Pure(select, _), where, _, _, None, distinct, fetch, offset, forUpdate) =>
          if(fetch.isDefined || offset.isDefined || distinct.isDefined || forUpdate)
            // NOTE(review): message punctuation is off (".take, .drop .forUpdate and .distinct") —
            // runtime string, left unchanged here.
            fail(".take, .drop .forUpdate and .distinct are not supported for deleting")
          from match {
            case from: TableNode => (sym, from, where)
            case from => fail("A single source table is required, found: "+from)
          }
        case o => fail("Unsupported shape: "+o+" -- A single SQL comprehension is required")
      }
      val qtn = quoteTableName(from)
      symbolName(gen) = qtn // Alias table to itself because DELETE does not support aliases
      buildDeleteFrom(qtn)
      if(!where.isEmpty) {
        b" where "
        expr(where.reduceLeft((a, b) => Library.And.typed[Boolean](a, b)), true)
      }
      b.build
    }
}
/** Builder for INSERT statements.
  * Destructures the compiled `Insert` node into the target table and the inserted
  * field symbols, then renders "insert into t (cols) values (?,...)" (or a
  * query-based insert via the returned `InsertBuilderResult`). */
class InsertBuilder(val ins: Insert) {
    protected val Insert(_, table: TableNode, ProductNode(rawColumns), allFields) = ins
    // The field symbols being inserted, in statement order.
    protected val syms: ConstArray[FieldSymbol] = rawColumns.map { case Select(_, fs: FieldSymbol) => fs }
    protected lazy val allNames = syms.map(fs => quoteIdentifier(fs.name))
    // Placeholder list "(?,?,...)" matching syms.
    protected lazy val allVars = syms.iterator.map(_ => "?").mkString("(", ",", ")")
    protected lazy val tableName = quoteTableName(table)
    /** Build the INSERT. With no columns an `emptyInsert` statement is produced;
      * otherwise the result also supports query-based inserts by appending a
      * compiled SELECT to the insert prefix. */
    def buildInsert: InsertBuilderResult = {
      val start = buildInsertStart
      if(syms.isEmpty) new InsertBuilderResult(table, emptyInsert, syms)
      else new InsertBuilderResult(table, s"$start values $allVars", syms) {
        override def buildInsert(compiledQuery: Node) = {
          val (_, sbr: SQLBuilder.Result) = CodeGen.findResult(compiledQuery)
          SQLBuilder.Result(start + sbr.sql, sbr.setter)
        }
      }
    }
    // Hook: subclasses may rewrite the result mapping (e.g. to reorder columns).
    def transformMapping(n: Node) = n
    protected def buildInsertStart: String = allNames.iterator.mkString(s"insert into $tableName (", ",", ") ")
    // Statement used when no columns are explicitly inserted.
    protected def emptyInsert: String =
      if(allFields.isEmpty) s"insert into $tableName default values"
      else s"insert into $tableName (${quoteIdentifier(allFields.head.name)}) values (default)"
    /** Reorder InsertColumn indices in a mapping Node in the order of the given
      * sequence of FieldSymbols (which may contain duplicates). */
    protected def reorderColumns(n: Node, order: IndexedSeq[FieldSymbol]): Node = {
      val newIndices = order.zipWithIndex.groupBy(_._1)
      // For each original field, the (1-based) positions it occupies in the new order.
      lazy val reordering: ConstArray[IndexedSeq[Int]] = syms.map(fs => newIndices(fs).map(_._2 + 1))
      n.replace({ case InsertColumn(ConstArray(Select(ref, ElementSymbol(idx))), fs, tpe) =>
        val newPaths = reordering(idx-1).map(i => Select(ref, ElementSymbol(i)))
        InsertColumn(ConstArray.from(newPaths), fs, tpe) :@ tpe
      }, keepType = true)
    }
}
/** Builder for upsert statements, builds standard SQL MERGE statements by default. */
class UpsertBuilder(ins: Insert) extends InsertBuilder(ins) {
    // Partition inserted columns into primary-key columns (used for matching)
    // and the remaining "soft" columns (updated on match).
    protected lazy val (pkSyms, softSyms) = syms.toSeq.partition(_.options.contains(ColumnOption.PrimaryKey))
    protected lazy val pkNames = pkSyms.map { fs => quoteIdentifier(fs.name) }
    protected lazy val softNames = softSyms.map { fs => quoteIdentifier(fs.name) }
    protected lazy val nonAutoIncSyms = syms.filter(s => !(s.options contains ColumnOption.AutoInc))
    protected lazy val nonAutoIncNames = nonAutoIncSyms.map(fs => quoteIdentifier(fs.name))
    override def buildInsert: InsertBuilderResult = {
      val start = buildMergeStart
      val end = buildMergeEnd
      // Bind all values through a parameter select ("select ? as col, ...") used as the MERGE source.
      val paramSel = "select " + allNames.map(n => "? as "+n).iterator.mkString(",") + scalarFrom.map(n => " from "+n).getOrElse("")
      // We'd need a way to alias the column names at the top level in order to support merges from a source Query
      new InsertBuilderResult(table, start + paramSel + end, syms)
    }
    protected def buildMergeStart: String = s"merge into $tableName t using ("
    protected def buildMergeEnd: String = {
      val updateCols = softNames.map(n => s"t.$n=s.$n").mkString(", ")
      val insertCols = nonAutoIncNames /*.map(n => s"t.$n")*/ .mkString(", ")
      val insertVals = nonAutoIncNames.map(n => s"s.$n").mkString(", ")
      val cond = pkNames.map(n => s"t.$n=s.$n").mkString(" and ")
      s") s on ($cond) when matched then update set $updateCols when not matched then insert ($insertCols) values ($insertVals)"
    }
}
/** Builder for SELECT statements that can be used to check for the existence of
  * primary keys supplied to an INSERT operation. Used by the insertOrUpdate emulation
  * on databases that don't support this in a single server-side statement. */
class CheckInsertBuilder(ins: Insert) extends UpsertBuilder(ins) {
    // "select 1 from t where pk1=? and pk2=..." — parameters are the primary-key values.
    override def buildInsert: InsertBuilderResult =
      new InsertBuilderResult(table, pkNames.map(n => s"$n=?").mkString(s"select 1 from $tableName where ", " and ", ""), ConstArray.from(pkSyms))
}
/** Builder for UPDATE statements used as part of an insertOrUpdate operation
  * on databases that don't support this in a single server-side statement. */
class UpdateInsertBuilder(ins: Insert) extends UpsertBuilder(ins) {
    // "update t set soft=? where pk=?" — parameter order is soft columns first, then pk columns.
    override def buildInsert: InsertBuilderResult =
      new InsertBuilderResult(table,
        "update " + tableName + " set " + softNames.map(n => s"$n=?").mkString(",") + " where " + pkNames.map(n => s"$n=?").mkString(" and "),
        ConstArray.from(softSyms ++ pkSyms))
    // The mapping must be reordered to match the soft-then-pk parameter order above.
    override def transformMapping(n: Node) = reorderColumns(n, softSyms ++ pkSyms)
}
/** Builder for various DDL statements.
  * Produces the CREATE/DROP statements for a table, its indexes, primary key and
  * foreign keys, grouped into two creation and two drop phases so that
  * inter-table foreign keys can be created after all tables exist and dropped
  * before any table is dropped. */
class TableDDLBuilder(val table: Table[_]) { self =>
    protected val tableNode = table.toNode.asInstanceOf[TableExpansion].table.asInstanceOf[TableNode]
    protected val columns: Iterable[ColumnDDLBuilder] = table.create_*.map(fs => createColumnDDLBuilder(fs, table))
    protected val indexes: Iterable[Index] = table.indexes
    protected val foreignKeys: Iterable[ForeignKey] = table.foreignKeys
    protected val primaryKeys: Iterable[PrimaryKey] = table.primaryKeys
    /** Build all DDL statements for this table. Throws if more than one explicit
      * primary key is defined. */
    def buildDDL: DDL = {
      if(primaryKeys.size > 1)
        throw new SlickException("Table "+tableNode.tableName+" defines multiple primary keys ("
          + primaryKeys.map(_.name).mkString(", ") + ")")
      DDL(createPhase1, createPhase2, dropPhase1, dropPhase2)
    }
    // Phase 1: the table itself plus constraints/indexes that only reference this table.
    protected def createPhase1 = Iterable(createTable) ++ primaryKeys.map(createPrimaryKey) ++ indexes.map(createIndex)
    // Phase 2: foreign keys, which may reference other tables created in phase 1.
    protected def createPhase2 = foreignKeys.map(createForeignKey)
    protected def dropPhase1 = foreignKeys.map(dropForeignKey)
    protected def dropPhase2 = primaryKeys.map(dropPrimaryKey) ++ Iterable(dropTable)
    protected def createTable: String = {
      val b = new StringBuilder append "create table " append quoteTableName(tableNode) append " ("
      var first = true
      for(c <- columns) {
        if(first) first = false else b append ","
        c.appendColumn(b)
      }
      addTableOptions(b)
      b append ")"
      b.toString
    }
    // Hook for dialect-specific table options (e.g. storage engine); no-op by default.
    protected def addTableOptions(b: StringBuilder) {}
    protected def dropTable: String = "drop table "+quoteTableName(tableNode)
    protected def createIndex(idx: Index): String = {
      val b = new StringBuilder append "create "
      if(idx.unique) b append "unique "
      b append "index " append quoteIdentifier(idx.name) append " on " append quoteTableName(tableNode) append " ("
      addIndexColumnList(idx.on, b, idx.table.tableName)
      b append ")"
      b.toString
    }
    protected def createForeignKey(fk: ForeignKey): String = {
      val sb = new StringBuilder append "alter table " append quoteTableName(tableNode) append " add "
      addForeignKey(fk, sb)
      sb.toString
    }
    protected def addForeignKey(fk: ForeignKey, sb: StringBuilder) {
      sb append "constraint " append quoteIdentifier(fk.name) append " foreign key("
      addForeignKeyColumnList(fk.linearizedSourceColumns, sb, tableNode.tableName)
      sb append ") references " append quoteTableName(fk.targetTable) append "("
      addForeignKeyColumnList(fk.linearizedTargetColumnsForOriginalTargetTable, sb, fk.targetTable.tableName)
      sb append ") on update " append fk.onUpdate.action
      sb append " on delete " append fk.onDelete.action
    }
    protected def createPrimaryKey(pk: PrimaryKey): String = {
      val sb = new StringBuilder append "alter table " append quoteTableName(tableNode) append " add "
      addPrimaryKey(pk, sb)
      sb.toString
    }
    protected def addPrimaryKey(pk: PrimaryKey, sb: StringBuilder) {
      sb append "constraint " append quoteIdentifier(pk.name) append " primary key("
      addPrimaryKeyColumnList(pk.columns, sb, tableNode.tableName)
      sb append ")"
    }
    protected def dropForeignKey(fk: ForeignKey): String =
      "alter table " + quoteTableName(tableNode) + " drop constraint " + quoteIdentifier(fk.name)
    protected def dropPrimaryKey(pk: PrimaryKey): String =
      "alter table " + quoteTableName(tableNode) + " drop constraint " + quoteIdentifier(pk.name)
    protected def addIndexColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
      addColumnList(columns, sb, requiredTableName, "index")
    protected def addForeignKeyColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
      addColumnList(columns, sb, requiredTableName, "foreign key constraint")
    // Fixed copy-paste bug: the typeInfo for primary-key columns previously read
    // "foreign key constraint", producing a misleading error message when a
    // primary-key column did not belong to the required table.
    protected def addPrimaryKeyColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String) =
      addColumnList(columns, sb, requiredTableName, "primary key constraint")
    /** Append a comma-separated list of quoted column names, verifying that each
      * column belongs to `requiredTableName`; `typeInfo` names the constraint kind
      * for error messages. */
    protected def addColumnList(columns: IndexedSeq[Node], sb: StringBuilder, requiredTableName: String, typeInfo: String) {
      var first = true
      for(c <- columns) c match {
        case Select(t: TableNode, field: FieldSymbol) =>
          if(first) first = false
          else sb append ","
          sb append quoteIdentifier(field.name)
          if(requiredTableName != t.tableName)
            throw new SlickException("All columns in "+typeInfo+" must belong to table "+requiredTableName)
        case _ => throw new SlickException("Cannot use column "+c+" in "+typeInfo+" (only named columns are allowed)")
      }
    }
}
/** Builder for column specifications in DDL statements.
  * Collects the column's options into mutable flags at construction time
  * (via `init()`), then renders "name TYPE [options]" into a StringBuilder. */
class ColumnDDLBuilder(column: FieldSymbol) {
    protected val JdbcType(jdbcType, isOption) = column.tpe
    protected var sqlType: String = null
    // Set by Length options; not read in this class — presumably used by dialect subclasses. TODO confirm.
    protected var varying: Boolean = false
    protected var size: Option[Int] = None
    protected var customSqlType: Boolean = false
    // Option columns are nullable unless an explicit NotNull option says otherwise.
    protected var notNull = !isOption
    protected var autoIncrement = false
    protected var primaryKey = false
    protected var unique = false
    protected var defaultLiteral: String = null
    init()
    // Process all column options; an explicit SqlType wins over the JDBC-derived type
    // (with any Length size appended to it).
    protected def init() {
      for(o <- column.options) handleColumnOption(o)
      if(sqlType ne null) {
        size.foreach(l => sqlType += s"($l)")
        customSqlType = true
      } else sqlType = jdbcType.sqlTypeName(Some(column))
    }
    protected def handleColumnOption(o: ColumnOption[_]): Unit = o match {
      case SqlProfile.ColumnOption.SqlType(s) => sqlType = s
      case RelationalProfile.ColumnOption.Length(s,v) =>
        size = Some(s)
        varying = v
      case SqlProfile.ColumnOption.NotNull => notNull = true
      case SqlProfile.ColumnOption.Nullable => notNull = false
      case ColumnOption.AutoInc => autoIncrement = true
      case ColumnOption.PrimaryKey => primaryKey = true
      case ColumnOption.Unique => unique = true
      case RelationalProfile.ColumnOption.Default(v) => defaultLiteral = valueToSQLLiteral(v, column.tpe)
    }
    def appendType(sb: StringBuilder): Unit = sb append sqlType
    /** Append the full column definition: quoted name, type, then options. */
    def appendColumn(sb: StringBuilder) {
      sb append quoteIdentifier(column.name) append ' '
      appendType(sb)
      appendOptions(sb)
    }
    // Default SQL rendering of the collected options; dialects override as needed.
    protected def appendOptions(sb: StringBuilder) {
      if(defaultLiteral ne null) sb append " DEFAULT " append defaultLiteral
      if(autoIncrement) sb append " GENERATED BY DEFAULT AS IDENTITY(START WITH 1)"
      if(notNull) sb append " NOT NULL"
      if(primaryKey) sb append " PRIMARY KEY"
      if( unique ) sb append " UNIQUE"
    }
}
/** Builder for DDL statements for sequences.
  * Renders "create sequence name [increment n] [minvalue n] [maxvalue n]
  * [start n] [cycle]" plus the matching drop statement; absent options are omitted. */
class SequenceDDLBuilder(seq: Sequence[_]) {
    def buildDDL: DDL = {
      val b = new StringBuilder append "create sequence " append quoteIdentifier(seq.name)
      seq._increment.foreach { b append " increment " append _ }
      seq._minValue.foreach { b append " minvalue " append _ }
      seq._maxValue.foreach { b append " maxvalue " append _ }
      seq._start.foreach { b append " start " append _ }
      if(seq._cycle) b append " cycle"
      DDL(b.toString, "drop sequence " + quoteIdentifier(seq.name))
    }
}
}
/** Result of building an INSERT statement: the target table, the generated SQL
  * text, and the field symbols (in order) that the statement's parameters map to. */
class InsertBuilderResult(val table: TableNode, val sql: String, val fields: ConstArray[FieldSymbol]) {
  /** Build a Query-based insert. Not supported by default; builders that do
    * support it (see `InsertBuilder.buildInsert`) return an anonymous subclass
    * that overrides this method. */
  def buildInsert(compiledQuery: Node): SQLBuilder.Result = {
    throw new SlickException("Building Query-based inserts from this InsertBuilderResult is not supported")
  }
}
|
AtkinsChang/slick
|
slick/src/main/scala/slick/jdbc/JdbcStatementBuilderComponent.scala
|
Scala
|
bsd-2-clause
| 33,891 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers.thisLineNumber
import org.scalactic.Prettifier
class ShouldBeAnTypeSpec extends FunSpec with Matchers {
  // Prettifier used when rendering expected failure messages in assertions below.
  private val prettifier = Prettifier.default
  // Must match this file's name: tests assert on failedCodeFileName.
  val fileName: String = "ShouldBeAnTypeSpec.scala"
  // Simple fixture type used as the "expected type" in be-an checks.
  case class Book(title: String)
  // Helpers that reproduce the exact failure messages ScalaTest generates, so the
  // tests below can assert on TestFailedException.message verbatim.
  def wasNotAnInstanceOf(left: Any, right: Class[_]): String =
    wasNotAnInstanceOf(left, right.getName)
  def wasNotAnInstanceOf(left: Any, className: String): String =
    FailureMessages.wasNotAnInstanceOf(prettifier, left, UnquotedString(className), UnquotedString(left.getClass.getName))
  def wasAnInstanceOf(left: Any, right: Class[_]) =
    FailureMessages.wasAnInstanceOf(prettifier, left, UnquotedString(right.getName))
  def wasNotEqualTo(left: Any, right: Any) =
    FailureMessages.wasNotEqualTo(prettifier, left, right)
  def wasEqualTo(left: Any, right: Any) =
    FailureMessages.wasEqualTo(prettifier, left, right)
  def didNotEqual(left: Any, right: Any) =
    FailureMessages.didNotEqual(prettifier, left, right)
  def equaled(left: Any, right: Any) =
    FailureMessages.equaled(prettifier, left, right)
  // Shared Book instances: one used as the positive fixture, one as the non-equal counterpart.
  val aTaleOfTwoCities = new Book("A Tale of Two Cities")
  val aTaleOfThreeCities = new Book("A Tale of Three Cities")
// Checking for a specific size
describe("The be an [Type] syntax") {
it("should do nothing if the LHS is an instance of specified RHS") {
aTaleOfTwoCities should be (an [Book])
aTaleOfTwoCities shouldBe an [Book]
1 should be (an [AnyVal])
1 shouldBe an [AnyVal]
aTaleOfTwoCities should not be an [AnyVal]
}
it("should throw TestFailedException if LHS is not an instance of specified RHS") {
val caught1 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should be (an [String])
}
assert(caught1.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities shouldBe an [String]
}
assert(caught2.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities shouldBe an [AnyVal]
}
assert(caught3.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, "AnyVal")))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should be (an [AnyVal])
}
assert(caught4.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, "AnyVal")))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing if LHS is not an instance of specified RHS, when used with not") {
aTaleOfTwoCities should not be an [String]
aTaleOfTwoCities shouldNot be (an [String])
}
it("should throw TestFailedException LSH is an instance of specified RHS, when used with not") {
val caught1 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should not be an [Book]
}
assert(caught1.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities shouldNot be (an [Book])
}
assert(caught2.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing if LHS true for both specified RHS, when used in a logical-and expression") {
aTaleOfTwoCities should (be (an [Book]) and be (an [Book]))
aTaleOfTwoCities should (be (aTaleOfTwoCities) and be (an [Book]))
aTaleOfTwoCities should (be (an [Book]) and be (aTaleOfTwoCities))
aTaleOfTwoCities should (equal (aTaleOfTwoCities) and be (an [Book]))
aTaleOfTwoCities should (be (an [Book]) and equal (aTaleOfTwoCities))
}
it("should throw TestFailedException if LHS is false for either specified RHS, when used in a logical-and expression") {
val caught1 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) and be (an [Book]))
}
assert(caught1.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [Book]) and be (an [String]))
}
assert(caught2.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book]) + ", but " + wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) and be (an [String]))
}
assert(caught3.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (aTaleOfThreeCities) and be (an [Book]))
}
assert(caught4.message === Some(wasNotEqualTo(aTaleOfTwoCities, aTaleOfThreeCities)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught5 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (aTaleOfTwoCities) and be (an [String]))
}
assert(caught5.message === Some(wasEqualTo(aTaleOfTwoCities, aTaleOfTwoCities) + ", but " + wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught5.failedCodeFileName === Some(fileName))
assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught6 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (aTaleOfThreeCities) and be (an [String]))
}
assert(caught6.message === Some(wasNotEqualTo(aTaleOfTwoCities, aTaleOfThreeCities)))
assert(caught6.failedCodeFileName === Some(fileName))
assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught7 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) and be (aTaleOfTwoCities))
}
assert(caught7.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught7.failedCodeFileName === Some(fileName))
assert(caught7.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught8 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [Book]) and be (aTaleOfThreeCities))
}
assert(caught8.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book]) + ", but " + wasNotEqualTo(aTaleOfTwoCities, aTaleOfThreeCities)))
assert(caught8.failedCodeFileName === Some(fileName))
assert(caught8.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught9 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) and be (aTaleOfThreeCities))
}
assert(caught9.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught9.failedCodeFileName === Some(fileName))
assert(caught9.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught10 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (equal (aTaleOfThreeCities) and be (an [Book]))
}
assert(caught10.message === Some(didNotEqual(aTaleOfTwoCities, aTaleOfThreeCities)))
assert(caught10.failedCodeFileName === Some(fileName))
assert(caught10.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught11 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (equal (aTaleOfTwoCities) and be (an [String]))
}
assert(caught11.message === Some(equaled(aTaleOfTwoCities, aTaleOfTwoCities) + ", but " + wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught11.failedCodeFileName === Some(fileName))
assert(caught11.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught12 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (equal (aTaleOfThreeCities) and be (an [String]))
}
assert(caught12.message === Some(didNotEqual(aTaleOfTwoCities, aTaleOfThreeCities)))
assert(caught12.failedCodeFileName === Some(fileName))
assert(caught12.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught13 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) and equal (aTaleOfTwoCities))
}
assert(caught13.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught13.failedCodeFileName === Some(fileName))
assert(caught13.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught14 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [Book]) and equal (aTaleOfThreeCities))
}
assert(caught14.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book]) + ", but " + didNotEqual(aTaleOfTwoCities, aTaleOfThreeCities)))
assert(caught14.failedCodeFileName === Some(fileName))
assert(caught14.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught15 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) and equal (aTaleOfThreeCities))
}
assert(caught15.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught15.failedCodeFileName === Some(fileName))
assert(caught15.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing if LHS is true for either specified RHS, when used in a logical-or expression") {
aTaleOfTwoCities should (be (an [Book]) or be (an [Book]))
aTaleOfTwoCities should (be (an [String]) or be (an [Book]))
aTaleOfTwoCities should (be (an [Book]) or be (an [String]))
aTaleOfTwoCities should (be (aTaleOfTwoCities) or be (an [Book]))
aTaleOfTwoCities should (be (aTaleOfThreeCities) or be (an [Book]))
aTaleOfTwoCities should (be (aTaleOfTwoCities) or be (an [String]))
aTaleOfTwoCities should (be (an [Book]) or be (aTaleOfTwoCities))
aTaleOfTwoCities should (be (an [String]) or be (aTaleOfTwoCities))
aTaleOfTwoCities should (be (an [Book]) or be (aTaleOfThreeCities))
aTaleOfTwoCities should (equal (aTaleOfTwoCities) or be (an [Book]))
aTaleOfTwoCities should (equal (aTaleOfThreeCities) or be (an [Book]))
aTaleOfTwoCities should (equal (aTaleOfTwoCities) or be (an [String]))
aTaleOfTwoCities should (be (an [Book]) or equal (aTaleOfTwoCities))
aTaleOfTwoCities should (be (an [String]) or equal (aTaleOfTwoCities))
aTaleOfTwoCities should (be (an [Book]) or equal (aTaleOfThreeCities))
}
it("should throw TestFailedException LHS is false for both specified RHS, when used in a logical-or expression") {
val caught1 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) or be (an [String]))
}
assert(caught1.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String]) + ", and " + wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (aTaleOfThreeCities) or be (an [String]))
}
assert(caught2.message === Some(wasNotEqualTo(aTaleOfTwoCities, aTaleOfThreeCities) + ", and " + wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) or be (aTaleOfThreeCities))
}
assert(caught3.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String]) + ", and " + wasNotEqualTo(aTaleOfTwoCities, aTaleOfThreeCities)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (equal (aTaleOfThreeCities) or be (an [String]))
}
assert(caught4.message === Some(didNotEqual(aTaleOfTwoCities, aTaleOfThreeCities) + ", and " + wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String])))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught5 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (be (an [String]) or equal (aTaleOfThreeCities))
}
assert(caught5.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String]) + ", and " + didNotEqual(aTaleOfTwoCities, aTaleOfThreeCities)))
assert(caught5.failedCodeFileName === Some(fileName))
assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing if should do nothing if LHS is false for both specified RHS, when used in a logical-and expression with not") {
aTaleOfTwoCities should (not be an [String] and not be an [String])
aTaleOfTwoCities should (not be aTaleOfThreeCities and not be an [String])
aTaleOfTwoCities should (not be an [String] and not be aTaleOfThreeCities)
aTaleOfTwoCities should (not equal aTaleOfThreeCities and not be an [String])
aTaleOfTwoCities should (not be an [String] and not equal aTaleOfThreeCities)
}
it("should throw TestFailedException if LHS true for either specified RHS, when used in a logical-and expression with not") {
val caught1 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [Book] and not be an [String])
}
assert(caught1.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [String] and not be an [Book])
}
assert(caught2.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String]) + ", but " + wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [Book] and not be an [Book])
}
assert(caught3.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be aTaleOfTwoCities and not be an [String])
}
assert(caught4.message === Some(wasEqualTo(aTaleOfTwoCities, aTaleOfTwoCities)))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught5 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be aTaleOfThreeCities and not be an [Book])
}
assert(caught5.message === Some(wasNotEqualTo(aTaleOfTwoCities, aTaleOfThreeCities) + ", but " + wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught5.failedCodeFileName === Some(fileName))
assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught6 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be aTaleOfTwoCities and not be an [Book])
}
assert(caught6.message === Some(wasEqualTo(aTaleOfTwoCities, aTaleOfTwoCities)))
assert(caught6.failedCodeFileName === Some(fileName))
assert(caught6.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught7 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [Book] and not be aTaleOfThreeCities)
}
assert(caught7.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught7.failedCodeFileName === Some(fileName))
assert(caught7.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught8 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [String] and not be aTaleOfTwoCities)
}
assert(caught8.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String]) + ", but " + wasEqualTo(aTaleOfTwoCities, aTaleOfTwoCities)))
assert(caught8.failedCodeFileName === Some(fileName))
assert(caught8.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught9 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [Book] and not be aTaleOfTwoCities)
}
assert(caught9.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught9.failedCodeFileName === Some(fileName))
assert(caught9.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught10 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not equal aTaleOfTwoCities and not be an [String])
}
assert(caught10.message === Some(equaled(aTaleOfTwoCities, aTaleOfTwoCities)))
assert(caught10.failedCodeFileName === Some(fileName))
assert(caught10.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught11 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not equal aTaleOfThreeCities and not be an [Book])
}
assert(caught11.message === Some(didNotEqual(aTaleOfTwoCities, aTaleOfThreeCities) + ", but " + wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught11.failedCodeFileName === Some(fileName))
assert(caught11.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught12 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not equal aTaleOfTwoCities and not be an [Book])
}
assert(caught12.message === Some(equaled(aTaleOfTwoCities, aTaleOfTwoCities)))
assert(caught12.failedCodeFileName === Some(fileName))
assert(caught12.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught13 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [Book] and not equal aTaleOfThreeCities)
}
assert(caught13.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught13.failedCodeFileName === Some(fileName))
assert(caught13.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught14 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [String] and not equal aTaleOfTwoCities)
}
assert(caught14.message === Some(wasNotAnInstanceOf(aTaleOfTwoCities, classOf[String]) + ", but " + equaled(aTaleOfTwoCities, aTaleOfTwoCities)))
assert(caught14.failedCodeFileName === Some(fileName))
assert(caught14.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught15 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be an [Book] and not equal aTaleOfTwoCities)
}
assert(caught15.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught15.failedCodeFileName === Some(fileName))
assert(caught15.failedCodeLineNumber === Some(thisLineNumber - 4))
}
it("should do nothing if LHS is false for either specified RHS, when used in a logical-or expression with not") {
aTaleOfTwoCities should (not be an [String] or not be an [String])
aTaleOfTwoCities should (not be an [Book] or not be an [String])
aTaleOfTwoCities should (not be an [String] or not be an [Book])
aTaleOfTwoCities should (not be aTaleOfThreeCities or not be an [String])
aTaleOfTwoCities should (not be aTaleOfTwoCities or not be an [String])
aTaleOfTwoCities should (not be aTaleOfThreeCities or not be an [Book])
aTaleOfTwoCities should (not be an [String] or not be aTaleOfThreeCities)
aTaleOfTwoCities should (not be an [Book] or not be aTaleOfThreeCities)
aTaleOfTwoCities should (not be an [String] or not be aTaleOfTwoCities)
aTaleOfTwoCities should (not equal aTaleOfThreeCities or not be an [String])
aTaleOfTwoCities should (not equal aTaleOfTwoCities or not be an [String])
aTaleOfTwoCities should (not equal aTaleOfThreeCities or not be an [Book])
aTaleOfTwoCities should (not be an [String] or not equal aTaleOfThreeCities)
aTaleOfTwoCities should (not be an [Book] or not equal aTaleOfThreeCities)
aTaleOfTwoCities should (not be an [String] or not equal (aTaleOfTwoCities))
}
it("should throw TestFailedException if LHS is true both specified RHS, when used in a logical-or expression with not") {
val caught1 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be a [Book] or not be a [Book])
}
assert(caught1.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book]) + ", and " + wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught1.failedCodeFileName === Some(fileName))
assert(caught1.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught2 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be aTaleOfTwoCities or not be a [Book])
}
assert(caught2.message === Some(wasEqualTo(aTaleOfTwoCities, aTaleOfTwoCities) + ", and " + wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught2.failedCodeFileName === Some(fileName))
assert(caught2.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught3 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be a [Book] or not be aTaleOfTwoCities)
}
assert(caught3.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book]) + ", and " + wasEqualTo(aTaleOfTwoCities, aTaleOfTwoCities)))
assert(caught3.failedCodeFileName === Some(fileName))
assert(caught3.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught4 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not equal aTaleOfTwoCities or not be a [Book])
}
assert(caught4.message === Some(equaled(aTaleOfTwoCities, aTaleOfTwoCities) + ", and " + wasAnInstanceOf(aTaleOfTwoCities, classOf[Book])))
assert(caught4.failedCodeFileName === Some(fileName))
assert(caught4.failedCodeLineNumber === Some(thisLineNumber - 4))
val caught5 = intercept[exceptions.TestFailedException] {
aTaleOfTwoCities should (not be a [Book] or not equal aTaleOfTwoCities)
}
assert(caught5.message === Some(wasAnInstanceOf(aTaleOfTwoCities, classOf[Book]) + ", and " + equaled(aTaleOfTwoCities, aTaleOfTwoCities)))
assert(caught5.failedCodeFileName === Some(fileName))
assert(caught5.failedCodeLineNumber === Some(thisLineNumber - 4))
}
// TODO: to uncomment these tests after TypeMatcherMacro raises compiler error instead of warning
it("should do nothing if the LHS is an instance of specified RHS with _ type parameter") {
List(Book("Book 1"), Book("Book 2")) should be (an [List[_]])
List(Book("Book 1"), Book("Book 2")) shouldBe an [List[_]]
}
/*it("should not compile when LHS is an instance of specified RHS with type parameter ") {
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should be (an [List[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) shouldBe an [List[Book]]" shouldNot compile
}*/
it("should do nothing if LHS is not an instance of specified RHS with _ type parameter") {
Book("Book 1") should not be an [List[_]]
Book("Book 1") shouldNot be (an [List[_]])
}
/*it("should not compile when LHS is not an instance of specified RHS with type parameter") {
"Book(\\"Book 1\\") should not be an [List[Book]]" shouldNot compile
"Book(\\"Book 1\\") shouldNot be (an [List[Book]])" shouldNot compile
}*/
it("should do nothing if LHS true for both specified RHS with _ type parameter, when used in a logical-and expression") {
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) and be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (be (List(Book("Book 1"), Book("Book 2"))) and be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) and be (List(Book("Book 1"), Book("Book 2"))))
List(Book("Book 1"), Book("Book 2")) should (equal (List(Book("Book 1"), Book("Book 2"))) and be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) and equal (List(Book("Book 1"), Book("Book 2"))))
}
/*it("should not compile when LHS is true for both specified RHS with type parameter") {
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) and be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))) and be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) and be (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (equal (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))) and be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) and equal (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))))" shouldNot compile
}*/
it("should do nothing if LHS is true for either specified RHS with _ type parameter, when used in a logical-or expression") {
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) or be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (be (an [String]) or be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) or be (an [String]))
List(Book("Book 1"), Book("Book 2")) should (be (List(Book("Book 1"), Book("Book 2"))) or be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (be (aTaleOfThreeCities) or be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) or be (List(Book("Book 1"), Book("Book 2"))))
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) or be (aTaleOfThreeCities))
List(Book("Book 1"), Book("Book 2")) should (equal (List(Book("Book 1"), Book("Book 2"))) or be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (equal (aTaleOfThreeCities) or be (an [List[_]]))
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) or equal (List(Book("Book 1"), Book("Book 2"))))
List(Book("Book 1"), Book("Book 2")) should (be (an [List[_]]) or equal (aTaleOfThreeCities))
}
/*it("should not compile if LHS is true for either specified RHS with type parameter, when used in a logical-or expression") {
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) or be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [String]) or be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) or be (an [String]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))) or be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (aTaleOfThreeCities) or be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) or be (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) or be (aTaleOfThreeCities))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (equal (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))) or be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (equal (aTaleOfThreeCities) or be (an [List[Book]]))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) or equal (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (be (an [List[Book]]) or equal (aTaleOfThreeCities))" shouldNot compile
}*/
it("should do nothing if LHS is false for both specified RHS with _ type parameter, when used in a logical-and expression with not") {
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] and not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not be aTaleOfThreeCities and not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] and not be aTaleOfThreeCities)
List(Book("Book 1"), Book("Book 2")) should (not equal aTaleOfThreeCities and not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] and not equal aTaleOfThreeCities)
}
/*it("should not compile if LHS is false for both specified RHS with type parameter, when used in a logical-and expression with not ") {
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] and not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be aTaleOfThreeCities and not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] and not be aTaleOfThreeCities)" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not equal aTaleOfThreeCities and not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] and not equal aTaleOfThreeCities)" shouldNot compile
}*/
it("should do nothing if LHS is false for either specified RHS with _ type parameter, when used in a logical-or expression with not") {
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] or not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not be an [List[_]] or not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] or not be an [List[_]])
List(Book("Book 1"), Book("Book 2")) should (not be aTaleOfThreeCities or not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not be List(Book("Book 1"), Book("Book 2")) or not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not be aTaleOfThreeCities or not be an [List[_]])
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] or not be aTaleOfThreeCities)
List(Book("Book 1"), Book("Book 2")) should (not be an [List[_]] or not be aTaleOfThreeCities)
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] or not be List(Book("Book 1"), Book("Book 2")))
List(Book("Book 1"), Book("Book 2")) should (not equal aTaleOfThreeCities or not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not equal List(Book("Book 1"), Book("Book 2")) or not be an [Vector[_]])
List(Book("Book 1"), Book("Book 2")) should (not equal aTaleOfThreeCities or not be an [List[_]])
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] or not equal aTaleOfThreeCities)
List(Book("Book 1"), Book("Book 2")) should (not be an [List[_]] or not equal aTaleOfThreeCities)
List(Book("Book 1"), Book("Book 2")) should (not be an [Vector[_]] or not equal (List(Book("Book 1"), Book("Book 2"))))
}
/*it("should not compile if LHS is false for either specified RHS with type parameter, when used in a logical-or expression with not") {
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] or not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [List[Book]] or not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] or not be an [List[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be aTaleOfThreeCities or not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) or not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be aTaleOfThreeCities or not be an [List[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] or not be aTaleOfThreeCities)" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [List[Book]] or not be aTaleOfThreeCities)" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] or not be List(Book(\\"Book 1\\"), Book(\\"Book 2\\")))" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not equal aTaleOfThreeCities or not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not equal List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) or not be an [Vector[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not equal aTaleOfThreeCities or not be an [List[Book]])" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] or not equal aTaleOfThreeCities)" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [List[Book]] or not equal aTaleOfThreeCities)" shouldNot compile
"List(Book(\\"Book 1\\"), Book(\\"Book 2\\")) should (not be an [Vector[Book]] or not equal (List(Book(\\"Book 1\\"), Book(\\"Book 2\\"))))" shouldNot compile
}*/
}
}
|
dotty-staging/scalatest
|
scalatest-test/src/test/scala/org/scalatest/ShouldBeAnTypeSpec.scala
|
Scala
|
apache-2.0
| 36,191 |
package dotty.tools
package dotc
package typer
import core._
import ast._
import Contexts._, Types._, Flags._, Denotations._, Names._, StdNames._, NameOps._, Symbols._
import Trees._
import Constants._
import Scopes._
import ProtoTypes._
import annotation.unchecked
import util.Positions._
import util.{Stats, SimpleMap}
import util.common._
import Decorators._
import Uniques._
import ErrorReporting.{errorType, DiagnosticString}
import config.Printers._
import collection.mutable
trait Inferencing { this: Checking =>
  import tpd._
  /** Is type fully defined, meaning the type does not contain wildcard types
   *  or uninstantiated type variables. As a side effect, this will minimize
   *  any uninstantiated type variables, according to the given force degree,
   *  but only if the overall result of `isFullyDefined` is `true`.
   *  Variables that are successfully minimized do not count as uninstantiated.
   */
  def isFullyDefined(tp: Type, force: ForceDegree.Value)(implicit ctx: Context): Boolean = {
    // Run the check in a fresh typer state so that forced instantiations can
    // be discarded when the type turns out not to be fully defined.
    val nestedCtx = ctx.fresh.setNewTyperState
    val result = new IsFullyDefinedAccumulator(force)(nestedCtx).process(tp)
    if (result) nestedCtx.typerState.commit()
    result
  }
  /** The fully defined type, where all type variables are forced.
   *  Throws an error if type contains wildcards.
   */
  def fullyDefinedType(tp: Type, what: String, pos: Position)(implicit ctx: Context) =
    if (isFullyDefined(tp, ForceDegree.all)) tp
    else throw new Error(i"internal error: type of $what $tp is not fully defined, pos = $pos") // !!! DEBUG
  /** The accumulator which forces type variables using the policy encoded in `force`
   *  and returns whether the type is fully defined. Two phases:
   *  1st Phase: Try to instantiate covariant and non-variant type variables to
   *  their lower bound. Record whether successful.
   *  2nd Phase: If first phase was successful, instantiate all remaining type variables
   *  to their upper bound.
   */
  private class IsFullyDefinedAccumulator(force: ForceDegree.Value)(implicit ctx: Context) extends TypeAccumulator[Boolean] {
    // Instantiates `tvar` from its lower (fromBelow) or upper bound and logs it.
    private def instantiate(tvar: TypeVar, fromBelow: Boolean): Type = {
      val inst = tvar.instantiate(fromBelow)
      typr.println(i"forced instantiation of ${tvar.origin} = $inst")
      inst
    }
    // Set during phase 1 when a variable could not be minimized; phase 2 then
    // maximizes all remaining uninstantiated variables.
    private var toMaximize: Boolean = false
    def apply(x: Boolean, tp: Type): Boolean = tp.dealias match {
      case _: WildcardType | _: ProtoType =>
        // Wildcards and prototypes can never count as fully defined.
        false
      case tvar: TypeVar if !tvar.isInstantiated =>
        if (force == ForceDegree.none) false
        else {
          // Minimize covariant/non-variant variables, unless `noBottom` forbids
          // instantiating to a bottom type (Nothing or Null).
          val minimize =
            variance >= 0 && !(
              force == ForceDegree.noBottom &&
              isBottomType(ctx.typeComparer.approximation(tvar.origin, fromBelow = true)))
          if (minimize) instantiate(tvar, fromBelow = true)
          else toMaximize = true
          foldOver(x, tvar)
        }
      case tp =>
        foldOver(x, tp)
    }
    /** Phase 2: instantiates every remaining uninstantiated variable to its upper bound. */
    private class UpperInstantiator(implicit ctx: Context) extends TypeAccumulator[Unit] {
      def apply(x: Unit, tp: Type): Unit = {
        tp match {
          case tvar: TypeVar if !tvar.isInstantiated =>
            instantiate(tvar, fromBelow = false)
          case _ =>
        }
        foldOver(x, tp)
      }
    }
    def process(tp: Type): Boolean = {
      val res = apply(true, tp)
      if (res && toMaximize) new UpperInstantiator().apply((), tp)
      res
    }
  }
  /** Is `tp` one of the bottom types, Nothing or Null? */
  def isBottomType(tp: Type)(implicit ctx: Context) =
    tp == defn.NothingType || tp == defn.NullType
  /** Recursively widen and also follow type declarations and type aliases. */
  def widenForMatchSelector(tp: Type)(implicit ctx: Context): Type = tp.widen match {
    case tp: TypeRef if !tp.symbol.isClass => widenForMatchSelector(tp.info.bounds.hi)
    case tp: AnnotatedType => tp.derivedAnnotatedType(tp.annot, widenForMatchSelector(tp.tpe))
    case tp => tp
  }
  /** Following type aliases and stripping refinements and annotations, if one arrives at a
   *  class type reference where the class has a companion module, a reference to
   *  that companion module. Otherwise NoType
   */
  def companionRef(tp: Type)(implicit ctx: Context): Type =
    tp.underlyingClassRef(refinementOK = true) match {
      case tp: TypeRef =>
        val companion = tp.classSymbol.companionModule
        if (companion.exists)
          companion.valRef.asSeenFrom(tp.prefix, companion.symbol.owner)
        else NoType
      case _ => NoType
    }
  /** Ensure that the first type in a list of parent types Ps points to a non-trait class.
   *  If that's not already the case, add one. The added class type CT is determined as follows.
   *  First, let C be the unique class such that
   *  - there is a parent P_i such that P_i derives from C, and
   *  - for every class D: If some parent P_j, j <= i derives from D, then C derives from D.
   *  Then, let CT be the smallest type which
   *  - has C as its class symbol, and
   *  - for all parents P_i: If P_i derives from C then P_i <:< CT.
   */
  def ensureFirstIsClass(parents: List[Type])(implicit ctx: Context): List[Type] = {
    // The closest real (non-trait) class above `cls`, following first parents.
    def realClassParent(cls: Symbol): ClassSymbol =
      if (!cls.isClass) defn.ObjectClass
      else if (!(cls is Trait)) cls.asClass
      else cls.asClass.classParents match {
        case parentRef :: _ => realClassParent(parentRef.symbol)
        case nil => defn.ObjectClass // NB: `nil` is a fresh binder here; this arm handles the empty list
      }
    // Keeps the more derived of `candidate` and the real class of `parent`.
    def improve(candidate: ClassSymbol, parent: Type): ClassSymbol = {
      val pcls = realClassParent(parent.classSymbol)
      if (pcls derivesFrom candidate) pcls else candidate
    }
    parents match {
      case p :: _ if p.classSymbol.isRealClass => parents
      case _ =>
        val pcls = (defn.ObjectClass /: parents)(improve)
        typr.println(i"ensure first is class $parents%, % --> ${parents map (_ baseTypeWithArgs pcls)}%, %")
        val ptype = ctx.typeComparer.glb(
          defn.ObjectType :: (parents map (_ baseTypeWithArgs pcls)))
        ptype :: parents
    }
  }
  /** Ensure that first parent tree refers to a real class. */
  def ensureFirstIsClass(parents: List[Tree], pos: Position)(implicit ctx: Context): List[Tree] = parents match {
    case p :: ps if p.tpe.classSymbol.isRealClass => parents
    case _ =>
      // add synthetic class type
      val first :: _ = ensureFirstIsClass(parents.tpes)
      TypeTree(checkFeasible(first, pos, d"\\n in inferred parent $first")).withPos(pos) :: parents
  }
  /** Interpolate those undetermined type variables in the widened type of this tree
   *  which are introduced by type application contained in the tree.
   *  If such a variable appears covariantly in type `tp` or does not appear at all,
   *  approximate it by its lower bound. Otherwise, if it appears contravariantly
   *  in type `tp` approximate it by its upper bound.
   */
  def interpolateUndetVars(tree: Tree)(implicit ctx: Context): Unit = {
    val constraint = ctx.typerState.constraint
    // Only variables whose owning tree lies inside `tree` are interpolated.
    val qualifies = (tvar: TypeVar) => tree contains tvar.owningTree
    def interpolate() = Stats.track("interpolateUndetVars") {
      val tp = tree.tpe.widen
      constr.println(s"interpolate undet vars in ${tp.show}, pos = ${tree.pos}, mode = ${ctx.mode}, undets = ${constraint.uninstVars map (tvar => s"${tvar.show}@${tvar.owningTree.pos}")}")
      constr.println(s"qualifying undet vars: ${constraint.uninstVars filter qualifies map (tvar => s"$tvar / ${tvar.show}")}, constraint: ${constraint.show}")
      val vs = tp.variances(qualifies)
      var changed = false
      vs foreachBinding { (tvar, v) =>
        if (v != 0) {
          // v == 1: covariant occurrence -> minimize; v == -1 -> maximize.
          typr.println(s"interpolate ${if (v == 1) "co" else "contra"}variant ${tvar.show} in ${tp.show}")
          tvar.instantiate(fromBelow = v == 1)
          changed = true
        }
      }
      if (changed) // instantiations might have uncovered new typevars to interpolate
        interpolateUndetVars(tree)
      else
        for (tvar <- constraint.uninstVars)
          if (!(vs contains tvar) && qualifies(tvar)) {
            typr.println(s"instantiating non-occurring ${tvar.show} in ${tp.show}")
            tvar.instantiate(fromBelow = true)
          }
    }
    if (constraint.uninstVars exists qualifies) interpolate()
  }
  /** Instantiate undetermined type variables to that type `tp` is
   *  maximized and return None. If this is not possible, because a non-variant
   *  typevar is not uniquely determined, return that typevar in a Some.
   */
  def maximizeType(tp: Type)(implicit ctx: Context): Option[TypeVar] = Stats.track("maximizeType") {
    val vs = tp.variances(alwaysTrue)
    var result: Option[TypeVar] = None
    vs foreachBinding { (tvar, v) =>
      if (v == 1) tvar.instantiate(fromBelow = false)
      else if (v == -1) tvar.instantiate(fromBelow = true)
      else {
        // Non-variant: only uniquely determined if the bounds have collapsed.
        val bounds = ctx.typerState.constraint.fullBounds(tvar.origin)
        if (!(bounds.hi <:< bounds.lo)) result = Some(tvar)
        tvar.instantiate(fromBelow = false)
      }
    }
    result
  }
}
/** An enumeration controlling the degree of forcing in "is-fully-defined" checks. */
object ForceDegree extends Enumeration {
  /** Do not force any type variables. */
  val none: Value = Value
  /** Force type variables, but fail if one would be forced to Nothing or Null. */
  val noBottom: Value = Value
  /** Force all type variables, never failing. */
  val all: Value = Value
}
|
AlexSikia/dotty
|
src/dotty/tools/dotc/typer/Inferencing.scala
|
Scala
|
bsd-3-clause
| 9,351 |
package se.lu.nateko.cp.meta.onto
import org.semanticweb.owlapi.model.OWLDataFactory
import org.semanticweb.owlapi.apibinding.OWLManager
import org.semanticweb.owlapi.util.DefaultPrefixManager
import org.semanticweb.owlapi.model.PrefixManager
import org.semanticweb.owlapi.model.OWLAnnotationProperty
import org.semanticweb.owlapi.model.IRI
/** OWL vocabulary of the UI-annotations ontology used by the carbon portal. */
object Vocab {
  /** Base IRI under which all annotation properties live. */
  val ontoIri: IRI = IRI.create("http://meta.icos-cp.eu/ontologies/uiannotations/")
  // A data factory obtained from a throwaway ontology manager.
  private val dataFactory: OWLDataFactory =
    OWLManager.createOWLOntologyManager.getOWLDataFactory
  // Resolves bare local names against the ontology base IRI.
  private val prefixes: PrefixManager =
    new DefaultPrefixManager(null, null, ontoIri.toString)
  /** The annotation property with the given local name under [[ontoIri]]. */
  def getAnnotationProperty(localName: String): OWLAnnotationProperty =
    dataFactory.getOWLAnnotationProperty(localName, prefixes)
  val exposedToUsersAnno: OWLAnnotationProperty = getAnnotationProperty("isExposedToUsers")
  val newInstanceBaseUriAnno: OWLAnnotationProperty = getAnnotationProperty("newInstanceBaseUri")
  val displayPropAnno: OWLAnnotationProperty = getAnnotationProperty("displayProperty")
  /** displayProperty1 .. displayProperty5, in order. */
  val displayPropAnnos: IndexedSeq[OWLAnnotationProperty] =
    for (i <- 1 to 5) yield getAnnotationProperty(s"displayProperty$i")
}
|
ICOS-Carbon-Portal/meta
|
src/main/scala/se/lu/nateko/cp/meta/onto/Vocab.scala
|
Scala
|
gpl-3.0
| 1,174 |
/**
* Licensed to the Minutemen Group under one or more contributor license
* agreements. See the COPYRIGHT file distributed with this work for
* additional information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package silhouette.http.transport
import org.specs2.mutable.Specification
import org.specs2.specification.Scope
import silhouette.http._
/**
* Test case for the [[CookieTransport]] class.
*/
class CookieTransportSpec extends Specification {
  "The `copy` method" should {
    "allow to override the config" in new Context {
      transport.copy(transport.config.copy("not-name")).config.name must be equalTo "not-name"
    }
  }
  "The `retrieve` method" should {
    "return some payload from the cookie with the given name" in new Context {
      transport.retrieve(requestPipeline.withCookies(Cookie("test", "payload"))) must beSome("payload")
    }
    "return None if no cookie with the give name exists" in new Context {
      transport.retrieve(requestPipeline) must beNone
    }
  }
  "The `smuggle` method" should {
    "smuggle a cookie into the request" in new Context {
      transport.smuggle("payload", requestPipeline).cookie("test") must beSome.like {
        case cookie =>
          cookie.value must be equalTo "payload"
      }
    }
  }
  "The `embed` method" should {
    "embed a cookie into the response" in new Context {
      transport.embed("payload", responsePipeline).cookie("test") must beSome.like {
        case cookie =>
          cookie.value must be equalTo "payload"
      }
    }
  }
  "The `discard` method" should {
    "discard a cookie" in new Context {
      transport.discard(responsePipeline).cookie("test") must beSome.like {
        case cookie =>
          cookie.value must be equalTo ""
          // -86400 s = minus one day: marks the cookie as already expired
          cookie.maxAge must beSome(-86400)
      }
    }
  }
  "The `RetrieveFromCookie` reads" should {
    "read some payload from a cookie stored in the request" in new Context {
      RetrieveFromCookie("test").read(
        requestPipeline.withCookies(Cookie("test", "payload"))
      ) must beSome("payload")
    }
    "return None if no cookie with the give name exists" in new Context {
      RetrieveFromCookie("noz-existing").read(requestPipeline) must beNone
    }
  }
  "The `SmuggleIntoCookie` writes" should {
    "smuggle a cookie into the request" in new Context {
      SmuggleIntoCookie(config)
        .write(("payload", requestPipeline))
        .cookie("test") must beSome.like {
        case cookie =>
          cookie.value must be equalTo "payload"
      }
    }
  }
  "The `EmbedIntoCookie` writes" should {
    "embed a cookie into the response" in new Context {
      EmbedIntoCookie(config)
        .write(("payload", responsePipeline))
        .cookie("test") must beSome.like {
        case cookie =>
          cookie.value must be equalTo "payload"
      }
    }
  }
  "The `DiscardFromCookie` writes" should {
    "discard a cookie" in new Context {
      DiscardFromCookie(config)
        .write(responsePipeline)
        .cookie("test") must beSome.like {
        case cookie =>
          cookie.value must be equalTo ""
          // Same already-expired max-age convention as the `discard` method above.
          cookie.maxAge must beSome(-86400)
      }
    }
  }
  /**
   * The context.
   */
  trait Context extends Scope {
    /**
     * The cookie transport config.
     */
    val config = CookieTransportConfig(name = "test")
    /**
     * The cookie transport to test.
     */
    val transport = CookieTransport(config)
    /**
     * A request pipeline.
     */
    lazy val requestPipeline = Fake.request
    /**
     * A response pipeline.
     */
    lazy val responsePipeline = Fake.response
  }
}
|
mohiva/silhouette
|
modules/http/src/test/scala/silhouette/http/transport/CookieTransportSpec.scala
|
Scala
|
apache-2.0
| 4,207 |
/*
* Copyright (C) 2014 GRNET S.A.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gr.grnet.egi.vmcatcher
import java.io.{File, IOException}
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import com.beust.jcommander.ParameterException
import gr.grnet.egi.vmcatcher.cmdline.CmdLine._
import gr.grnet.egi.vmcatcher.cmdline._
import gr.grnet.egi.vmcatcher.config.{Config, RabbitMQConfig}
import gr.grnet.egi.vmcatcher.event._
import gr.grnet.egi.vmcatcher.image.handler.HandlerData
import gr.grnet.egi.vmcatcher.image.transformer.ImageTransformers
import gr.grnet.egi.vmcatcher.queue.{QueueConnectAttempt, QueueConnectFailedAttempt, QueueConnectFirstAttempt}
import gr.grnet.egi.vmcatcher.rabbit.{Rabbit, RabbitConnector}
import gr.grnet.egi.vmcatcher.util.{GetImage, UsernamePassword}
import org.apache.avro.io.DecoderFactory
import org.apache.avro.specific.SpecificDatumReader
import org.slf4j.LoggerFactory
import scala.annotation.tailrec
/**
*
*/
object Main extends {
  // Start-of-run timestamp; endSequence() uses it to compute the total duration.
  val t0 = System.currentTimeMillis()
  // The raw command line, recorded by beginSequence() for logging.
  var _args = Array[String]()
  lazy val argsDebugStr = _args.mkString(" ")
  final val ProgName = getClass.getName.stripSuffix("$")
  final val Log = LoggerFactory.getLogger(getClass)
  // Lazy so the configuration file is only read when a command needs it.
  lazy val vmcatcher: VMCatcher = new StdVMCatcher(config)
  lazy val iaas: IaaS = new KamakiBasedIaaS(config.getIaasConfig)
  /** Records the command line and logs the start-of-run banner. */
  def beginSequence(args: Array[String]): Unit = {
    _args = args
    Log.info("=" * 30)
    Log.info(s"BEGIN snf-vmcatcher ($t0) [$argsDebugStr]")
  }
  /** Logs the end-of-run banner together with the elapsed wall-clock time. */
  def endSequence(): Unit = {
    val t1 = System.currentTimeMillis()
    val dtms = t1 - t0
    Log.info(s"END snf-vmcatcher ($dtms ms) [$argsDebugStr]")
    Log.info("=" * 30)
  }
  /** Reports an error both on stderr and in the log. */
  def ERROR(s: String): Unit = {
    System.err.println(s)
    Log.error(s)
  }
  /** Reports an informational message both on stderr and in the log. */
  def INFO(s: String): Unit = {
    System.err.println(s)
    Log.info(s)
  }
  /** Terminates the JVM with the given status, running `alsoDo` first (never returns). */
  def EXIT(status: Int, alsoDo: () ⇒ Any = () ⇒ ()): Nothing = {
    Log.warn(s"Exiting with status $status")
    alsoDo()
    sys.exit(status)
  }
  /** Builds a (command-name -> action) pair for the command map below. */
  def mkcmd[A <: AnyRef](c: A, f: (A) ⇒ Unit): (String, () ⇒ Unit) = {
    val name = nameOf(c)
    val command = () ⇒ f(c)
    name → command
  }
  // Dispatch table: maps each JCommander command name to its implementation.
  val commandMap = Map(
    mkcmd(CmdLine.usage, do_usage),
    // Debugging
    mkcmd(CmdLine.showEnv, do_show_env),
    mkcmd(CmdLine.showConf, do_show_conf),
    // Queues
    mkcmd(CmdLine.enqueueFromEnv, do_enqueue_from_env),
    // mkcmd(CmdLine.enqueueFromImageList, do_enqueue_from_image_list),
    mkcmd(CmdLine.dequeue, do_dequeue),
    mkcmd(CmdLine.drainQueue, do_drain_queue),
    mkcmd(CmdLine.testQueue, do_test_queue),
    // Image lists
    mkcmd(CmdLine.registerImageList, do_register_image_list), /*(N)*/
    mkcmd(CmdLine.activateImageList, do_activate_image_list), /*(N)*/
    mkcmd(CmdLine.deactivateImageList, do_deactivate_image_list), /*(N)*/
    mkcmd(CmdLine.updateCredentials, do_update_credentials), /*(N)*/
    mkcmd(CmdLine.fetchImageList, do_fetch_image_list), /*(N)*/
    mkcmd(CmdLine.listImageList, do_list_image_list), /*(N)*/
    // mkcmd(CmdLine.parseImageList, do_parse_image_list),
    // mkcmd(CmdLine.getImageList, do_get_image_list),
    // Images
    mkcmd(CmdLine.listRegisteredImages, do_list_registered_images), /*(N)*/
    // mkcmd(CmdLine.checkImage, do_check_image), /*(N)*/
    // mkcmd(CmdLine.registerImage, do_register_image), /*(N)*/
    mkcmd(CmdLine.registerImageNow, do_register_now),
    mkcmd(CmdLine.transform, do_transform)
  )
  /** The current process environment. */
  def env: Map[String, String] = sys.env
  def envAsJson = Json.jsonOfMap(env)
  // NOTE(review): identical to envAsJson — no pretty-printing appears to
  // happen here; confirm whether Json offers a pretty-printing variant.
  def envAsPrettyJson: String = Json.jsonOfMap(env)
  def isVerbose = CmdLine.globalOptions.verbose
  def isHelp = CmdLine.globalOptions.help
  def isServer = CmdLine.dequeue.server
  // Clamped into [0, 1000] ms so a misconfigured value cannot stall the server loop.
  def serverSleepMillis = CmdLine.dequeue.sleepMillis max 0L min 1000L
  def dequeueHandler = CmdLine.dequeue.handler
  def do_usage(args: Usage): Unit = jc.usage()
  def do_show_env(args: ShowEnv): Unit = println(envAsPrettyJson)
  def do_show_conf(args: ShowConf): Unit = println(config.toString)
  /** Builds an image event from the process environment (the way the original
   *  vmcatcher hands over data) and publishes it as JSON to RabbitMQ.
   */
  def do_enqueue(connector: RabbitConnector): Unit = {
    val event = ImageEvent.ofSysEnvFields
    Log.info(s"event (sysenv) = $event")
    val json = event.envFieldsView.json
    val rabbit = connector.connect()
    rabbit.publish(json)
    rabbit.close()
  }
  /** Enqueues an event built from the environment, using the configured broker. */
  def do_enqueue_from_env(args: EnqueueFromEnv): Unit = {
    val connector = RabbitConnector(config.getRabbitConfig)
    do_enqueue(connector)
  }
  /** Registers a new image list, with optional HTTP credentials. */
  def do_register_image_list(args: RegisterImageList): Unit = {
    val upOpt = UsernamePassword.optional(args.username, args.password)
    val ref = vmcatcher.registerImageList(args.name, args.url, args.isActive, upOpt)
    INFO(s"Registered $ref")
  }
def do_activate_image_list(args: ActivateImageList): Unit = {
val wasActive = vmcatcher.activateImageList(args.name)
if(wasActive) { INFO(s"Already active") }
else { INFO(s"Activated") }
}
  /** Marks the named image list as inactive and reports whether it already was. */
  def do_deactivate_image_list(args: DeactivateImageList): Unit = {
    val wasActive = vmcatcher.deactivateImageList(args.name)
    if(wasActive) { INFO(s"Deactivated") }
    else { INFO(s"Already deactive") }
  }
  /** Sets or clears the stored credentials of an image list. */
  def do_update_credentials(args: UpdateCredentials): Unit = {
    val upOpt = UsernamePassword.optional(args.username, args.password)
    vmcatcher.updateCredentials(args.name, upOpt)
    if(upOpt.isDefined) { INFO(s"Credentials have been set") }
    else { INFO(s"Credentials have been cleared") }
  }
  /** Downloads and parses the named image list, reporting each parsed image. */
  def do_fetch_image_list(args: FetchImageList): Unit = {
    val (ref, currentImages) = vmcatcher.fetchImageList(args.name)
    INFO(s"Fetched image list $ref, parsed ${currentImages.size} images")
    for {
      currentImage ← currentImages
      image ← currentImage.f_image.obj
    } {
      INFO(s"Parsed image $image")
    }
  }
def do_list_image_list(args: ListImageList): Unit = {
val images = vmcatcher.listImageList(args.name)
for {
image ← images
id = image.id.get
adMpuri = image.adMpuri.get
hvUri = image.hvUri.get
dcIdentifier = image.dcIdentifier.get
} {
INFO(s"$id $dcIdentifier $hvUri")
}
}
def do_list_registered_images(args: ListRegisteredImages): Unit = {
val all = iaas.listRegisteredImages()
for {
image ← all
} {
INFO(s"$image")
}
}
// def do_parse_image_list(args: ParseImageList): Unit = {
// val imageListContainerURL = args.imageListUrl
// val token = args.token
// val rawImageListContainer = Sys.downloadRawImageList(imageListContainerURL, Option(token))
// Log.info (s"imageListContainer (URL ) = $imageListContainerURL")
// Log.debug(s"imageListContainer (raw ) =\\n$rawImageListContainer")
// val imageListContainerJson = parseImageListContainerJson(rawImageListContainer)
// Log.info (s"imageListContainer (json) =\\n$imageListContainerJson")
//
// val events0 = Events.ofImageListJson(imageListContainerJson, Map())
// INFO(s"Found ${events0.size} events")
// if(events0.isEmpty) { return }
//
// // Sort by size ascending and print basic info
// val sortedEvents =
// try events0.sortBy(_(ImageEventField.VMCATCHER_EVENT_HV_SIZE).toLong)
// catch {
// case e: Exception ⇒
// Log.warn(s"Could not sort events", e)
// events0
// }
//
// val miniKeys = Seq(
// ImageEventField.VMCATCHER_EVENT_DC_IDENTIFIER,
// ImageEventField.VMCATCHER_EVENT_AD_MPURI,
// ImageEventField.VMCATCHER_EVENT_HV_URI,
// ImageEventField.VMCATCHER_EVENT_HV_SIZE)
//
// for(event ← sortedEvents) {
// val miniMap = Map((for(key ← miniKeys) yield (key, event(key))):_*)
// val miniInfo = miniMap.mkString("[", ", ", "]")
// INFO(s"Found: $miniInfo")
// }
// }
// def do_get_image_list(args: GetImageList): Unit = {
// val url = args.url
// val token = args.token
// val rawImageListContainer = Sys.downloadRawImageList(url, Option(token))
// Log.info (s"imageListContainer (URL ) = $url")
// val imageListContainerJson = parseImageListContainerJson(rawImageListContainer)
// INFO(s"imageListContainer (json) =\\n$imageListContainerJson")
// }
// def do_enqueue_from_image_list(args: EnqueueFromImageList): Unit = {
// val imageListURL = args.imageListUrl
// val imageIdentifier = args.imageIdentifier
// val tokenOpt = Option(args.token)
//
// val rawText = Sys.downloadRawImageList(imageListURL, tokenOpt)
// Log.info(s"imageList (URL) = $imageListURL")
// Log.info(s"imageList (raw) = $rawText")
// val jsonImageList = parseImageListContainerJson(rawText)
// val events0 = Events.ofImageListJson(
// jsonImageList,
// //Map(ExternalEventField.VMCATCHER_X_EVENT_IMAGE_LIST_URL → imageListURL.toString)
// Map()
// )
//
// events0 match {
// case Nil ⇒
// val errMsg = s"Could not parse events from image list"
// ERROR(errMsg)
// EXIT(4)
//
// case event :: _ ⇒
// val dcIdentifier = event(ImageListEventField.VMCATCHER_EVENT_IL_DC_IDENTIFIER)
// val parsedMsg = s"Parsed image list dc:identifier = $dcIdentifier"
// INFO(parsedMsg)
// val events =
// if(imageIdentifier eq null)
// events0
// else
// events0.filter(_(ImageEventField.VMCATCHER_EVENT_DC_IDENTIFIER) == imageIdentifier)
//
// if(events.isEmpty) {
// val errMsg = s"Image identifier $imageIdentifier not found"
// ERROR(errMsg)
// val identifiers = events0.map(_(ImageEventField.VMCATCHER_EVENT_DC_IDENTIFIER))
// val availableMsg = s"Available identifiers are: ${identifiers.mkString(", ")}"
// INFO(availableMsg)
// EXIT(3)
// }
//
// val matchMsg = s"Matched ${events.size} event(s)"
// INFO(matchMsg)
//
// val connector = RabbitConnector(config.getRabbitConfig)
// val rabbit = connector.connect()
//
// for {
// event ← events
// } {
// val imageIdent = event(ImageEventField.VMCATCHER_EVENT_DC_IDENTIFIER)
// val image_HV_URI = event(ImageEventField.VMCATCHER_EVENT_HV_URI)
// val image_AD_MPURI = event(ImageEventField.VMCATCHER_EVENT_AD_MPURI)
// val eventMsg = s"Enqueueing event for dc:identifier = $imageIdent, hv:uri = $image_HV_URI, ad:mpuri = $image_AD_MPURI"
// INFO(eventMsg)
// Log.info(s"event (image) = $event")
//
// rabbit.publish(event.toEventFieldJson)
// }
//
// val enqueuedMsg = s"Enqueued ${events.size} event(s)"
// INFO(enqueuedMsg)
//
// rabbit.close()
// }
// }
  /** Consumes events from RabbitMQ and passes each one to the configured
   *  dequeue handler. In server mode it loops forever, reconnecting with a
   *  bounded sleep between attempts; otherwise it processes a single message.
   */
  def do_dequeue_(connector: RabbitConnector, data: HandlerData): Unit = {
    // Handles exactly one message, always closing the channel afterwards.
    def doOnce(rabbit: Rabbit): Unit = {
      try {
        rabbit.getAndAck {} { response ⇒
          val jsonMsgBytes = response.getBody
          val jsonMsg = new String(jsonMsgBytes, StandardCharsets.UTF_8)
          val event = ImageEvent.ofEnvFieldsJson(jsonMsg)
          Log.info(s"dequeueHandler = ${dequeueHandler.getClass.getName}")
          dequeueHandler.handle(event, data)
        }
      }
      finally rabbit.close()
    }
    // Retries the broker connection until it succeeds, logging each failure.
    @tailrec
    def connectToRabbit(lastStatus: QueueConnectAttempt): Rabbit = {
      try {
        val rabbit = connector.connect()
        lastStatus match {
          case QueueConnectFailedAttempt(firstAttemptMillis, failedAttempts) ⇒
            val dtMillis = System.currentTimeMillis() - firstAttemptMillis
            val dtSec = dtMillis / 1000
            Log.info(s"OK, successfully connected to Rabbit after $dtSec sec and $failedAttempts attempts")
          case QueueConnectFirstAttempt(firstAttemptMillis) ⇒
            if(!isServer) {
              val dtMillis = System.currentTimeMillis() - firstAttemptMillis
              Log.info(s"OK, successfully connected to Rabbit after $dtMillis ms")
            }
          case _ ⇒
        }
        rabbit
      }
      catch {
        case e: Exception ⇒
          if(lastStatus.isFirstAttempt) {
            // Log the full stack trace only once, on the first failure.
            Log.error("First failed attempt to connect to the queue", e)
          }
          else {
            val failedAttempts = lastStatus.failedAttempts + 1
            Log.error(s"Successive ($failedAttempts) failed attempt to connect to the queue: $e")
          }
          Thread.sleep(serverSleepMillis)
          connectToRabbit(lastStatus.toFailed)
      }
    }
    // One connect-and-consume round; repeats forever in server mode.
    @tailrec
    def doOnceOrLoop(): Unit = {
      try {
        val attempt = QueueConnectFirstAttempt(System.currentTimeMillis())
        val rabbit = connectToRabbit(attempt)
        doOnce(rabbit)
      }
      catch {
        case unrelated: Exception ⇒
          Log.error("", unrelated)
          // In one-shot mode the failure propagates to the caller.
          if(!isServer) throw unrelated
      }
      if(isServer) {
        // DO not reconnect too often
        Thread.sleep(serverSleepMillis)
        doOnceOrLoop()
      }
    }
    doOnceOrLoop()
  }
  /** Sets up handler data from the command line and starts dequeuing. */
  def do_dequeue(args: Dequeue): Unit = {
    val kamakiCloud = args.kamakiCloud
    val insecureSSL = args.insecureSSL
    val workingFolder = CmdLine.globalOptions.workingFolder
    val data = HandlerData(Log, kamakiCloud, ImageTransformers, insecureSSL, workingFolder)
    if(insecureSSL) {
      Log.warn(s"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
      Log.warn(s"! Insecure SSL mode. This is provided as a debugging aid only !!")
      Log.warn(s"! If you trust a (possibly self-signed) certificate, add it to !")
      Log.warn(s"! the trust store. Do not ignore SSL validation errors !!!!!!!!!")
      Log.warn(s"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
    }
    val connector = RabbitConnector(config.getRabbitConfig)
    do_dequeue_(connector, data)
  }
  /** Downloads the image at the given URL and publishes it to the IaaS
   *  immediately, bypassing the queue.
   */
  def do_register_now(args: RegisterImageNow): Unit = {
    val url = args.url
    val insecureSSL = args.insecureSSL
    val kamakiCloud = args.kamakiCloud
    val osfamily = args.osfamily
    val users = args.users
    val format = args.format
    // `format` may be null (optional CLI arg); normalize it when present.
    val formatOpt = Option(format).map(Sys.fixFormat)
    val workingFolder = CmdLine.globalOptions.workingFolder
    val data = HandlerData(Log, kamakiCloud, ImageTransformers, insecureSSL, workingFolder)
    val properties = Sys.minimumImageProperties(osfamily, users)
    Sys.downloadAndPublishImageFile(
      formatOpt,
      properties,
      url,
      data,
      None
    )
  }
  /** Empties the configured queue, acking and logging every drained message.
   *  Messages that cannot be parsed as image events are logged and skipped.
   */
  def do_drain_queue(args: DrainQueue): Unit = {
    val connector = RabbitConnector(config.getRabbitConfig)
    val rabbit = connector.connect()
    // Pulls messages until the queue is empty; returns how many were drained.
    def drainLoop(count: Int): Int = {
      rabbit.get() match {
        case null ⇒
          // null response signals an empty queue.
          rabbit.close()
          count
        case getResponse ⇒
          rabbit.ack(getResponse)
          try {
            val drainedBytes = getResponse.getBody
            val drainedString = new String(drainedBytes, StandardCharsets.UTF_8)
            val event = ImageEvent.ofEnvFieldsJson(drainedString)
            Log.info(s"Drained event $count\\n$event")
          }
          catch {
            case e: Exception ⇒
              Log.error(s"Error converting drained event $count to appropriate format: ${e.getMessage}")
          }
          drainLoop(count + 1)
      }
    }
    val howmany = drainLoop(0)
    val msg = s"Drained $howmany messages"
    Log.info(msg)
    System.out.println(msg)
  }
def do_transform(args: Transform): Unit = {
val imageURL = args.url
val insecureSSL = args.insecureSSL
val workingFolder = CmdLine.globalOptions.workingFolder
val data = HandlerData(Log, "", ImageTransformers, insecureSSL, workingFolder)
val GetImage(isTemporary, imageFile) = Sys.getImage(imageURL, data)
try {
val tramsformedFileOpt = ImageTransformers.transform(None, imageFile, workingFolder)
for {
transformedFile ← tramsformedFileOpt
} {
Log.info(s"do_transform(): Transformed $imageURL to $transformedFile.")
System.out.println(s"Transformed $imageURL to $transformedFile. Do not forget to delete the temporary file.")
System.out.println(s"$transformedFile")
}
}
finally {
if(isTemporary){
Log.info(s"do_transform(): Deleting temporary $imageFile")
imageFile.delete()
}
}
}
  /** Verifies connectivity to the configured RabbitMQ broker by opening and
   *  closing a connection, reporting the outcome with the password masked.
   */
  def do_test_queue(args: TestQueue): Unit = {
    val rabbitConfig = config.getRabbitConfig
    // The password is masked in all user-visible output.
    val maskedRabbitConfig = RabbitMQConfig.newBuilder(rabbitConfig).setPassword("***").build()
    val connector = RabbitConnector(rabbitConfig)
    try {
      val rabbit = connector.connect()
      rabbit.close()
      val successMsg = s"Successfully connected to queue using $maskedRabbitConfig"
      Log.info(successMsg)
      System.out.println(successMsg)
    }
    catch {
      case e: IOException ⇒
        val errMsg = s"Could not connect to queue using $maskedRabbitConfig"
        Log.error(errMsg, e)
        System.err.println(errMsg)
    }
  }
  /** The application configuration, parsed (with Avro schema validation)
   *  from the JSON file named by the global config option.
   */
  lazy val config: Config = {
    val path = CmdLine.globalOptions.config
    val file = new File(path)
    if(!file.exists()) {
      throw new IllegalArgumentException(s"Configuration file $path does not exist")
    }
    else if(!file.isFile) {
      throw new IllegalArgumentException(s"Configuration file $path is not a file (!)")
    }
    val bytes = Files.readAllBytes(file.toPath)
    val json = new String(bytes, StandardCharsets.UTF_8)
    val instance = new Config()
    val schema = instance.getSchema
    val reader = new SpecificDatumReader[Config](schema)
    val decoderFactory = DecoderFactory.get()
    val jsonDecoder = decoderFactory.jsonDecoder(schema, json)
    val validatingDecoder = decoderFactory.validatingDecoder(schema, jsonDecoder)
    reader.read(instance, validatingDecoder)
  }
def mainV(args: Array[String]): Unit = {
beginSequence(args)
jc.parse(args:_*)
val map = Map(
("-v", isVerbose),
("-h", isHelp),
("-server", isServer),
("-sleepMillis", serverSleepMillis),
("-handler", dequeueHandler)
)
Log.info(map.mkString(", "))
val command = jc.getParsedCommand
val isNoCommand = command eq null
if(isHelp || isNoCommand) {
jc.usage()
EXIT(1, endSequence)
}
else {
commandMap.get(command) match {
case None ⇒
throw new ParameterException(s"Unknown command $command")
case Some(commandf) ⇒
commandf()
EXIT(0, endSequence)
}
commandMap(command)()
}
}
  /** JVM entry point: delegates to mainV and maps each failure type to a
   *  distinct exit status (2 = bad arguments, 3 = unexpected exception,
   *  4 = fatal error; VMCatcherException carries its own code).
   */
  def main(args: Array[String]): Unit = {
    try mainV(args)
    catch {
      case e: ParameterException ⇒
        ERROR(e.getMessage)
        EXIT(2, endSequence)
      case e: IllegalArgumentException ⇒
        ERROR(e.getMessage)
        EXIT(2, endSequence)
      case e: VMCatcherException ⇒
        System.err.println(e.msg)
        EXIT(e.code.code, endSequence)
      case e: Exception ⇒
        System.err.println(e.getMessage)
        Log.error("", e)
        e.printStackTrace(System.err)
        EXIT(3, endSequence)
      case e: Throwable ⇒
        // Deliberately catches fatal errors too, so the exit banner is logged.
        System.err.println(e.getMessage)
        Log.error("", e)
        e.printStackTrace(System.err)
        EXIT(4, endSequence)
    }
  }
}
|
grnet/snf-vmcatcher
|
src/main/scala/gr/grnet/egi/vmcatcher/Main.scala
|
Scala
|
gpl-3.0
| 19,869 |
/*
* Copyright (C) 2007-2008 Artima, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Example code from:
*
* Programming in Scala (First Edition, Version 6)
* by Martin Odersky, Lex Spoon, Bill Venners
*
* http://booksites.artima.com/programming_in_scala
*/
package scells
import scala.util.parsing.combinator.RegexParsers
/** Parsers for spreadsheet cell formulas: textual labels, numbers, cell
 *  references, cell ranges and function applications.
 */
object FormulaParsers extends RegexParsers {
  /** An identifier: a letter or underscore followed by word characters. */
  def ident: Parser[String] = """[a-zA-Z_]\\w*""".r
  /** A decimal number, optionally negative, with an optional fraction part. */
  def decimal: Parser[String] = """-?\\d+(\\.\\d*)?""".r
  /** A cell coordinate such as `A1`: a column letter followed by the row number. */
  def cell: Parser[Coord] =
    """[A-Za-z]\\d\\d*""".r ^^ { s =>
      val column = s.charAt(0) - 'A'
      val row = s.substring(1).toInt
      Coord(row, column)
    }
  /** A cell range such as `A1:A5`. */
  def range: Parser[Range] =
    cell~":"~cell ^^ {
      case c1~":"~c2 => Range(c1, c2)
    }
  def number: Parser[Number] =
    decimal ^^ (d => Number(d.toDouble))
  /** A function application such as `sum(A1:A5)`. */
  def application: Parser[Application] =
    ident~"("~repsep(expr, ",")~")" ^^ {
      case f~"("~ps~")" => Application(f, ps)
    }
  /** An expression.
   *
   *  FIX: `range` must be tried before `cell`. `|` is ordered choice and does
   *  not backtrack once an alternative succeeds, so with `cell` first an input
   *  like `A1:A5` consumed `A1` as a cell and ranges could never be parsed
   *  (e.g. inside `sum(A1:A5)`). A plain cell still parses, because `range`
   *  fails on it (no `:`) and the parser falls through to `cell`.
   */
  def expr: Parser[Formula] =
    range | cell | number | application
  /** Any line that does not start with `=` is plain text. */
  def textual: Parser[Textual] =
    """[^=].*""".r ^^ Textual
  /** A whole formula: a number, plain text, or `=` followed by an expression. */
  def formula: Parser[Formula] =
    number | textual | "=" ~> expr
  /** Parses `input`, turning a parse failure into a bracketed error Textual. */
  def parse(input: String): Formula =
    parseAll(formula, input) match {
      case Success(e, _) => e
      case f: NoSuccess => Textual("["+f.msg+"]")
    }
}
|
peachyy/scalastu
|
scells/src/scells/FormulaParsers.scala
|
Scala
|
apache-2.0
| 1,884 |
package pl.touk.nussknacker.ui.validation
import cats.data.NonEmptyList
import cats.data.Validated.{Invalid, Valid}
import pl.touk.nussknacker.engine.ModelData
import pl.touk.nussknacker.engine.api.component.AdditionalPropertyConfig
import pl.touk.nussknacker.engine.api.context.ProcessCompilationError
import pl.touk.nussknacker.engine.api.expression.ExpressionParser
import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
import pl.touk.nussknacker.engine.compile.{NodeTypingInfo, ProcessValidator}
import pl.touk.nussknacker.engine.graph.node.{Disableable, NodeData, Source, SubprocessInputDefinition}
import pl.touk.nussknacker.restmodel.displayedgraph.DisplayableProcess
import pl.touk.nussknacker.restmodel.displayedgraph.displayablenode.{Edge, EdgeType}
import pl.touk.nussknacker.restmodel.process.ProcessingType
import pl.touk.nussknacker.restmodel.validation.{CustomProcessValidator, PrettyValidationErrors}
import pl.touk.nussknacker.restmodel.validation.ValidationResults.{NodeTypingData, ValidationResult}
import pl.touk.nussknacker.ui.definition.UIProcessObjectsFactory
import pl.touk.nussknacker.ui.process.marshall.ProcessConverter
import pl.touk.nussknacker.ui.process.processingtypedata.ProcessingTypeDataProvider
import pl.touk.nussknacker.ui.process.subprocess.SubprocessResolver
import shapeless.syntax.typeable._
/** Factory wiring a [[ProcessValidation]] from per-processing-type model data. */
object ProcessValidation {
  def apply(data: ProcessingTypeDataProvider[ModelData],
            additionalProperties: ProcessingTypeDataProvider[Map[String, AdditionalPropertyConfig]],
            subprocessResolver: SubprocessResolver,
            customProcessNodesValidators: ProcessingTypeDataProvider[CustomProcessValidator]): ProcessValidation = {
    // Extract the engine validator from each model before constructing the facade.
    val processValidators = data.mapValues(_.validator)
    new ProcessValidation(processValidators, additionalProperties, subprocessResolver, customProcessNodesValidators)
  }
}
/**
 * Validates scenarios coming from the UI (a [[DisplayableProcess]]).
 *
 * Validation is two-staged: cheap UI-level structural checks first (ids,
 * duplicates, loose nodes, edge uniqueness, additional properties, custom
 * validators, disabled-node warnings); full engine compilation of the
 * canonical form runs only when the structural result allows saving.
 */
class ProcessValidation(validators: ProcessingTypeDataProvider[ProcessValidator],
additionalPropertiesConfig: ProcessingTypeDataProvider[Map[String, AdditionalPropertyConfig]],
subprocessResolver: SubprocessResolver,
customProcessNodesValidators: ProcessingTypeDataProvider[CustomProcessValidator]) {
// Error "type" tag attached to every error produced by UI-side validation.
val uiValidationError = "UiValidation"
import pl.touk.nussknacker.engine.util.Implicits._
private val additionalPropertiesValidator = new AdditionalPropertiesValidator(additionalPropertiesConfig)
// Copy of this validation using a different subprocess resolver.
def withSubprocessResolver(subprocessResolver: SubprocessResolver) = new ProcessValidation(validators, additionalPropertiesConfig, subprocessResolver, customProcessNodesValidators)
// Copy of this validation with expression parsers modified in every underlying validator.
def withExpressionParsers(modify: PartialFunction[ExpressionParser, ExpressionParser]) = new ProcessValidation(
validators.mapValues(_.withExpressionParsers(modify)), additionalPropertiesConfig, subprocessResolver, customProcessNodesValidators)
// Entry point: UI validation first, engine validation only if saving is allowed.
def validate(displayable: DisplayableProcess): ValidationResult = {
val uiValidationResult = uiValidation(displayable)
//there is no point in further validations if ui process structure is invalid
//displayable to canonical conversion for invalid ui process structure can have unexpected results
if (uiValidationResult.saveAllowed) {
val canonical = ProcessConverter.fromDisplayable(displayable)
uiValidationResult.add(processingTypeValidationWithTypingInfo(canonical, displayable.processingType))
} else {
uiValidationResult
}
}
// Runs the engine validator registered for the given processing type;
// yields a global error when no validator is known for that type.
def processingTypeValidationWithTypingInfo(canonical: CanonicalProcess, processingType: ProcessingType): ValidationResult = {
validators.forType(processingType) match {
case None =>
ValidationResult.errors(Map(), List(), List(PrettyValidationErrors.noValidatorKnown(processingType)))
case Some(processValidator) =>
validateUsingTypeValidator(canonical, processValidator)
}
}
// Aggregates all structural (non-engine) validations into one result.
def uiValidation(displayable: DisplayableProcess): ValidationResult = {
validateIds(displayable)
.add(validateDuplicates(displayable))
.add(validateLooseNodes(displayable))
.add(validateEdgeUniqueness(displayable))
.add(validateAdditionalProcessProperties(displayable))
.add(validateWithCustomProcessValidator(displayable))
.add(warningValidation(displayable))
}
// Resolves subprocesses twice: once on the full graph (to surface resolution
// errors) and once with disabled nodes removed (the graph actually compiled).
private def validateUsingTypeValidator(canonical: CanonicalProcess, processValidator: ProcessValidator): ValidationResult = {
subprocessResolver.resolveSubprocesses(canonical) match {
case Invalid(e) => formatErrors(e)
case _ =>
/* 1. We remove disabled nodes from canonical to not validate disabled nodes
2. TODO: handle types when subprocess resolution fails... */
subprocessResolver.resolveSubprocesses(canonical.withoutDisabledNodes) match {
case Valid(process) =>
val validated = processValidator.validate(process)
//FIXME: Validation errors for subprocess nodes are not properly handled by FE
validated.result.fold(formatErrors, _ => ValidationResult.success)
.withNodeResults(validated.typing.mapValues(nodeInfoToResult))
case Invalid(e) => formatErrors(e)
}
}
}
// Converts engine typing info into the REST-model per-node typing data.
private def nodeInfoToResult(typingInfo: NodeTypingInfo)
= NodeTypingData(typingInfo.inputValidationContext.variables,
typingInfo.parameters.map(_.map(UIProcessObjectsFactory.createUIParameter)),
typingInfo.expressionsTypingInfo)
// Emits a warning (not an error) for every disabled node.
private def warningValidation(process: DisplayableProcess): ValidationResult = {
val disabledNodes = process.nodes.collect { case d: NodeData with Disableable if d.isDisabled.getOrElse(false) => d }
val disabledNodesWarnings = disabledNodes.map(node => (node.id, List(PrettyValidationErrors.disabledNode(uiValidationError)))).toMap
ValidationResult.warnings(disabledNodesWarnings)
}
// Rejects node ids containing quote, backslash or dot characters.
private def validateIds(displayable: DisplayableProcess): ValidationResult = {
val invalidCharsRegexp = "[\\"'\\\\.]".r
ValidationResult.errors(
displayable.nodes.map(_.id).filter(n => invalidCharsRegexp.findFirstIn(n).isDefined)
.map(n => n -> List(PrettyValidationErrors.invalidCharacters(uiValidationError))).toMap,
List(),
List()
)
}
// Additional-property checks apply only to top-level scenarios, not subprocesses.
private def validateAdditionalProcessProperties(displayable: DisplayableProcess): ValidationResult = {
if (displayable.metaData.isSubprocess) {
ValidationResult.success
} else {
additionalPropertiesValidator.validate(displayable)
}
}
// For each source node, edges must be unique both by edge type and by target.
private def validateEdgeUniqueness(displayableProcess: DisplayableProcess): ValidationResult = {
val edgesByFrom = displayableProcess.edges.groupBy(_.from)
def findNonUniqueEdge(edgesFromNode: List[Edge]) = {
val nonUniqueByType = edgesFromNode.groupBy(_.edgeType).collect { case (Some(eType), list) if list.size > 1 =>
PrettyValidationErrors.nonuniqeEdgeType(uiValidationError, eType)
}
val nonUniqueByTarget = edgesFromNode.groupBy(_.to).collect { case (to, list) if list.size > 1 =>
PrettyValidationErrors.nonuniqeEdge(uiValidationError, to)
}
(nonUniqueByType ++ nonUniqueByTarget).toList
}
val edgeUniquenessErrors = edgesByFrom.map { case (from, edges) => from -> findNonUniqueEdge(edges) }.filterNot(_._2.isEmpty)
ValidationResult.errors(edgeUniquenessErrors, List(), List())
}
// A "loose" node is a non-source node with no incoming edge.
private def validateLooseNodes(displayableProcess: DisplayableProcess): ValidationResult = {
val looseNodes = displayableProcess.nodes
//source & subprocess inputs don't have inputs
.filterNot(n => n.isInstanceOf[SubprocessInputDefinition] || n.isInstanceOf[Source])
.filterNot(n => displayableProcess.edges.exists(_.to == n.id))
.map(n => n.id -> List(PrettyValidationErrors.looseNode(uiValidationError)))
.toMap
ValidationResult.errors(looseNodes, List(), List())
}
// Duplicated node ids are reported as a single global error listing all duplicates.
private def validateDuplicates(displayable: DisplayableProcess): ValidationResult = {
val nodeIds = displayable.nodes.map(_.id)
val duplicates = nodeIds.groupBy(identity).filter(_._2.size > 1).keys.toList
if (duplicates.isEmpty) {
ValidationResult.success
} else {
ValidationResult.errors(Map(), List(), List(PrettyValidationErrors.duplicatedNodeIds(uiValidationError, duplicates)))
}
}
// Splits compilation errors into per-node errors and global (node-less) errors.
private def formatErrors(errors: NonEmptyList[ProcessCompilationError]): ValidationResult = {
val globalErrors = errors.filter(_.nodeIds.isEmpty)
ValidationResult.errors(
invalidNodes = (for {
error <- errors.toList.filterNot(globalErrors.contains)
nodeId <- error.nodeIds
} yield nodeId -> PrettyValidationErrors.formatErrorMessage(error)).toGroupedMap,
processPropertiesErrors = Nil,
globalErrors = globalErrors.map(PrettyValidationErrors.formatErrorMessage)
)
}
// Runs the optional per-processing-type custom validator, if one is registered.
private def validateWithCustomProcessValidator(process: DisplayableProcess): ValidationResult = {
customProcessNodesValidators
.forType(process.processingType)
.map(_.validate(process))
.getOrElse(ValidationResult.success)
}
}
|
TouK/nussknacker
|
ui/server/src/main/scala/pl/touk/nussknacker/ui/validation/ProcessValidation.scala
|
Scala
|
apache-2.0
| 9,010 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.attachments
import models.api.{AttachmentType, IdentityEvidence, TransactorIdentityEvidence, VAT2, VAT51, VAT5L}
import org.jsoup.Jsoup
import org.jsoup.nodes.Document
import views.VatRegViewSpec
import views.html.attachments.PostalCoverSheet
/**
 * View spec for the postal cover sheet page: checks headings, paragraphs,
 * panels, the attachment bullet list (including transactor/applicant identity
 * variants) and the print button for various attachment combinations.
 */
class PostalCoverSheetViewSpec extends VatRegViewSpec {
// Reference number rendered in the page panels.
val testRef = "VRN12345689"
// Attachment combinations exercised by the tests below.
val testAttachments: List[AttachmentType] = List[AttachmentType](VAT2, VAT51, IdentityEvidence, VAT5L)
val testVat2: List[AttachmentType] = List[AttachmentType](VAT2)
val testVat5L: List[AttachmentType] = List[AttachmentType](VAT5L)
lazy val view: PostalCoverSheet = app.injector.instanceOf[PostalCoverSheet]
// Expected rendered copy for the page under test.
object ExpectedContent {
val heading = "Print cover letter for documents"
val title = s"$heading - Register for VAT - GOV.UK"
val para1 = "Print this page and include it with the documents you are sending to HMRC. This will enable us to match your online application to your supporting documents."
val warningText = "Do not post the original documents to HMRC as we are unable to return them to you."
val panel1 = s"Register for VAT reference number: $testRef"
val heading2 = "What you must post to us"
val para2 = "You must send us additional documents in order for us to process this VAT application:"
val para3 = "Include this cover letter."
val heading3 = "Postal address"
val para4 = "Send the supporting documents and covering letter to:"
val panel2 = "VAT Registration Applications BT VAT HM Revenue and Customs BX9 1WR United Kingdom"
val vat2Bullet = "a completed VAT2 form (opens in new tab) to capture the details of all the partners"
val vat51Bullet = "a completed VAT 50/51 form (opens in new tab) to provide us with details of the VAT group, including details of each subsidiary"
val vat5LBullet = "a completed VAT5L form (opens in new tab)"
val idEvidence = "three documents to confirm your identity"
// Variant of the identity bullet used when a person's name is supplied.
def idEvidenceNamed(name: String) = s"three documents to confirm $name’s identity"
val print = "Print this page"
val transactorName = "Transactor Name"
val applicantName = "Applicant Name"
}
// Expected content of the collapsible "identity documents" details block.
object IdentityEvidenceBlock {
val summary = "What identity documents can I provide?"
val content: String = "Include a copy of one piece of evidence that includes a government issued photo. This could be a: " +
"passport " +
"driving licence photocard " +
"national identity card " +
"And " +
"Also include two additional pieces of evidence which can be copies of a: " +
"mortgage statement " +
"lease or rental agreement " +
"work permit or visa " +
"letter from the Department for Work and Pensions for confirming entitlement to benefits " +
"utility bill " +
"birth certificate"
}
"The Postal Cover Sheet page" must {
// Default render with all attachment types; individual tests override `doc`
// where a different attachment combination is needed.
implicit val doc: Document = Jsoup.parse(view(testRef, testAttachments, None, None).body)
"have the correct heading" in new ViewSetup {
doc.heading mustBe Some(ExpectedContent.heading)
}
"have the correct warning text" in new ViewSetup {
doc.warningText(1) match {
case Some(value) => value must include(ExpectedContent.warningText)
case None => fail()
}
}
"have the correct panel text" in new ViewSetup {
doc.select(Selectors.indent).get(0).text mustBe ExpectedContent.panel1
}
"have the correct heading2" in new ViewSetup {
doc.headingLevel2(1) mustBe Some(ExpectedContent.heading2)
}
"have the correct heading3" in new ViewSetup {
doc.headingLevel2(2) mustBe Some(ExpectedContent.heading3)
}
"have the correct page title" in new ViewSetup {
doc.title mustBe ExpectedContent.title
}
"have the correct paragraph1 text" in new ViewSetup {
doc.para(1) mustBe Some(ExpectedContent.para1)
}
"have the correct paragraph2 text" in new ViewSetup {
doc.para(2) mustBe Some(ExpectedContent.para2)
}
"have the correct paragraph3 text" in new ViewSetup {
doc.para(3) mustBe Some(ExpectedContent.para3)
}
// Negative cases: bullets must disappear when their attachment type is absent.
"not show the identity documents bullet point when attachment list does not contain IdentityEvidence" in new ViewSetup {
override val doc: Document = Jsoup.parse(view(testRef, testVat2, None, None).body)
doc.unorderedList(1) mustBe List(ExpectedContent.vat2Bullet)
doc.unorderedList(1) mustNot contain(ExpectedContent.idEvidence)
}
"not show the VAT51 bullet point when attachment list does not contain VAT51" in new ViewSetup {
override val doc: Document = Jsoup.parse(view(testRef, testVat2, None, None).body)
doc.unorderedList(1) mustBe List(ExpectedContent.vat2Bullet)
doc.unorderedList(1) mustNot contain(ExpectedContent.vat51Bullet)
}
"not show the vat5L bullet point when attachment list does not contain VAT5L" in new ViewSetup {
override val doc: Document = Jsoup.parse(view(testRef, testVat2, None, None).body)
doc.unorderedList(1) mustBe List(ExpectedContent.vat2Bullet)
doc.unorderedList(1) mustNot contain(ExpectedContent.vat5LBullet)
}
"have the correct first bullet list" in new ViewSetup {
doc.unorderedList(1) mustBe List(
ExpectedContent.vat2Bullet,
ExpectedContent.vat51Bullet,
ExpectedContent.idEvidence,
ExpectedContent.vat5LBullet
)
}
// Transactor flow: the identity bullet is personalised with the relevant name(s).
"have the correct first bullet list for the transactor flow" when {
"transactor is unverified" in new ViewSetup {
override val doc: Document = Jsoup.parse(view(testRef, List(TransactorIdentityEvidence), None, Some(ExpectedContent.transactorName)).body)
doc.unorderedList(1) mustBe List(
ExpectedContent.idEvidenceNamed(ExpectedContent.transactorName)
)
}
"applicant is unverified" in new ViewSetup {
override val doc: Document = Jsoup.parse(view(testRef, List(IdentityEvidence), Some(ExpectedContent.applicantName), None).body)
doc.unorderedList(1) mustBe List(
ExpectedContent.idEvidenceNamed(ExpectedContent.applicantName)
)
}
"both are unverified" in new ViewSetup {
override val doc: Document = Jsoup.parse(view(testRef, List(IdentityEvidence, TransactorIdentityEvidence), Some(ExpectedContent.applicantName), Some(ExpectedContent.transactorName)).body)
doc.unorderedList(1) mustBe List(
ExpectedContent.idEvidenceNamed(ExpectedContent.applicantName),
ExpectedContent.idEvidenceNamed(ExpectedContent.transactorName)
)
}
}
"have a details block" in new ViewSetup {
doc.details mustBe Some(Details(IdentityEvidenceBlock.summary, IdentityEvidenceBlock.content))
}
"have the correct panel text two" in new ViewSetup {
doc.select(Selectors.indent).get(1).text mustBe ExpectedContent.panel2
}
"have a print button" in new ViewSetup {
doc.submitButton mustBe Some(ExpectedContent.print)
}
}
}
|
hmrc/vat-registration-frontend
|
test/views/attachments/PostalCoverSheetViewSpec.scala
|
Scala
|
apache-2.0
| 7,610 |
package discreteevent
/**
 * Inverter gate for the discrete-event circuit simulation: drives `output`
 * with the negation of `input`, delayed by `InverterDelay` time units.
 *
 * NOTE(review): `Wire`, `afterDelay` and `InverterDelay` are not defined in
 * this file — presumably provided by the surrounding simulation framework;
 * verify against the enclosing package.
 */
class Inverter {
  /** Connects an inverter between `input` and `output`. */
  def inverter(input: Wire, output: Wire): Unit = {
    // Runs whenever the input changes: sample the signal now, and schedule
    // the inverted value on the output after the gate delay.
    def invertAction(): Unit = {
      val inputSig = input.getSignal
      afterDelay(InverterDelay) {
        output.setSignal(!inputSig)
      }
    }
    input.addAction(invertAction)
  }
}
|
relyah/CourseraFunctionalProgramming
|
workspace/MutableState/src/discreteevent/Inverter.scala
|
Scala
|
gpl-2.0
| 271 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http
import java.lang.{ StringBuilder => JStringBuilder }
import java.nio.charset.Charset
import java.util.{ List => JList }
import scala.collection.JavaConversions._
import scala.util.control.NonFatal
import io.gatling.commons.util.StringHelper.Eol
import io.gatling.http.response.Response
import io.gatling.http.util.HttpHelper.isTxt
import com.typesafe.scalalogging.LazyLogging
import io.netty.buffer.ByteBufAllocator
import io.netty.handler.codec.http.HttpHeaders
import org.asynchttpclient.netty.request.NettyRequest
import org.asynchttpclient.netty.request.body.NettyMultipartBody
import org.asynchttpclient.{ Param, Request }
import org.asynchttpclient.request.body.multipart._
/**
 * Pretty-printing helpers used when dumping HTTP requests/responses to logs.
 */
package object util extends LazyLogging {
// Enriches a Java StringBuilder with HTTP-specific append helpers.
implicit class HttpStringBuilder(val buff: JStringBuilder) extends AnyVal {
/** Appends each header as "name: value" on its own line. */
def appendHttpHeaders(headers: HttpHeaders): JStringBuilder =
headers.foldLeft(buff) { (buf, entry) =>
// Use the fold accumulator, not the outer `buff`: shadowing the
// accumulator only worked by accident because append returns `this`.
buf.append(entry.getKey).append(": ").append(entry.getValue).append(Eol)
}
/** Appends each param as "name: value" on its own line. */
def appendParamJList(list: JList[Param]): JStringBuilder =
list.foldLeft(buff) { (buf, param) =>
buf.append(param.getName).append(": ").append(param.getValue).append(Eol)
}
/** Appends a full dump of the request: method/url, headers or cookies,
 * form params, bodies of every supported kind, multipart parts, proxy
 * and realm. Prefers the actual Netty request headers when available. */
def appendRequest(request: Request, nettyRequest: Option[NettyRequest], charset: Charset): JStringBuilder = {
buff.append(request.getMethod).append(" ").append(request.getUrl).append(Eol)
nettyRequest match {
case Some(nr) =>
// The Netty request carries the headers actually sent on the wire.
val headers = nr.getHttpRequest.headers
if (!headers.isEmpty) {
buff.append("headers=").append(Eol)
for (header <- headers) {
buff.append(header.getKey).append(": ").append(header.getValue).append(Eol)
}
}
case _ =>
if (!request.getHeaders.isEmpty) {
buff.append("headers=").append(Eol)
buff.appendHttpHeaders(request.getHeaders)
}
if (!request.getCookies.isEmpty) {
buff.append("cookies=").append(Eol)
for (cookie <- request.getCookies) {
buff.append(cookie).append(Eol)
}
}
}
if (!request.getFormParams.isEmpty) {
buff.append("params=").append(Eol)
buff.appendParamJList(request.getFormParams)
}
if (request.getStringData != null) buff.append("stringData=").append(request.getStringData).append(Eol)
if (request.getByteData != null) buff.append("byteData=").append(new String(request.getByteData, charset)).append(Eol)
if (request.getCompositeByteData != null) {
buff.append("compositeByteData=")
request.getCompositeByteData.foreach(b => buff.append(new String(b, charset)))
buff.append(Eol)
}
if (request.getFile != null) buff.append("file=").append(request.getFile.getCanonicalPath).append(Eol)
if (!request.getBodyParts.isEmpty) {
buff.append("parts=").append(Eol)
request.getBodyParts.foreach {
case part: StringPart =>
buff
.append("StringPart:")
.append(" name=").append(part.getName)
.append(" contentType=").append(part.getContentType)
.append(" dispositionType=").append(part.getDispositionType)
.append(" charset=").append(part.getCharset)
.append(" transferEncoding=").append(part.getTransferEncoding)
.append(" contentId=").append(part.getContentId)
.append(Eol)
case part: FilePart =>
buff.append("FilePart:")
.append(" name=").append(part.getName)
.append(" contentType=").append(part.getContentType)
.append(" dispositionType=").append(part.getDispositionType)
.append(" charset=").append(part.getCharset)
.append(" transferEncoding=").append(part.getTransferEncoding)
.append(" contentId=").append(part.getContentId)
.append(" filename=").append(part.getFileName)
.append(" file=").append(part.getFile.getCanonicalPath)
.append(Eol)
case part: ByteArrayPart =>
buff.append("ByteArrayPart:")
.append(" name=").append(part.getName)
.append(" contentType=").append(part.getContentType)
.append(" dispositionType=").append(part.getDispositionType)
.append(" charset=").append(part.getCharset)
.append(" transferEncoding=").append(part.getTransferEncoding)
.append(" contentId=").append(part.getContentId)
.append(" filename=").append(part.getFileName)
.append(Eol)
}
buff.append("multipart=").append(Eol)
// Re-render the multipart body (reusing the original boundary when the
// Netty request is available) so the dump matches what was sent.
val multipartBody = nettyRequest match {
case Some(req) =>
val originalMultipartBody = req.getBody.asInstanceOf[NettyMultipartBody].getBody.asInstanceOf[MultipartBody]
val multipartParts = MultipartUtils.generateMultipartParts(request.getBodyParts, originalMultipartBody.getBoundary)
new MultipartBody(multipartParts, originalMultipartBody.getContentType, originalMultipartBody.getBoundary)
case None => MultipartUtils.newMultipartBody(request.getBodyParts, request.getHeaders)
}
val byteBuf = ByteBufAllocator.DEFAULT.buffer(8 * 1024)
multipartBody.transferTo(byteBuf)
buff.append(byteBuf.toString(charset))
// Release pooled resources in both cases.
multipartBody.close()
byteBuf.release()
}
if (request.getProxyServer != null) buff.append("proxy=").append(request.getProxyServer).append(Eol)
if (request.getRealm != null) buff.append("realm=").append(request.getRealm).append(Eol)
buff
}
/** Appends status, headers and (textual) body of a response, replacing a
 * non-text body with a placeholder and decoding failures with a note. */
def appendResponse(response: Response) = {
response.status.foreach { status =>
buff.append("status=").append(Eol).append(status.getStatusCode).append(" ").append(status.getStatusText).append(Eol)
if (!response.headers.isEmpty) {
buff.append("headers= ").append(Eol)
buff.appendHttpHeaders(response.headers).append(Eol)
}
if (response.hasResponseBody) {
buff.append("body=").append(Eol)
if (isTxt(response.headers)) {
try {
buff.append(response.body.string)
} catch {
// NonFatal only: never swallow OOM/interrupts while logging.
case NonFatal(t) =>
val message = "Could not decode response body"
logger.trace(message, t)
buff.append(s"$message: ${t.getMessage}")
}
} else {
buff.append("<<<BINARY CONTENT>>>")
}
}
}
buff
}
}
}
|
GabrielPlassard/gatling
|
gatling-http/src/main/scala/io/gatling/http/util/package.scala
|
Scala
|
apache-2.0
| 7,297 |
/* __ *\\
** ________ ___ / / ___ __ ____ Scala.js Test Framework **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ |/_// /_\\ \\ http://scala-js.org/ **
** /____/\\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\\* */
package org.scalajs.jasminetest
import sbt.testing._
/**
 * Test event reported to sbt's test interface by the Jasmine framework.
 *
 * Name and fingerprint are delegated to the originating [[TaskDef]];
 * duration defaults to -1 (unknown) and throwable to "none".
 */
final class JasmineEvent(
    taskDef: TaskDef,
    val status: Status,
    val selector: Selector,
    val throwable: OptionalThrowable = new OptionalThrowable,
    val duration: Long = -1L
) extends Event {

  /** Fingerprint of the task that produced this event. */
  def fingerprint: Fingerprint = taskDef.fingerprint

  /** Fully qualified name of the test this event belongs to. */
  def fullyQualifiedName: String = taskDef.fullyQualifiedName
}
|
doron123/scala-js
|
jasmine-test-framework/src/main/scala/org/scalajs/jasminetest/JasmineEvent.scala
|
Scala
|
bsd-3-clause
| 908 |
package com.typedynamic.eventrisk
import Config._
import collection.immutable._
import org.jsoup.Jsoup
import io.Source._
import collection.JavaConversions._
/**
 * Scrapes the configured bond-yield page into a lookup table.
 *
 * Relies on Jsoup and on the implicit Java->Scala collection conversions
 * imported at the top of this file.
 */
object YieldCurve {
  /**
   * Fetches the yield table and returns a map from the first column of each
   * data row (parsed as Int) to the sixth column (parsed as Double),
   * skipping the header row.
   *
   * Performs network I/O; throws if the page is unreachable or a cell does
   * not parse as a number.
   */
  def yieldPage(): Map[Int, Double] = {
    val bondyieldsUrl = conf.getString("bondyieldsUrl")
    val rows = Jsoup.connect(bondyieldsUrl).get
      .select("#column0 table.mdcTable tr")
      .drop(1) // first row is the table header
    // `return` is redundant in Scala; build the pairs and materialize once.
    rows.map(elem => elem.child(0).text.toInt -> elem.child(5).text.toDouble).toMap
  }
}
|
commonlisp/eventrisk
|
src/main/scala/yieldcurve.scala
|
Scala
|
apache-2.0
| 511 |
/**
* Copyright: Copyright (C) 2016, Jaguar Land Rover
* License: MPL-2.0
*/
package org.genivi.sota.resolver.test
import akka.http.scaladsl.model.StatusCodes
import eu.timepit.refined.api.Refined
import io.circe.generic.auto._
import org.genivi.sota.data.{Namespaces, PackageId}
import org.genivi.sota.marshalling.CirceMarshallingSupport._
import org.genivi.sota.resolver.common.Errors.Codes
import org.genivi.sota.resolver.db.{PackageFilter, PackageFilterResponse, PackageResponse}
import org.genivi.sota.resolver.filters.Filter
import org.genivi.sota.rest.{ErrorCodes, ErrorRepresentation}
/**
* Spec for Package Filter REST actions
*/
/**
 * Spec for Package Filter REST actions.
 *
 * NOTE(review): the cases share server state and appear order-dependent —
 * later tests rely on the package/filter created by the first one and on
 * earlier deletions; verify before reordering or parallelising.
 */
class PackageFilterResourceWordSpec extends ResourceWordSpec with Namespaces {
"Package filter resource" should {
// Fixture data reused (and mutated on the server) across all cases below.
val pkgName = "package1"
val pkgVersion = "1.0.0"
val filterName = "filter"
val filterExpr = s"""vin_matches "^X.*""""
val pkgFilter = PackageFilterResponse(
Refined.unsafeApply(pkgName),
Refined.unsafeApply(pkgVersion),
Refined.unsafeApply(filterName))
// Creates the package+filter association that subsequent cases depend on.
"be able to assign exisiting filters to existing packages" in {
addPackageOK(pkgName, pkgVersion, None, None)
addFilterOK(filterName, filterExpr)
addPackageFilterOK(pkgName, pkgVersion, filterName)
}
"not allow assignment of filters to non-existing package names" in {
addPackageFilter("nonexistant", pkgVersion, filterName) ~> route ~> check {
status shouldBe StatusCodes.NotFound
responseAs[ErrorRepresentation].code shouldBe ErrorCodes.MissingEntity
}
}
"not allow assignment of filters to non-existing package versions" in {
addPackageFilter(pkgName, "0.0.9", filterName) ~> route ~> check {
status shouldBe StatusCodes.NotFound
responseAs[ErrorRepresentation].code shouldBe ErrorCodes.MissingEntity
}
}
"not allow assignment of non-existing filters to existing packages " in {
addPackageFilter(pkgName, pkgVersion, "nonexistant") ~> route ~> check {
status shouldBe StatusCodes.NotFound
responseAs[ErrorRepresentation].code shouldBe ErrorCodes.MissingEntity
}
}
"list existing package filters on GET requests" in {
listPackageFilters ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[Seq[PackageFilterResponse]] shouldBe List(pkgFilter)
}
}
"list packages associated to a filter on GET requests to /filters/:filterName/package" in {
listPackagesForFilter(filterName) ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[List[PackageResponse]] shouldBe List(
PackageResponse(PackageId(Refined.unsafeApply(pkgName), Refined.unsafeApply(pkgVersion)), None, None))
}
}
"fail to list packages associated to empty filter names" in {
listPackagesForFilter("") ~> route ~> check {
status shouldBe StatusCodes.NotFound
}
}
"fail to list packages associated to non-existant filters" in {
listPackagesForFilter("nonexistant") ~> route ~> check {
status shouldBe StatusCodes.NotFound
}
}
"list filters associated to a package on GET requests to /packages/:pkgName/:pkgVersion/filter" in {
listFiltersForPackage(pkgName, pkgVersion) ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[Seq[Filter]] shouldBe List(
Filter(defaultNs, Refined.unsafeApply(filterName), Refined.unsafeApply(filterExpr)))
}
}
"fail to list filters associated to a package if no package name is given" in {
listFiltersForPackage("", pkgVersion) ~> route ~> check {
status shouldBe StatusCodes.NotFound
}
}
"fail to list filters associated to a package if a non-existant package name is given" in {
listFiltersForPackage("nonexistant", pkgVersion) ~> route ~> check {
status shouldBe StatusCodes.NotFound
}
}
"fail to list filters associated to a package if no package version is given" in {
listFiltersForPackage(pkgName, "") ~> route ~> check {
status shouldBe StatusCodes.NotFound
}
}
"fail to list filters associated to a package if a non-existant package version is given" in {
listFiltersForPackage(pkgName, "6.6.6") ~> route ~> check {
status shouldBe StatusCodes.NotFound
}
}
// Removes the association created earlier; the listing must then be empty.
"delete package filters on DELETE requests" in {
deletePackageFilter(pkgName, pkgVersion, filterName) ~> route ~> check {
status shouldBe StatusCodes.OK
listPackageFilters ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[Seq[PackageFilter]] shouldBe List()
}
}
}
"fail if package filter does not exist" in {
deletePackageFilter("nonexistant", pkgVersion, filterName) ~> route ~> check {
status shouldBe StatusCodes.NotFound
responseAs[ErrorRepresentation].code shouldBe Codes.PackageFilterNotFound
}
}
// Cascade check: deleting the filter itself removes its package associations.
"delete all package filters when a filter is deleted" in {
addPackageFilterOK(pkgName, pkgVersion, filterName)
deleteFilterOK(filterName)
listPackageFilters ~> route ~> check {
status shouldBe StatusCodes.OK
responseAs[Seq[PackageFilterResponse]] shouldBe List()
}
}
}
}
|
PDXostc/rvi_sota_server
|
external-resolver/src/test/scala/org/genivi/sota/resolver/test/PackageFilterResourceSpec.scala
|
Scala
|
mpl-2.0
| 5,325 |
package org.imdex.tractor.util
import scala.concurrent.duration.FiniteDuration
import scala.language.implicitConversions
/**
* Created by a.tsukanov on 21.07.2016.
*/
/** Zero-allocation wrapper marking a [[FiniteDuration]] as a timeout. */
final case class Timeout(duration: FiniteDuration) extends AnyVal

object Timeout {
  /** Lets a [[Timeout]] be used wherever a plain [[FiniteDuration]] is expected. */
  implicit def toDuration(timeout: Timeout): FiniteDuration = timeout.duration
}
|
Im-dex/trActor
|
tractor-actor/src/main/scala/org/imdex/tractor/util/Timeout.scala
|
Scala
|
mit
| 340 |
package mesosphere.marathon
package integration
import java.util.UUID
import akka.util.ByteString
import mesosphere.marathon.integration.facades.MarathonFacade._
import mesosphere.marathon.integration.facades.{ITDeployment, ITEnrichedTask, ITQueueItem}
import mesosphere.marathon.integration.setup._
import mesosphere.marathon.raml.{App, AppCommandCheck, AppHealthCheck, AppHealthCheckProtocol, AppUpdate, Container, ContainerPortMapping, DockerContainer, EngineType, Network, NetworkMode, NetworkProtocol, UpgradeStrategy}
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state.{PathId, Timestamp}
import mesosphere.{AkkaIntegrationTest, WaitTestSupport}
import org.scalactic.source.Position
import scala.concurrent.duration._
class AppDeployIntegrationTest extends AkkaIntegrationTest with EmbeddedMarathonTest {
// Builds an app id under the test base path, unique per suffix (or a random UUID).
def appId(suffix: Option[String] = None): PathId = testBasePath / s"app-${suffix.getOrElse(UUID.randomUUID)}"
"AppDeploy" should {
"create a simple app without health checks" in {
Given("a new app")
val app = appProxy(appId(Some("without-health-checks")), "v1", instances = 1, healthCheck = None)
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
waitForTasks(app.id.toPath, 1) //make sure, the app has really started
}
"redeploying an app without changes should not cause restarts" in {
Given("an deployed app")
val app = appProxy(appId(Some("without-changes-should-not-restart")), "v1", instances = 1, healthCheck = None)
val result = marathon.createAppV2(app)
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
val taskBeforeRedeployment = waitForTasks(app.id.toPath, 1) //make sure, the app has really started
When("redeploying the app without changes")
val update = marathon.updateApp(app.id.toPath, AppUpdate(id = Some(app.id), cmd = app.cmd))
waitForDeployment(update)
val tasksAfterRedeployment = waitForTasks(app.id.toPath, 1) //make sure, the app has really started
Then("no tasks should have been restarted")
taskBeforeRedeployment should be(tasksAfterRedeployment)
}
"backoff delays are reset on configuration changes" in {
val pathId = testBasePath / "app-with-backoff-delay-is-reset-on-conf-changes"
Given(s"a backed off app with name $pathId")
val app: App = backedOffApp(pathId)
When("we force deploy a working configuration")
val deployment2 = marathon.updateApp(app.id.toPath, AppUpdate(cmd = Some("sleep 120; true")), force = true)
Then("The app deployment is created")
deployment2 should be(OK)
And("and the app gets deployed immediately")
waitForDeployment(deployment2)
waitForTasks(app.id.toPath, 1)
}
"backoff delays are NOT reset on scaling changes" in {
val pathId = testBasePath / "app-with-backoff-delays-is-not-reset-on-scheduling-changes"
Given(s"a backed off app with name $pathId")
val app: App = backedOffApp(pathId)
When("we force deploy a scale change")
val deployment2 = marathon.updateApp(app.id.toPath, AppUpdate(instances = Some(3)), force = true)
Then("The app deployment is created")
deployment2 should be(OK)
And("BUT our app still has a backoff delay")
val queueAfterScaling: List[ITQueueItem] = marathon.launchQueueForAppId(app.id.toPath).value
queueAfterScaling should have size 1
queueAfterScaling.map(_.delay.overdue) should contain(false)
}
"backoff delay can be get/deleted on a crash looping app" in {
Given("a crash looping app instance")
val id = testBasePath / "crash-looping-app-with-backoff-delay"
val uptime = 10.seconds
createAFailingApp(id, Some(s"sleep ${uptime.toSeconds} && false"), uptime, 1.hour)
When("we try to fetch the current delay on the app")
val delayShownForQueuedInstance = () => {
val response = marathon.launchQueueForAppId(id)
response should be(OK)
response.value should have size 1
val queueItem = response.value.head
queueItem.delay.timeLeftSeconds should be >= 0
true
}
val delayResetSuccess = () => {
val response = marathon.launchQueueDelayReset(id)
response should be(NoContent)
true
}
val patienceConfig: WaitTestSupport.PatienceConfig = WaitTestSupport.PatienceConfig(timeout = 5.minutes, interval = 2.seconds)
Then("we delete the current delay set on application")
WaitTestSupport.waitUntil("delay is shown and then reset it") {
delayShownForQueuedInstance()
delayResetSuccess()
}(patienceConfig)
When("the application is healthy")
waitForStatusUpdates("TASK_RUNNING")
Then("we delete the current delay set on application again")
WaitTestSupport.waitUntil("delay is shown and then reset it") {
delayShownForQueuedInstance()
delayResetSuccess()
}(patienceConfig)
}
"restarting an app with backoff delay starts immediately" in {
val pathId = testBasePath / "app-restart-with-backoff"
Given(s"a backed off app with name $pathId")
val app: App = backedOffApp(pathId)
When("we force a restart")
val deployment2 = marathon.restartApp(app.id.toPath, force = true)
Then("The app deployment is created")
deployment2 should be(OK)
And("the task eventually fails AGAIN")
waitForStatusUpdates("TASK_RUNNING", "TASK_FAILED")
}
def backedOffApp(id: PathId): App = {
val app = createAFailingApp(id, Some("false"), 1.hour, 1.hour)
eventually {
val queue: List[ITQueueItem] = marathon.launchQueueForAppId(app.id.toPath).value
queue should have size 1
queue.map(_.delay.overdue) should contain(false)
}
app
}
def createAFailingApp(
id: PathId,
failingCmd: Option[String],
backoffSeconds: FiniteDuration,
maxLaunchDelaySeconds: FiniteDuration): App = {
val app =
appProxy(id, "v1", instances = 1, healthCheck = None)
.copy(
cmd = failingCmd,
backoffSeconds = backoffSeconds.toSeconds.toInt,
maxLaunchDelaySeconds = maxLaunchDelaySeconds.toSeconds.toInt
)
val result = marathon.createAppV2(app)
result should be(Created)
waitForStatusUpdates("TASK_RUNNING", "TASK_FAILED")
app
}
// OK
"increase the app count metric when an app is created" in {
Given("a new app")
val app = appProxy(appId(Some("with-increased-count-when-an-app-created")), "v1", instances = 1, healthCheck = None)
val appCount = (marathon.metrics().entityJson \\ "gauges" \\ "marathon.apps.active.gauge" \\ "value").as[Double]
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app count metric should increase")
result should be(Created)
eventually {
(marathon.metrics().entityJson \\ "gauges" \\ "marathon.apps.active.gauge" \\ "value").as[Double] should be > appCount
}
}
// OK
"create a simple app without health checks via secondary (proxying)" in {
Given("a new app")
val app = appProxy(appId(Some("without-health-checks-via-secondary")), "v1", instances = 1, healthCheck = None)
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
waitForTasks(app.id.toPath, 1) //make sure, the app has really started
}
"create a simple app with a Marathon HTTP health check" in {
Given("a new app")
val app = appProxy(appId(Some("with-marathon-http-health-check")), "v1", instances = 1, healthCheck = None).
copy(healthChecks = Set(ramlHealthCheck))
val check = registerAppProxyHealthCheck(PathId(app.id), "v1", state = true)
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
check.pinged.set(false)
eventually {
check.pinged.get should be(true) withClue "App did not start"
}
}
"create a simple app with a Mesos HTTP health check" in {
Given("a new app")
val app = appProxy(appId(Some("with-mesos-http-health-check")), "v1", instances = 1, healthCheck = None).
copy(healthChecks = Set(ramlHealthCheck.copy(protocol = AppHealthCheckProtocol.MesosHttp)))
val check = registerAppProxyHealthCheck(app.id.toPath, "v1", state = true)
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
check.pinged.set(false)
eventually {
check.pinged.get should be(true) withClue "App did not start"
}
}
"create a simple app with a Marathon HTTP health check using port instead of portIndex" in {
Given("A clean cluster (since we need a concrete port that should be free)")
cleanUp()
And("a new app")
val port = mesosCluster.randomAgentPort()
val app = appProxy(appId(Some("with-marathon-http-health-check-using-port")), "v1", instances = 1, healthCheck = None).
copy(
portDefinitions = Option(raml.PortDefinitions(port)),
requirePorts = true,
healthChecks = Set(ramlHealthCheck.copy(port = Some(port), portIndex = None))
)
val check = registerAppProxyHealthCheck(app.id.toPath, "v1", state = true)
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
check.pinged.set(false)
eventually {
check.pinged.get should be(true) withClue "App did not start"
}
}
"create a simple app with a Marathon TCP health check" in {
Given("a new app")
val app = appProxy(appId(Some("with-marathon-tcp-health-check")), "v1", instances = 1, healthCheck = None).
copy(healthChecks = Set(ramlHealthCheck.copy(protocol = AppHealthCheckProtocol.Tcp)))
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
}
"create a simple app with a Mesos TCP health check" in {
Given("a new app")
val app = appProxy(appId(Some("with-mesos-tcp-health-check")), "v1", instances = 1, healthCheck = None).
copy(healthChecks = Set(ramlHealthCheck.copy(protocol = AppHealthCheckProtocol.Tcp)))
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
}
"create a simple app with a COMMAND health check" in {
Given("a new app")
val app = appProxy(appId(Some("with-command-health-check")), "v1", instances = 1, healthCheck = None).
copy(healthChecks = Set(AppHealthCheck(
protocol = AppHealthCheckProtocol.Command,
command = Some(AppCommandCheck("true")))))
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
}
// OK
"list running apps and tasks" in {
Given("a new app is deployed")
val app = appProxy(appId(Some("listing-running-apps-and-tasks")), "v1", instances = 2, healthCheck = None)
val create = marathon.createAppV2(app)
create should be(Created)
When("the deployment has finished")
waitForDeployment(create)
Then("the list of running app tasks can be fetched")
val apps = marathon.listAppsInBaseGroupForAppId(app.id.toPath).value
apps should have size 1
val tasksResult: RestResult[List[ITEnrichedTask]] = marathon.tasks(app.id.toPath)
tasksResult should be(OK)
val tasks = tasksResult.value
tasks should have size 2
}
"an unhealthy app fails to deploy" in {
Given("a new app that is not healthy")
val id = appId(Some("unhealthy-fails-to-deploy"))
registerAppProxyHealthCheck(id, "v1", state = false)
val app = appProxy(id, "v1", instances = 1, healthCheck = Some(appProxyHealthCheck()))
When("The app is deployed")
val create = marathon.createAppV2(app)
Then("We receive a deployment created confirmation")
create should be(Created)
extractDeploymentIds(create) should have size 1
And("a number of failed health events but the deployment does not succeed")
def interestingEvent() = waitForEventMatching("failed_health_check_event or deployment_success")(callbackEvent =>
callbackEvent.eventType == "deployment_success" ||
callbackEvent.eventType == "failed_health_check_event"
)
for (event <- Iterator.continually(interestingEvent()).take(10)) {
event.eventType should be("failed_health_check_event")
}
When("The app is deleted")
val delete = marathon.deleteApp(id, force = true)
delete should be(OK)
waitForDeployment(delete)
marathon.listAppsInBaseGroupForAppId(id).value should have size 0
}
"an unhealthy app fails to deploy because health checks takes too long to pass" in {
Given("a new app that is not healthy")
val id = appId(Some("unhealthy-fails-to-deploy-because-health-check-takes-too-long"))
val check = registerAppProxyHealthCheck(id, "v1", state = false)
val app = appProxy(id, "v1", instances = 1, healthCheck = Some(appProxyHealthCheck().copy(timeoutSeconds = 2)))
When("The app is deployed")
val create = marathon.createAppV2(app)
Then("We receive a deployment created confirmation")
create should be(Created)
extractDeploymentIds(create) should have size 1
And("a number of failed health events but the deployment does not succeed")
def interestingEvent() = waitForEventMatching("failed_health_check_event or deployment_success")(callbackEvent =>
callbackEvent.eventType == "deployment_success" ||
callbackEvent.eventType == "failed_health_check_event"
)
check.afterDelay(20.seconds, true)
for (event <- Iterator.continually(interestingEvent()).take(10)) {
event.eventType should be("failed_health_check_event")
}
When("The app is deleted")
val delete = marathon.deleteApp(id, force = true)
delete should be(OK)
waitForDeployment(delete)
marathon.listAppsInBaseGroupForAppId(id).value should have size 0
}
"update an app" in {
Given("a new app")
val id = appId(Some("with-update-test"))
val v1 = appProxy(id, "v1", instances = 1, healthCheck = Some(appProxyHealthCheck()))
val create = marathon.createAppV2(v1)
create should be(Created)
waitForDeployment(create)
val before = marathon.tasks(id)
When("The app is updated")
val check = registerAppProxyHealthCheck(id, "v2", state = true)
val update = marathon.updateApp(PathId(v1.id), AppUpdate(cmd = appProxy(id, "v2", 1).cmd))
Then("The app gets updated")
update should be(OK)
waitForDeployment(update)
waitForTasks(id, before.value.size)
check.pinged.set(false)
eventually {
check.pinged.get should be(true) withClue "App did not start"
}
}
"update an app through patch request" in {
Given("a new app")
val id = appId(Some("update-through-patch-request"))
val v1 = appProxy(id, "v1", instances = 1, healthCheck = Some(appProxyHealthCheck()))
val create = marathon.createAppV2(v1)
create should be(Created)
waitForDeployment(create)
val before = marathon.tasks(id)
When("The app is updated")
val check = registerAppProxyHealthCheck(id, "v2", state = true)
val update = marathon.patchApp(v1.id.toPath, AppUpdate(cmd = appProxy(id, "v2", 1).cmd))
Then("The app gets updated")
update should be(OK)
waitForDeployment(update)
waitForTasks(id, before.value.size)
check.pinged.set(false)
eventually {
check.pinged.get should be(true) withClue "App did not start"
}
Then("Check if healthcheck is not updated")
val appResult = marathon.app(id)
appResult should be(OK)
appResult.value.app.healthChecks
}
"scale an app up and down" in {
Given("a new app")
val app = appProxy(appId(Some("scale-up-and-down")), "v1", instances = 1, healthCheck = None)
val create = marathon.createAppV2(app)
create should be(Created)
waitForDeployment(create)
When("The app gets an update to be scaled up")
val scaleUp = marathon.updateApp(PathId(app.id), AppUpdate(instances = Some(2)))
Then("New tasks are launched")
scaleUp should be(OK)
waitForDeployment(scaleUp)
waitForTasks(app.id.toPath, 2)
When("The app gets an update to be scaled down")
val scaleDown = marathon.updateApp(PathId(app.id), AppUpdate(instances = Some(1)))
Then("Tasks are killed")
scaleDown should be(OK)
waitForStatusUpdates("TASK_KILLED")
waitForTasks(app.id.toPath, 1)
}
"restart an app" in {
Given("a new app")
val id = appId(Some("testing-restart-an-app"))
val v1 = appProxy(id, "v1", instances = 1, healthCheck = None)
val create = marathon.createAppV2(v1)
create should be(Created)
waitForDeployment(create)
val before = marathon.tasks(id)
When("The app is restarted")
val restart = marathon.restartApp(PathId(v1.id))
Then("All instances of the app get restarted")
restart should be(OK)
waitForDeployment(restart)
val after = marathon.tasks(id)
waitForTasks(id, before.value.size)
before.value.toSet should not be after.value.toSet
}
"list app versions" in {
Given("a new app")
val v1 = appProxy(appId(Some("list-app-versions")), "v1", instances = 1, healthCheck = None)
val createResponse = marathon.createAppV2(v1)
createResponse should be(Created)
waitForDeployment(createResponse)
When("The list of versions is fetched")
val list = marathon.listAppVersions(v1.id.toPath)
Then("The response should contain all the versions")
list should be(OK)
list.value.versions should have size 1
list.value.versions.headOption should be(createResponse.value.version.map(Timestamp(_)))
}
"correctly version apps on update" in {
Given("a new app")
val v1 = appProxy(appId(Some("correctly-version-app-on-update")), "v1", instances = 1, healthCheck = None)
val createResponse = marathon.createAppV2(v1)
createResponse should be(Created)
val originalVersion = createResponse.value.version
waitForDeployment(createResponse)
When("A resource specification is updated")
val updatedDisk: Double = v1.disk + 1.0
val appUpdate = AppUpdate(Option(v1.id), disk = Option(updatedDisk))
val updateResponse = marathon.updateApp(PathId(v1.id), appUpdate)
updateResponse should be(OK)
waitForDeployment(updateResponse)
Then("It should create a new version with the right data")
val responseOriginalVersion = marathon.appVersion(v1.id.toPath, Timestamp(originalVersion.get))
responseOriginalVersion should be(OK)
responseOriginalVersion.value.disk should be(v1.disk)
val updatedVersion = updateResponse.value.version
val responseUpdatedVersion = marathon.appVersion(PathId(v1.id), updatedVersion)
responseUpdatedVersion should be(OK)
responseUpdatedVersion.value.disk should be(updatedDisk)
}
"kill a task of an App" in {
Given("a new app")
val app = appProxy(appId(Some("kill-a-task-of-an-app")), "v1", instances = 1, healthCheck = None)
val create = marathon.createAppV2(app)
create should be(Created)
waitForDeployment(create)
val taskId = marathon.tasks(app.id.toPath).value.head.id
When("a task of an app is killed")
val response = marathon.killTask(PathId(app.id), taskId)
response should be(OK)
waitForStatusUpdates("TASK_KILLED")
Then("All instances of the app get restarted")
waitForTasks(app.id.toPath, 1)
marathon.tasks(app.id.toPath).value.head should not be taskId
}
"kill a task of an App with scaling" in {
Given("a new app")
val app = appProxy(appId(Some("kill-a-task-of-an-app-with-scaling")), "v1", instances = 2, healthCheck = None)
val create = marathon.createAppV2(app)
create should be(Created)
waitForDeployment(create)
val taskId = marathon.tasks(app.id.toPath).value.head.id
When("a task of an app is killed and scaled")
marathon.killTask(app.id.toPath, taskId, scale = true) should be(OK)
waitForStatusUpdates("TASK_KILLED")
Then("All instances of the app get restarted")
waitForTasks(app.id.toPath, 1)
marathon.app(app.id.toPath).value.app.instances should be(1)
}
"kill all tasks of an App" in {
Given("a new app with multiple tasks")
val app = appProxy(appId(Some("kill-all-tasks-of-an-app")), "v1", instances = 2, healthCheck = None)
val create = marathon.createAppV2(app)
create should be(Created)
waitForDeployment(create)
When("all task of an app are killed")
val response = marathon.killAllTasks(PathId(app.id))
response should be(OK)
waitForStatusUpdates("TASK_KILLED")
waitForStatusUpdates("TASK_KILLED")
Then("All instances of the app get restarted")
waitForTasks(app.id.toPath, 2)
}
"kill all tasks of an App with scaling" in {
Given("a new app with multiple tasks")
val app = appProxy(appId(Some("kill-all-tasks-of-an-app-with-scaling")), "v1", instances = 2, healthCheck = None)
val create = marathon.createAppV2(app)
create should be(Created)
waitForDeployment(create)
marathon.app(app.id.toPath).value.app.instances should be(2)
When("all task of an app are killed")
val result = marathon.killAllTasksAndScale(app.id.toPath)
result should be(OK)
result.value.version should not be empty
Then("All instances of the app get restarted")
waitForDeployment(result)
waitForTasks(app.id.toPath, 0)
marathon.app(app.id.toPath).value.app.instances should be(0)
}
"delete an application" in {
Given("a new app with one task")
val app = appProxy(appId(Some("delete-an-application")), "v1", instances = 1, healthCheck = None)
val create = marathon.createAppV2(app)
create should be(Created)
waitForDeployment(create)
When("the app is deleted")
val delete = marathon.deleteApp(PathId(app.id))
delete should be(OK)
waitForDeployment(delete)
Then("All instances of the app get restarted")
marathon.listAppsInBaseGroupForAppId(app.id.toPath).value should have size 0
}
"create and deploy an app with two tasks" in {
Given("a new app")
val appIdPath: PathId = appId(Some("create-and-deploy-an-app-with-two-tasks"))
val app = appProxy(appIdPath, "v1", instances = 2, healthCheck = None)
When("the app gets posted")
val createdApp: RestResult[App] = marathon.createAppV2(app)
Then("the app is created and a success event arrives eventually")
createdApp should be(Created)
Then("we get various events until deployment success")
val deploymentIds: Seq[String] = extractDeploymentIds(createdApp)
deploymentIds.length should be(1)
val deploymentId = deploymentIds.head
val waitingFor = Map[String, CallbackEvent => Boolean](
elems =
"api_post_event" -> (_.info("appDefinition").asInstanceOf[Map[String, Any]]("id") == appIdPath.toString),
"group_change_success" -> (_.info("groupId").asInstanceOf[String] == appIdPath.parent.toString),
"status_update_event" -> (_.info("appId") == appIdPath.toString),
"status_update_event" -> (_.info("appId") == appIdPath.toString),
"deployment_success" -> (_.info("id") == deploymentId)
)
waitForEventsWith(s"waiting for various events for ${app.id} to be successfully deployed", waitingFor)
Then("after that deployments should be empty")
val event: RestResult[List[ITDeployment]] = marathon.listDeploymentsForPathId(appIdPath)
event.value should be('empty)
Then("Both tasks respond to http requests")
eventually {
val tasks = marathon.tasks(appIdPath).value
tasks.size shouldBe 2
tasks.foreach(et => appMock.ping(et.host, et.ports.get.head))
}
}
"stop (forcefully delete) a deployment" in {
Given("a new app with constraints that cannot be fulfilled")
val c = Seq("nonExistent", "CLUSTER", "na")
val id = appId(Some("stop-and-force-delete-a-deployment"))
val app = App(id.toString, constraints = Set(c), cmd = Some("na"), instances = 5, portDefinitions = None)
val create = marathon.createAppV2(app)
create should be(Created)
// Created
val deploymentId = extractDeploymentIds(create).head
Then("the deployment gets created")
WaitTestSupport.validFor("deployment visible", 1.second)(marathon.listDeploymentsForPathId(id).value.size == 1)
Then("overdue flag is set to true")
eventually {
val queueItem = marathon.launchQueue().value.queue.find(_.app.id equals id.toString)
queueItem shouldBe defined
queueItem.get.delay.overdue shouldBe true
}
When("the deployment is forcefully removed")
val delete = marathon.deleteDeployment(deploymentId, force = true)
delete should be(Accepted)
Then("the deployment should be gone")
waitForEvent("deployment_failed")
marathon.listDeploymentsForPathId(id).value should have size 0
Then("the app should still be there")
marathon.app(id) should be(OK)
}
"rollback a deployment (by removing an app)" in {
Given("a new app with constraints that cannot be fulfilled")
val c = Seq("nonExistent", "CLUSTER", "na")
val id = appId(Some("rollback-a-deployment"))
val app = App(id.toString, constraints = Set(c), cmd = Some("na"), instances = 5, portDefinitions = None)
val create = marathon.createAppV2(app)
create should be(Created)
// Created
val deploymentId = extractDeploymentIds(create).head
Then("the deployment gets created")
WaitTestSupport.validFor("deployment visible", 5.second)(marathon.listDeploymentsForPathId(id).value.size == 1)
When("the deployment is rolled back")
val delete = marathon.deleteDeployment(deploymentId, force = false)
delete should be(OK)
val rollbackId = delete.deploymentId.value
Then("old deployment should be canceled and rollback-deployment succeed")
// Both deployment events may come out of order
val waitingFor = Map[String, CallbackEvent => Boolean](
"deployment_failed" -> (_.id == deploymentId),
"deployment_success" -> (_.id == rollbackId))
waitForEventsWith(s"waiting for canceled $deploymentId and successful $rollbackId", waitingFor)
Then("no more deployment in the queue")
WaitTestSupport.waitUntil("Deployments get removed from the queue") {
marathon.listDeploymentsForPathId(id).value.isEmpty
}
Then("the app should also be gone")
marathon.app(id) should be(NotFound)
}
// Regression for MARATHON-8537
"manage yet another deployment rollback" in {
Given("an existing app")
val id: PathId = appId(Some("yet-another-deployment-rollback"))
val app = App(
id = id.toString,
cmd = Some("sleep 12345"),
instances = 2,
backoffFactor = 1d,
upgradeStrategy = Some(UpgradeStrategy(maximumOverCapacity = 0d, minimumHealthCapacity = 0d))
)
val created = marathon.createAppV2(app)
created shouldBe Created
waitForDeployment(created)
And("it is updated with an impossible constraint")
val updated = marathon.updateApp(id, AppUpdate(cpus = Some(1000d), cmd = Some("na"), instances = Some(1)))
updated shouldBe OK
val deploymentId = updated.deploymentId.value
Then("we wait for the first new instance to become scheduled")
// waitForEventWith("instance_changed_event", ev => ev.info("goal") == "Scheduled", s"event instance_changed_event with goal = Scheduled to arrive")
// But since we don't have this event now, we just simply try to wait for 5s which seems to work too ¯\\_(ツ)_/¯
val start = System.currentTimeMillis()
eventually(System.currentTimeMillis() should be >= (start + 5000))(config = PatienceConfig(10.seconds, 100.millis), pos = Position.here)
And("cancel previous update")
val canceled = marathon.deleteDeployment(deploymentId)
canceled shouldBe OK
Then(s"rollback should be successful and ${app.instances} tasks running")
waitForDeployment(canceled)
waitForTasks(id, 2) //make sure, all the tasks have really started
}
// Regression for MARATHON-8537
"rollback deployment with default deployment strategy" in {
Given("an existing app")
val id: PathId = appId(Some("deployment-rollback-default-version"))
val app = App(
id = id.toString,
cmd = Some("sleep 12345"),
backoffFactor = 1d
)
val created = marathon.createAppV2(app)
created shouldBe Created
waitForDeployment(created)
And("it is updated with an impossible constraint")
val updated = marathon.updateApp(id, AppUpdate(cpus = Some(1000d), cmd = Some("na")))
updated shouldBe OK
val deploymentId = updated.deploymentId.value
Then("we wait for the first new instance to become scheduled")
// waitForEventWith("instance_changed_event", ev => ev.info("goal") == "Scheduled", s"event instance_changed_event with goal = Scheduled to arrive")
// But since we don't have this event now, we just simply try to wait for 5s which seems to work too ¯\\_(ツ)_/¯
val start = System.currentTimeMillis()
eventually(System.currentTimeMillis() should be >= (start + 5000))(config = PatienceConfig(10.seconds, 100.millis), pos = Position.here)
And("cancel previous update")
val canceled = marathon.deleteDeployment(deploymentId)
canceled shouldBe OK
Then(s"rollback should be successful and ${app.instances} tasks running")
waitForDeployment(canceled)
waitForTasks(id, 1) //make sure, all the tasks have really started
}
"Docker info is not automagically created" in {
Given("An app with MESOS container")
val id = appId(Some("docker-info-is-not-automagically-created"))
val app = App(
id = id.toString,
cmd = Some("sleep 1"),
instances = 0,
container = Some(Container(`type` = EngineType.Mesos))
)
When("The request is sent")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
When("We fetch the app definition")
val getResult1 = marathon.app(id)
val maybeContainer1 = getResult1.value.app.container
Then("The container should still be of type MESOS")
maybeContainer1 should not be empty
maybeContainer1.get.`type` should be(EngineType.Mesos)
And("container.docker should not be set")
maybeContainer1.get.docker shouldBe empty
When("We update the app")
val update = marathon.updateApp(id, AppUpdate(cmd = Some("sleep 100")))
Then("The app gets updated")
update should be(OK)
waitForDeployment(update)
When("We fetch the app definition")
val getResult2 = marathon.app(id)
val maybeContainer2 = getResult2.value.app.container
Then("The container should still be of type MESOS")
maybeContainer2 should not be empty
maybeContainer2.get.`type` should be(EngineType.Mesos)
And("container.docker should not be set")
maybeContainer2.get.docker shouldBe empty
}
"create a simple app with a docker container and update it" in {
Given("a new app")
val id = appId(Some("with-docker-container-and-update-it"))
val app = App(
id = id.toString,
cmd = Some("cmd"),
container = Some(Container(
`type` = EngineType.Docker,
docker = Some(DockerContainer(
image = "jdef/helpme"
)),
portMappings = Option(Seq(
ContainerPortMapping(containerPort = 3000, protocol = NetworkProtocol.Tcp)
))
)),
instances = 0,
networks = Seq(Network(mode = NetworkMode.ContainerBridge))
)
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
// change port from 3000 to 4000
val appUpdate = AppUpdate(container = Some(raml.Container(
EngineType.Docker,
docker = Some(raml.DockerContainer(
image = "jdef/helpme"
)), portMappings = Option(Seq(
ContainerPortMapping(containerPort = 4000, protocol = NetworkProtocol.Tcp)
))
)))
val updateResult = marathon.updateApp(app.id.toPath, appUpdate, force = true)
And("The app is updated")
updateResult should be(OK)
Then("The container is updated correctly")
val updatedApp = marathon.app(id)
updatedApp.value.app.container should not be None
updatedApp.value.app.container.flatMap(_.portMappings).exists(_.nonEmpty) should be(true)
updatedApp.value.app.container.flatMap(_.portMappings).flatMap(_.headOption.map(_.containerPort)) should contain(4000)
}
"create a simple app with tty configured should succeed" in {
Given("a new app")
val app = appProxy(appId(Some("with-tty-configured-should-succeed")), "v1", instances = 1, healthCheck = None).copy(tty = Some(true), cmd = Some("if [ -t 0 ] ; then sleep 100; else exit 1; fi"))
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
waitForStatusUpdates("TASK_RUNNING")
}
"create a simple app with tty configured should fail" in {
Given("a new app")
val app = appProxy(appId(Some("with-tty-configured-should-fail")), "v1", instances = 1, healthCheck = None).copy(cmd = Some("if [ -t 0 ] ; then sleep 100; else exit 1; fi"))
When("The app is deployed")
val result = marathon.createAppV2(app)
Then("The app is created")
result should be(Created)
extractDeploymentIds(result) should have size 1
waitForDeployment(result)
waitForStatusUpdates("TASK_FAILED")
}
"update an app and make sure ports are set correctly" in {
Given("an app update app")
val applicationId = PathId("/tomcat")
val appUpdateJson = """{
| "id":"tomcat",
| "mem":512,
| "cpus":1.0,
| "instances":1,
| "container": {
| "type":"DOCKER",
| "docker": {
| "image":"tomcat:8.0",
| "network":"HOST"
| }
| }
|}""".stripMargin
When("creating an app using PUT")
marathon.putAppByteString(applicationId, ByteString.fromString(appUpdateJson))
Then("port definitions are set correctly")
val updatedApp = marathon.app(applicationId)
updatedApp.value.app.portDefinitions should not be None
val port = updatedApp.value.app.portDefinitions.map(pd => pd.head.port).get
port should be >= 10000
port should be <= 20000
}
}
  // Baseline RAML HTTP health check shared by the health-check tests above;
  // individual tests derive variants via `copy` (protocol, port, portIndex).
  private val ramlHealthCheck = AppHealthCheck(
    path = Some("/health"),
    protocol = AppHealthCheckProtocol.Http,
    gracePeriodSeconds = 20,
    intervalSeconds = 1,
    maxConsecutiveFailures = 10,
    portIndex = Some(0),
    delaySeconds = 2
  )
}
|
gsantovena/marathon
|
tests/integration/src/test/scala/mesosphere/marathon/integration/AppDeployIntegrationTest.scala
|
Scala
|
apache-2.0
| 37,431 |
/** Lazily-branching conditional: thanks to the by-name parameters, only the
  * selected branch is ever evaluated — mirroring the built-in `if`.
  */
def if2[A](cond: Boolean, onTrue: => A, onFalse: => A): A =
  cond match {
    case true  => onTrue
    case false => onFalse
  }
def maybeTwice(b: Boolean, i: => Int)= if (b) i+i else 0
def maybeTwice3(b: Boolean, i: Int)= if (b) i+i else 0
/** Caches the by-name argument in a `lazy val`, so `i` is evaluated at most
  * once even though the result is used twice (and never when `b` is false).
  */
def maybeTwice2(b: Boolean, i: => Int) = {
  lazy val cached = i
  if (!b) 0 else cached + cached
}
|
Tomcruseal/FunctionalLearn
|
fpinscala/src/main/scala/fpinScala/lazyBoy/lazzyIf.scala
|
Scala
|
mit
| 289 |
package feh.tec.cvis
import java.text.DecimalFormat
import java.util.UUID
import javax.swing.SwingConstants
import javax.swing.table.{DefaultTableCellRenderer, DefaultTableModel}
import breeze.stats
import breeze.stats.DescriptiveStats
import feh.dsl.swing2.{Monitor, Var}
import feh.tec.cvis.common.AreaDescriptor.{HasStatistics, SingleChannel}
import feh.tec.cvis.common.ChannelDescriptor.Statistics
import feh.tec.cvis.common.cv.Helper._
import feh.tec.cvis.common.cv.describe.CallHistory.{TypedArgEntry, ArgEntry}
import feh.tec.cvis.common.cv.describe.{CallHistory, CallDescriptor, CallHistoryContainer, ArgDescriptor}
import feh.tec.cvis.common.cv.describe.ArgModifier.MinCap
import feh.tec.cvis.common.{AreaDescriptor, ChannelDescriptor, ImageDescriptor}
import feh.tec.cvis.gui.GenericSimpleAppFrameImplementation
import feh.tec.cvis.gui.configurations.ConfigBuildHelper
import feh.tec.cvis.gui.configurations.GuiArgModifier.Step
import feh.util._
import org.opencv.core._
import scala.swing.{Component, ScrollPane, Table}
trait DescriptorsSupport {
env: GenericSimpleAppFrameImplementation with ConfigBuildHelper =>
import DescriptorsSupport._
trait DescriptorsSupportFrame extends ConfigurationsPanelBuilder with MatSupport {
frame: GenericSimpleAppFrame
with FrameExec
with LayoutDSL
with HistorySupport
with ConfigBuildHelperGUI =>
lazy val imageDescriptors: Var[CallHistoryContainer[Map[Point, ADescriptor]]] = Var(CallHistoryContainer.empty(Map()))
trait DescriptorsPanel
extends SimpleVerticalPanel
with PanelExecHistory[(Mat, Set[Point]), Seq[(Point, ADescriptor)]]
with ConfigBuildHelperPanel
{
def steps = 1
def classTag = scala.reflect.classTag[Seq[(Point, ADescriptor)]]
def params: Set[ArgEntry[_]] = Set( TypedArgEntry(DescriptorSideSize, descriptorSideSize.get) )
def callDescriptor: CallDescriptor[Seq[(Point, ADescriptor)]] = CallDescriptor("descriptors")
def setResult: (CallHistoryContainer[Seq[(Point, ADescriptor)]]) => Unit =
imageDescriptors set _.affect(CallHistory.Entry("toMap"))(_.toMap)
lazy val descriptorSideSize = Var(1)
object DescriptorSideSize extends ArgDescriptor[Int]("Descriptor side", null, MinCap(1), Step(2))
lazy val descriptorSideSizeControl = mkNumericControl(DescriptorSideSize)(descriptorSideSize.get, descriptorSideSize.set)
private def descriptorGroupsInfoModel(data: Seq[(Point, ADescriptor)]) ={
val names = "Point" :: "Mean" :: "StDev" :: "Range" :: "IQR" :: Nil
val dArr = data.toArray.map{
case (p, descr) => Array[AnyRef](p.pairInt,
Double.box(descr.channel.mean),
Double.box(descr.channel.std),
Double.box(descr.channel.range),
Double.box(descr.channel.iqr))
}
new DefaultTableModel(dArr, names.toArray[AnyRef]){
override def isCellEditable(row: Int, column: Int) = false
}
}
lazy val descriptorGroupsInfo = Monitor.custom(imageDescriptors, new Table){
c =>
c.model = descriptorGroupsInfoModel(Nil)
lazy val formatter = new DecimalFormat("0.###E0")
c.peer.setDefaultRenderer(classOf[java.lang.Double], new DefaultTableCellRenderer.UIResource{
setHorizontalAlignment(SwingConstants.RIGHT)
override def setValue(value: AnyRef): Unit = setText(Option(value) map formatter.format getOrElse "")
})
}{
c =>
t =>
c.model = descriptorGroupsInfoModel(t.value.toSeq)
}
lazy val formBuilders: Seq[(String, (AbstractDSLBuilder, DSLLabelBuilder[_]))] = Seq(
"descriptorSideSize" -> descriptorSideSizeControl
)
override lazy val elems: Seq[(String, Seq[Component])] = mkElems ++ Seq(
"descriptorGroupsInfo" -> Seq(new ScrollPane(descriptorGroupsInfo.component))
)
// todo: bounding conditions!
      // Builds an area descriptor around interest point `p` of image `img`.
      // `sideSize` is the side length of the square sample window and must be
      // odd (enforced via `ensuring`); `n` is the half-window radius.
      // NOTE(review): `n > 1` means sideSize >= 5 — a 3x3 window (sideSize=3,
      // n=1) falls through to the single-pixel branch; confirm that is intended.
      // NOTE(review): `p.x` is compared against `img.width` but passed as the
      // row range to `submat` (OpenCV submat is (rowStart, rowEnd, colStart,
      // colEnd)) — x/width vs y/height may be swapped here; confirm.
      def mkDescriptor(img: Mat, sideSize: Int)(p: Point) = {
        val n = (sideSize - 1).ensuring(_ % 2 == 0, "sideSize must be odd") / 2
        val subMatOpt = if(n > 1)
          if(p.x + n > img.width || p.x - n < 0 || p.y + n > img.height || p.y - n < 0) None
          else Some(img.submat(p.x.toInt-n, p.x.toInt+n, p.y.toInt-n, p.y.toInt+n))
        else Some(new MatOfDouble(img.get(p.x.toInt, p.y.toInt): _*))
        // Flattened window samples; empty when the window fell outside the image.
        val data: Array[Double] = subMatOpt.map(_.toArray[Double]) getOrElse Array.empty
        // Summary statistics over the window: mean, stddev, range and
        // interquartile range (75th minus 25th percentile).
        if(data.nonEmpty) Some( ADescriptor( sideSize
                                           , data
                                           , stats.mean(data)
                                           , stats.stddev(data)
                                           , data.max - data.min
                                           , DescriptiveStats.percentile(data, 0.75) - DescriptiveStats.percentile(data, 0.25)
                                           ))
        else None
      }
// Cooperative cancellation point: raises Interrupted when an abort was requested.
protected def throwIfInterrupted(): Unit = if(interrupted_?) throw Interrupted
// Pipeline step: converts the image to (normalized) 64-bit floats, computes a
// descriptor for every interest point and drops points whose patch was rejected
// (mkDescriptor returned None, e.g. at the image border).
lazy val runner: Runner[Params, (Mat, Set[Point]), Seq[(Point, ADescriptor)]] = Runner(
nextStep =>
params =>
{
case (img, iPoints) => iPoints.toSeq
.zipMap(mkDescriptor(img.convert(CvType.CV_64F).normalize /* todo: normalize? */, params.arg(DescriptorSideSize)))
.filter(_._2.isDefined)
.mapVals(_.get)
}
)
}
}
}
object DescriptorsSupport{
/** Descriptor of a whole image: identification, original pixel data and the
  * per-interest-point area descriptors (plus their computation history). */
case class IDescriptor( name: String
, sideLength: Int
, matType: Int
, javaType: Int
, originalSize: Size
, originalImage: Array[Byte]
, interestPoints: Map[Point, ADescriptor]
, interestPointsHistory: CallHistory[Map[Point, ADescriptor]]
)
(val id: Option[UUID] = None) extends ImageDescriptor
{
type ADescriptor = DescriptorsSupport.ADescriptor
// single-channel descriptors only
def descriptorChannels = 1
}
/** Single-channel square-area descriptor: flat patch data plus precomputed statistics. */
case class ADescriptor( sideLength: Int
, data : Array[Double]
, mean : Double
, std : Double
, range : Double
, iqr : Double
)
extends AreaDescriptor with SingleChannel with HasStatistics
{
type Channel = ChannelDescriptor with Statistics
lazy val channel: Channel = new ChannelDescriptor with Statistics{
def data = ADescriptor.this.data
// NOTE(review): `n` treats sideLength as a half-width (n = 2*sideLength + 1),
// while mkDescriptor passes the full (odd) side size — confirm the intended convention.
def n = sideLength*2+1
def sideRange = 0 until n
// row-major view of the flat data array
lazy val byRows: Array2D[Double] = (
for(i <- sideRange) yield (
for(j <- sideRange) yield data(i*n + j)
).toArray
).toArray
// column-major view of the flat data array
lazy val byCols: Array2D[Double] = (
for(j <- sideRange) yield (
for(i <- sideRange) yield data(i*n + j)
).toArray
).toArray
// statistics simply delegate to the enclosing case class fields
def mean = ADescriptor.this.mean
def std = ADescriptor.this.std
def range = ADescriptor.this.range
def iqr = ADescriptor.this.iqr
}
}
}
|
fehu/comp-vis
|
harris-app/src/main/scala/feh/tec/cvis/DescriptorsSupport.scala
|
Scala
|
mit
| 7,504 |
package com.googlecode.kanbanik.commands
import com.googlecode.kanbanik.builders.TaskBuilder
import com.googlecode.kanbanik.model.{User, Task, Board}
import com.googlecode.kanbanik.dtos.{ErrorDto, GetTasksDto, TasksDto}
/** Returns every task on every board visible to the given user, converted to DTOs. */
class GetTasksCommand extends Command[GetTasksDto, TasksDto] {

  // The builder is stateless, so one lazily-created instance suffices.
  lazy val taskBuilder = new TaskBuilder

  override def execute(dto: GetTasksDto, user: User): Either[TasksDto, ErrorDto] = {
    // Load all boards (with tasks, optionally with descriptions), flatten
    // their tasks and convert each to its DTO representation.
    val taskDtos =
      Board.all(includeTasks = true, includeTaskDescription = dto.includeDescription, None, None, user)
        .flatMap(_.tasks)
        .map(taskBuilder.buildDto)
    Left(TasksDto(taskDtos))
  }
}
|
gudtago/kanbanik
|
kanbanik-server/src/main/scala/com/googlecode/kanbanik/commands/GetTasksCommand.scala
|
Scala
|
apache-2.0
| 638 |
/*
* Copyright 2013 David Savage
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.chronologicalthought.modula
/**
* @author David Savage
*/
// TODO javadoc
/** A named, versioned unit of the module system that provides capabilities
  * and declares requirements on other parts' capabilities. */
trait Part {
// TODO doesn't part need def namespace: String for part type?
/** Human-readable identifier of this part. */
def name: String
/** Version of this part. */
def version: Version
/** What this part offers to other parts. */
def capabilities: List[Capability]
/** What this part needs from other parts. */
def requirements: List[Requirement]
override def toString = super.toString + "->" + name + ":" + version
// TODO move these helper methods to implicit RichPart type?
// TODO investigate implicit flatten method
/** Recursively flattens composite parts into the list of their leaf parts;
  * a non-composite part flattens to a single-element list. */
def flatten: List[Part] = {
val f = this match {
case c@CompositePart(parts) => {
val start = List[Part]()
val merged: List[Part] = parts.foldLeft(start) {
(ps, p) => {
assert(p != null)
assert(ps != null, "Invalid parts in " + parts)
p.flatten ::: ps
}
}
merged
}
case p: Part => p :: Nil
}
assert(f != null)
f
}
/** Creates a capability owned by this part. */
def newCapability(namespace: String, attributes: Map[String, Any], directives: Map[String, Any]) = {
new Capability(namespace, attributes, directives, this)
}
/** Creates a requirement owned by this part. */
def newRequirement(namespace: String, attributes: Map[String, Any], directives: Map[String, Any]) = {
new Requirement(namespace, attributes, directives, Some(this))
}
/** Re-parents each capability onto this part; see [[inheritCapability]]. */
def inheritCapabilities(caps: List[Capability]): List[Capability] = {
caps.map(inheritCapability(_)).toList
}
/** Re-parents each requirement onto this part; see [[inheritRequirement]]. */
def inheritRequirements(reqs: List[Requirement]): List[Requirement] = {
reqs.map(inheritRequirement(_)).toList
}
/** Returns `cap` unchanged when already owned by this part, otherwise an
  * equivalent capability owned by this part. */
def inheritCapability(cap: Capability) = {
if (cap.part == this) {
cap
}
else {
newCapability(cap.namespace, cap.attributes, cap.directives)
}
}
/** Returns `req` unchanged when already owned by this part, otherwise an
  * equivalent requirement owned by this part (ownerless requirements are
  * adopted unconditionally). */
def inheritRequirement(req: Requirement) = {
req.part match {
case Some(p) => {
if (p == this) {
req
}
else {
newRequirement(req.namespace, req.attributes, req.directives)
}
}
case None => newRequirement(req.namespace, req.attributes, req.directives)
}
}
}
/** The root "primordial" part: empty version, no capabilities, no requirements. */
object PrimordialPart extends Part {
val name = "primordial"
val version = Version.Empty
val capabilities = Nil
val requirements = Nil
}
/**
 * A part aggregated from several constituent parts.
 *
 * Name and version are taken from the first constituent; capabilities and
 * requirements are the re-parented (inherited) union of all constituents'.
 */
final case class CompositePart(parts: List[Part]) extends Part {
  thisPart =>

  assert(parts != null)
  assert(!parts.isEmpty)

  val name = parts.head.name
  val version = parts.head.version

  // Re-parent the constituents' declarations onto this composite.
  lazy val requirements = parts.flatMap(p => inheritRequirements(p.requirements)).toList
  lazy val capabilities = parts.flatMap(p => inheritCapabilities(p.capabilities)).toList

  // Equality is structural on the constituent list only (name/version are
  // derived from it). Rewritten as a pattern match: the original
  // isInstanceOf/asInstanceOf/return version was flagged as flaky by its own
  // TODO; behaviour (including null handling) is unchanged.
  override def equals(other: Any): Boolean = other match {
    case that: CompositePart => (this eq that) || parts == that.parts
    case _ => false
  }

  // Must stay consistent with equals: derived from the same constituent list.
  override lazy val hashCode: Int = parts.hashCode * 7

  override def toString = parts.mkString("Composite(", ":", ")")
}
|
davemssavage/modula
|
api/src/main/scala/org/chronologicalthought/modula/Part.scala
|
Scala
|
apache-2.0
| 3,686 |
package com.lucidchart.aspell
import org.apache.commons.io.IOUtils
import java.io._
import resource._
private[aspell] object NativeLibraryLoader {
  /**
   * Extracts the named native library from the classpath into the JVM's
   * temp directory and loads it via System.load.
   *
   * NOTE(review): getClass.getResource(name) returns null when the resource
   * is missing, which surfaces here as a NullPointerException — consider
   * failing with a clearer error.
   */
  def load(name: String) = {
    val tempDirectory = new File(System.getProperty("java.io.tmpdir"))
    val fileName = new File(getClass.getResource(name).getPath).getName
    // Use the (parent, child) File constructor instead of string concatenation.
    val file = new File(tempDirectory, fileName)

    // Copy the library out of the jar; both streams are closed by `managed`.
    for {
      libraryStream <- managed(getClass.getResourceAsStream(name))
      fileStream <- managed(new FileOutputStream(file))
    } {
      IOUtils.copy(libraryStream, fileStream)
    }

    System.load(file.getAbsolutePath)
  }
}
|
pauldraper/lucid-aspell
|
src/main/scala/com/lucidchart/aspell/NativeLibraryLoader.scala
|
Scala
|
apache-2.0
| 627 |
package breeze.linalg
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import org.scalatest._
import org.scalatest.junit._
import org.scalatest.prop._
import org.junit.runner.RunWith
import breeze.math.Complex
import breeze.numerics._
import org.scalatest.matchers.ShouldMatchers
import breeze.util.DoubleImplicits
/** Unit tests for breeze's DenseMatrix: slicing (including write-through views),
  * transposition, arithmetic over several element types, reshaping, linear
  * solving, axis reductions and assorted regression tests. */
@RunWith(classOf[JUnitRunner])
class DenseMatrixTest extends FunSuite with Checkers with ShouldMatchers with DoubleImplicits {
test("Slicing") {
val m = DenseMatrix((0,1,2),
(3,4,5))
// slice sub-matrix
val s1 = m(0 to 1, 1 to 2)
assert(s1 === DenseMatrix((1,2),(4,5)))
// slices are views: writing through them mutates the original matrix
s1 += 1
assert(m === DenseMatrix((0,2,3),(3,5,6)))
// slice row
val s2 = m(0, ::)
assert(s2 === DenseVector(0,2,3).t)
s2 *= 2
assert(m === DenseMatrix((0,4,6),(3,5,6)))
// slice column
val s3 : DenseVector[Int] = m(::, 1)
assert(s3 === DenseVector(4,5))
s3 -= 1
assert(m === DenseMatrix((0,3,6),(3,4,6)))
// slice rows
val s4 = m(1 to 1, ::)
assert(s4 === DenseMatrix((3,4,6)))
val mbig = DenseMatrix(
(0,1,2,3,4,5),
(3,4,5,6,7,8),
(3,4,5,6,7,8),
(5,4,5,9,7,8)
)
// strided column slice
val sbig1 = mbig(::, 0 to 2 by 2)
assert(sbig1 === DenseMatrix(
(0,2),
(3,5),
(3,5),
(5,5)
))
// slice columns
val s5 = m(::, 1 to 2)
assert(s5 === DenseMatrix((3,6),(4,6)))
// slice part of a row
val s6a = m(0, 1 to 2)
s6a += 1
assert(m === DenseMatrix((0,4,7),(3,4,6)))
// slice part of a column
val s7a = m(0 to 1, 0)
s7a += 2
val s7b = m(0 to 1,0)
s7b += 1
assert(m === DenseMatrix((3,4,7),(6,4,6)))
}
test("Multiple Slicing") {
val m = new DenseMatrix(6, (1 to 36).toArray)
val slice1 = m(1 to 3, 1 to 3)
assert(slice1(::, 1) === DenseVector(14, 15, 16))
assert(slice1(::, 1 to 2) === DenseMatrix((14, 20), (15, 21), (16, 22)))
}
test("Transpose") {
val m = DenseMatrix((1,2,3),(4,5,6))
// check that the double transpose gives us back the original
assert(m.t.t == m)
// check static type and write-through
val t = m.t
assert(t === DenseMatrix((1,4),(2,5),(3,6)))
t(0,1) = 0
assert(m === DenseMatrix((1,2,3),(0,5,6)))
}
test("Sliced Transpose") {
val m = DenseMatrix((0, 1, 2),
(3, 4, 5))
// column of original looks same as row of tranpose
val sm1 = m(::, 1)
val smt1 = m.t(1, ::)
assert(sm1.t === smt1)
val sm2 = m(::, 2)
val smt2 = m.t(2, ::)
assert(sm2.t === smt2)
val sm1c = m(1, ::)
val smt1c = m.t(::, 1)
assert(sm1c === smt1c.t)
val sm2c = m(0, ::)
val smt2c = m.t(::, 0)
assert(sm2c === smt2c.t)
// slice sub-matrix
val s1 = m(0 to 1, 1 to 2)
assert(s1 === DenseMatrix((1, 2), (4, 5)))
val t1 = s1.t
assert(t1 === DenseMatrix((1, 4), (2, 5)))
val t1b = m.t(1 to 2, 0 to 1)
assert(t1 === t1b)
val s2 = m(0 to 1, 1)
val t2 = m.t(1, 0 to 1)
assert(s2 === t2.t)
val s3 = m(0, 0 to 1)
val t3 = m.t(0 to 1, 0)
assert(s3.t === t3)
{
val s2 = m(0 to 1, ::)
val t2 = m.t(::, 0 to 1)
assert(s2.t === t2)
assert(s2 === t2.t)
val s3 = m(::, 0 to 1)
val t3 = m.t(0 to 1, ::)
assert(s3.t === t3)
assert(s3 === t3.t)
}
}
test("Min/Max") {
val m = DenseMatrix((1,0,0),(2,3,-1))
assert(argmin(m) === (1,2))
assert(argmax(m) === (1,1))
assert(min(m) === -1)
assert(max(m) === 3)
}
test("MapValues") {
val a : DenseMatrix[Int] = DenseMatrix((1,0,0),(2,3,-1))
val b1 : DenseMatrix[Int] = a.mapValues(_ + 1)
assert(b1 === DenseMatrix((2,1,1),(3,4,0)))
val b2 : DenseMatrix[Double] = a.mapValues(_ + 1.0)
assert(b2 === DenseMatrix((2.0,1.0,1.0),(3.0,4.0,0.0)))
}
/*
test("Map Triples") {
val a : DenseMatrix[Int] = DenseMatrix((1,0,0),(2,3,-1))
val b1 : DenseMatrix[Int] = a.mapTriples((i,j,v) => i + v)
assert(b1 === DenseMatrix((1,0,0),(3,4,0)))
val b2 : DenseMatrix[Double] = a.mapTriples((i,j,v) => j + v.toDouble)
assert(b2 === DenseMatrix((1.0,1.0,2.0),(2.0,4.0,1.0)))
}
test("Triples") {
val a : DenseMatrix[Int] = DenseMatrix((1,0,0),(2,3,-1))
var s = 0
// foreach
s = 0
for ((i,j,v) <- a.triples) s += v
assert(s === sum(a))
// filter
s = 0
for ((i,j,v) <- a.triples; if i % 2 == 0 || j % 2 == 0) s += v
assert(s === 1+2-1)
// // map
// val b1 : DenseMatrix[Double] = for ((i,j,v) <- a) yield v * 2.0
// assert(b1 === DenseMatrix((2.0,0.0,0.0),(4.0,6.0,-2.0)))
//
// // map with filter
// val b2 : DenseMatrix[Int] = for ((i,j,v) <- a; if j == 0) yield v * 2
// assert(b2 === DenseMatrix((2,0,0),(4,0,0)))
}
*/
test("set") {
{
val a = DenseMatrix.zeros[Int](2,2)
val b = DenseMatrix((1,0),(2,3))
a := b
assert(a === b)
}
val a = DenseMatrix.zeros[Int](2,3)
val b = DenseMatrix((1,0,5),(2,3,-1))
a := b
assert(a === b)
}
test("horzcat") {
val a : DenseMatrix[Int] = DenseMatrix((1,0,5),(2,3,-1))
val result: DenseMatrix[Int] = DenseMatrix((1,0,5,1,0, 5),(2,3,-1,2,3,-1))
assert(DenseMatrix.horzcat(a,a) === result)
}
test("vertcat") {
val a : DenseMatrix[Int] = DenseMatrix((1,0,5),(2,3,-1))
val result: DenseMatrix[Int] = DenseMatrix((1,0,5),(2,3,-1),(1,0,5),(2,3,-1))
assert(DenseMatrix.vertcat(a,a) === result)
}
test("Multiply") {
val a = DenseMatrix((1.0, 2.0, 3.0),(4.0, 5.0, 6.0))
val b = DenseMatrix((7.0, -2.0, 8.0),(-3.0, -3.0, 1.0),(12.0, 0.0, 5.0))
val c = DenseVector(6.0,2.0,3.0)
val cs = SparseVector(6.0,2.0,3.0)
assert(a * b === DenseMatrix((37.0, -8.0, 25.0), (85.0, -23.0, 67.0)))
assert(a * c === DenseVector(19.0,52.0))
assert(b * c === DenseVector(62.0, -21.0, 87.0))
assert(a * cs === DenseVector(19.0,52.0))
assert(b * cs === DenseVector(62.0, -21.0, 87.0))
assert(b.t * c === DenseVector(72.0, -18.0, 65.0))
assert(a.t * DenseVector(4.0, 3.0) === DenseVector(16.0, 23.0, 30.0))
// should be dense
val x:DenseMatrix[Double] = a * a.t
assert(x === DenseMatrix((14.0,32.0),(32.0,77.0)))
// should be dense
val y:DenseMatrix[Double] = a.t * a
assert(y === DenseMatrix((17.0,22.0,27.0),(22.0,29.0,36.0),(27.0,36.0,45.0)))
val z : DenseMatrix[Double] = b * (b + 1.0)
assert(z === DenseMatrix((164.0,5.0,107.0),(-5.0,10.0,-27.0),(161.0,-7.0,138.0)))
}
test("Multiply Int") {
val a = DenseMatrix((1, 2, 3),(4, 5, 6))
val b = DenseMatrix((7, -2, 8),(-3, -3, 1),(12, 0, 5))
val c = DenseVector(6,2,3)
assert(a * b === DenseMatrix((37, -8, 25), (85, -23, 67)))
assert(a * c === DenseVector(19,52))
assert(b * c === DenseVector(62, -21, 87))
assert(b.t * c === DenseVector(72, -18, 65))
assert(a.t * DenseVector(4, 3) === DenseVector(16, 23, 30))
// should be dense
val x = a * a.t
assert(x === DenseMatrix((14,32),(32,77)))
// should be dense
val y = a.t * a
assert(y === DenseMatrix((17,22,27),(22,29,36),(27,36,45)))
val z : DenseMatrix[Int] = b * ((b + 1):DenseMatrix[Int])
assert(z === DenseMatrix((164,5,107),(-5,10,-27),(161,-7,138)))
}
test("Multiply Boolean") {
val a = DenseMatrix((true, true, true),(true, true, true))
val b = DenseMatrix((true, false, true),(true, false, true),(true, false, true))
assert(a * b === DenseMatrix((true, false, true),(true, false, true)))
}
test("Multiply Float") {
val a = DenseMatrix((1.0f, 2.0f, 3.0f),(4.0f, 5.0f, 6.0f))
val b = DenseMatrix((7.0f, -2.0f, 8.0f),(-3.0f, -3.0f, 1.0f),(12.0f, 0.0f, 5.0f))
val c = DenseVector(6.0f,2.0f,3.0f)
val cs = SparseVector(6.0f,2.0f,3.0f)
assert(a * b === DenseMatrix((37.0f, -8.0f, 25.0f), (85.0f, -23.0f, 67.0f)))
assert(a * c === DenseVector(19.0f,52.0f))
assert(b * c === DenseVector(62.0f, -21.0f, 87.0f))
assert(a * cs === DenseVector(19.0f,52.0f))
assert(b * cs === DenseVector(62.0f, -21.0f, 87.0f))
assert(b.t * c === DenseVector(72.0f, -18.0f, 65.0f))
assert(a.t * DenseVector(4.0f, 3.0f) === DenseVector(16.0f, 23.0f, 30.0f))
// should be dense
val x = a * a.t
assert(x === DenseMatrix((14.0f,32.0f),(32.0f,77.0f)))
// should be dense
val y = a.t * a
assert(y === DenseMatrix((17.0f,22.0f,27.0f),(22.0f,29.0f,36.0f),(27.0f,36.0f,45.0f)))
val z : DenseMatrix[Float] = b * (b + 1.0f)
assert(z === DenseMatrix((164.0f,5.0f,107.0f),(-5.0f,10.0f,-27.0f),(161.0f,-7.0f,138.0f)))
}
test("Multiply Complex") {
val a = DenseMatrix((Complex(1,1), Complex(2,2), Complex(3,3)),
(Complex(4,4), Complex(5,5), Complex(6,6)))
val b = DenseMatrix((Complex(7,7), Complex(-2,-2), Complex(8,8)),
(Complex(-3,-3), Complex(-3,-3), Complex(1,1)),
(Complex(12,12), Complex(0,0), Complex(5,5)))
val c = DenseVector(Complex(6,0), Complex(2,0), Complex(3,0))
val cs = SparseVector(Complex(6,0), Complex(2,0), Complex(3,0))
assert(a * b === DenseMatrix((Complex(0,74), Complex(0,-16), Complex(0,50)),
(Complex(0,170), Complex(0,-46), Complex(0,134))))
assert(b * c === DenseVector(Complex(62,62), Complex(-21,-21), Complex(87,87)))
assert(b * cs === DenseVector(Complex(62,62), Complex(-21,-21), Complex(87,87)))
assert(b.t * c === DenseVector(Complex(72,-72), Complex(-18,18), Complex(65,-65)))
}
test("toDenseVector") {
val a = DenseMatrix((1,2,3), (4,5,6))
val b = a(0 to 1, 1 to 2)
val c = b.t
// column-major flattening, also for views and transposed views
assert(a.toDenseVector === DenseVector(1,4,2,5,3,6))
assert(b.toDenseVector === DenseVector(2,5,3,6))
assert(c.toDenseVector === DenseVector(2,3,5,6))
}
test("flattenView") {
val a = DenseMatrix((1,2,3), (4,5,6))
a.flatten(true)(2) = 4
assert(a === DenseMatrix((1,4,3), (4,5,6)))
}
test("Trace") {
assert(trace(DenseMatrix((1,2),(4,5))) === 1 + 5)
assert(trace(DenseMatrix((1,2,3),(3,4,5),(5,6,7))) == 1 + 4 + 7)
assert(trace(DenseMatrix((1,2,3),(4,5,6),(7,8,9))) === 1 + 5 + 9)
}
test("Reshape") {
val m : DenseMatrix[Int] = DenseMatrix((1,2,3),(4,5,6))
val r : DenseMatrix[Int] = m.reshape(3, 2, true)
// reshape with view=true shares the underlying data array
assert(m.data eq r.data)
assert(r.rows === 3)
assert(r.cols === 2)
assert(r === DenseMatrix((1,5),(4,3),(2,6)))
}
test("Reshape transpose") {
val m : DenseMatrix[Int] = DenseMatrix((1,2,3),(4,5,6)).t
val r : DenseMatrix[Int] = m.reshape(2, 3, true)
assert(m.data eq r.data)
assert(r.rows === 2)
assert(r.cols === 3)
assert(r === DenseMatrix((1,5),(4,3),(2,6)).t)
}
test("Solve") {
// square solve
val r1 : DenseMatrix[Double] = DenseMatrix((1.0,3.0),(2.0,0.0)) \\ DenseMatrix((1.0,2.0),(3.0,4.0))
assert(r1 === DenseMatrix((1.5, 2.0), (-1.0/6, 0.0)))
// matrix-vector solve
val r2 : DenseVector[Double] = DenseMatrix((1.0,3.0,4.0),(2.0,0.0,6.0)) \\ DenseVector(1.0,3.0)
assert( (r2 - DenseVector(0.1813186813186811, -0.3131868131868131, 0.43956043956043944)).norm(inf) < 1E-5)
// wide matrix solve
val r3 : DenseMatrix[Double] = DenseMatrix((1.0,3.0,4.0),(2.0,0.0,6.0)) \\ DenseMatrix((1.0,2.0),(3.0,4.0))
matricesNearlyEqual(r3,
DenseMatrix((0.1813186813186811, 0.2197802197802196),
(-0.3131868131868131, -0.1978021978021977),
(0.43956043956043944, 0.5934065934065933)))
// tall matrix solve
val r4 : DenseMatrix[Double] = DenseMatrix((1.0,3.0),(2.0,0.0),(4.0,6.0)) \\ DenseMatrix((1.0,4.0),(2.0,5.0),(3.0,6.0))
assert( max(abs(r4 - DenseMatrix((0.9166666666666667, 1.9166666666666672),
(-0.08333333333333352, -0.08333333333333436)))) < 1E-5)
}
test("Solve Float") {
// square solve
val r1 : DenseMatrix[Float] = DenseMatrix((1.0f,3.0f),(2.0f,0.0f)) \\ DenseMatrix((1.0f,2.0f),(3.0f,4.0f))
assert(r1 === DenseMatrix((1.5f, 2.0f), (-1.0f/6, 0.0f)))
// matrix-vector solve
val r2 : DenseVector[Float] = DenseMatrix((1.0f,3.0f,4.0f),(2.0f,0.0f,6.0f)) \\ DenseVector(1.0f,3.0f)
assert( (r2 - DenseVector(0.1813186813186811f, -0.3131868131868131f, 0.43956043956043944f)).norm(inf) < 1E-5)
// wide matrix solve
val r3 : DenseMatrix[Float] = DenseMatrix((1.0f,3.0f,4.0f),(2.0f,0.0f,6.0f)) \\ DenseMatrix((1.0f,2.0f),(3.0f,4.0f))
assert( max(abs(r3 - DenseMatrix((0.1813186813186811f, 0.2197802197802196f),
(-0.3131868131868131f, -0.1978021978021977f),
(0.43956043956043944f, 0.5934065934065933f)))) < 1E-5)
// tall matrix solve
val r4 : DenseMatrix[Float] = DenseMatrix((1.0f,3.0f),(2.0f,0.0f),(4.0f,6.0f)) \\ DenseMatrix((1.0f,4.0f),(2.0f,5.0f),(3.0f,6.0f))
assert( max(abs(r4 - DenseMatrix((0.9166666666666667f, 1.9166666666666672f),
(-0.08333333333333352f, -0.08333333333333436f)))) < 1E-5)
}
test("GH#29 transpose solve is broken") {
val A = DenseMatrix((1.0,0.0),(1.0,-1.0))
val t = DenseVector(1.0,0.0)
assert(A \\ t === DenseVector(1.0, 1.0))
assert(A.t \\ t === DenseVector(1.0, 0.0))
}
test("sum") {
// Test square and rectangular matrices
assert(sum(DenseMatrix((1.0,3.0),(2.0,4.0)), Axis._0) === DenseMatrix((3.0, 7.0)))
assert(sum(DenseMatrix((1.0,3.0,5.0),(2.0,4.0,6.0)), Axis._0) === DenseMatrix((3.0, 7.0,11.0)))
assert(sum(DenseMatrix((1.0,3.0),(2.0,4.0),(5.0, 6.0)), Axis._0) === DenseMatrix((8.0, 13.0)))
assert(sum(DenseMatrix((1.0,3.0),(2.0,4.0)), Axis._1) === DenseVector(4.0, 6.0))
assert(sum(DenseMatrix((1.0,3.0,5.0),(2.0,4.0,6.0)), Axis._1) === DenseVector(9.0, 12.0))
assert(sum(DenseMatrix((1.0,3.0),(2.0,4.0),(5.0, 6.0)), Axis._1) === DenseVector(4.0, 6.0, 11.0))
assert(sum(DenseMatrix((1.0,3.0),(2.0,4.0))) === 10.0)
}
test("normalize rows and columns") {
assert(normalize(DenseMatrix((1.0,3.0),(2.0,4.0)), Axis._0, 1) === DenseMatrix((1.0/3.0, 3.0/7.0), (2.0/3.0,4.0/7.0)))
assert(normalize(DenseMatrix((1.0,3.0),(2.0,4.0)), Axis._1, 1) === DenseMatrix((1.0/4.0, 3.0/4.0), (2.0/6.0,4.0/6.0)))
// handle odd sized matrices (test for a bug.)
val dm = DenseMatrix.tabulate(2,5)( (i,j) => i * j * 1.0 + 1)
dm := normalize(dm, Axis._1, 2)
assert(abs(sum(dm(0,::).t.map(x => x * x)) - 1.0) < 1E-4, dm.toString + " not normalized!")
}
test("Generic Dense ops") {
// mostly for coverage
val a = DenseMatrix.create[String](1,1, Array("SSS"))
intercept[IndexOutOfBoundsException] {
a(3,3) = ":("
assert(false, "Shouldn't be here!")
}
assert(a(0,0) === "SSS")
intercept[IndexOutOfBoundsException] {
a(3,3)
assert(false, "Shouldn't be here!")
}
a(0,0) = ":("
assert(a(0,0) === ":(")
a := ":)"
assert(a(0,0) === ":)")
val b = DenseMatrix.zeros[String](1,1)
b := a
assert(b === a)
}
test("toString with no rows doesn't throw") {
DenseMatrix.zeros[Double](0, 2).toString
}
test("GH #30: Shaped solve of transposed and slice matrix does not work") {
val A=DenseMatrix((1.0,0.0),(1.0,-1.0))
val i = DenseMatrix.eye[Double](2)
val res = i \\ A.t(::,1)
assert(res === DenseVector(1.0,-1.0))
val res2 = i \\ A(1,::).t
assert(res2 === DenseVector(1.0,-1.0))
}
test("GH #148: out of bounds slice throws") {
val temp2 = DenseMatrix.tabulate(5,5)( (x: Int, y: Int) => x + y*10 )
intercept[IndexOutOfBoundsException] {
temp2( Range( 4, 6 ), 3 )
}
}
test("softmax on dm slices") {
val a = DenseMatrix((1.0, 2.0, 3.0))
assert(softmax(a(::, 1)) === 2.0)
}
test("Delete") {
val a = DenseMatrix((1, 2, 3),(4, 5, 6), (7,8,9))
assert(a.delete(0, Axis._0) === DenseMatrix((4, 5, 6), (7,8,9)))
assert(a.delete(1, Axis._0) === DenseMatrix((1, 2, 3), (7,8,9)))
assert(a.delete(2, Axis._0) === DenseMatrix((1, 2, 3), (4,5,6)))
assert(a.delete(0, Axis._1) === DenseMatrix((2, 3), (5,6), (8,9)))
assert(a.delete(1, Axis._1) === DenseMatrix((1, 3), (4,6), (7,9)))
assert(a.delete(2, Axis._1) === DenseMatrix((1, 2), (4,5), (7,8)))
assert(a.delete(Seq(0,2), Axis._1) === DenseMatrix(2, 5, 8))
assert(a.delete(Seq(1, 2), Axis._1) === DenseMatrix(1, 4, 7))
assert(a.delete(Seq(0,2), Axis._0) === DenseMatrix((4, 5, 6)))
assert(a.delete(Seq(1,2), Axis._0) === DenseMatrix((1, 2, 3)))
}
test("Big Int zeros are the right thing") {
val dm = DenseMatrix.zeros[BigInt](1,1)
assert(dm(0, 0) === BigInt(0))
}
test("BigInt multiply") {
val m = DenseMatrix((BigInt(1), BigInt(1)), (BigInt(1), BigInt(0)))
val m2 = DenseMatrix((1, 1), (1, 0))
assert(m * m === convert(m2 * m2, Int))
}
test("comparisons") {
val one = DenseMatrix.ones[Double](5, 6)
val zero = DenseMatrix.zeros[Double](5, 6)
assert( (one :> zero) === DenseMatrix.ones[Boolean](5, 6))
}
test("Some ill-typedness") {
import shapeless.test.illTyped
illTyped {
"""
val one = DenseMatrix.ones[Double](5, 6)
val z = DenseVector.zeros[Double](5)
(z + one)
"""
}
}
test("ensure we don't crash on weird strides") {
val dm = DenseMatrix.zeros[Double](3,3)
assert( (dm(::, 0 until 0) * dm(0 until 0, ::)) === dm)
assert( (dm(0 until 0, ::) * dm(::, 0 until 0)) === DenseMatrix.zeros[Double](0, 0))
// assert( (dm(::, 2 until 0 by -1) * dm(2 until 0 by -1, ::)) === dm)
}
test("Ensure a += a.t gives the right result") {
val dm = DenseMatrix.rand[Double](3,3)
val dmdmt = dm + dm.t
dm += dm.t
assert(dm === dmdmt)
}
// element-wise approximate-equality helper used by the solve tests
def matricesNearlyEqual(A: DenseMatrix[Double], B: DenseMatrix[Double], threshold: Double = 1E-6) {
for(i <- 0 until A.rows; j <- 0 until A.cols)
A(i,j) should be (B(i, j) plusOrMinus threshold)
}
}
|
wavelets/breeze
|
src/test/scala/breeze/linalg/DenseMatrixTest.scala
|
Scala
|
apache-2.0
| 18,378 |
// Typing-test fixture: checks that the empty block `{}` (of type Unit) is
// assignable to a var declared as AnyVal.
class C() { var x : AnyVal = {} }
object Main { def main(args: Array[String]) { } }
|
tobast/compil-petitscala
|
tests/typing/good/testfile-block-3.scala
|
Scala
|
gpl-3.0
| 85 |
package ca.uqam.euler.nicolas
/**
* If p is the perimeter of a right angle triangle with integral length
* sides, {a,b,c}, there are exactly three solutions for p = 120.
*
* {20,48,52}, {24,45,51}, {30,40,50}
*
* For which value of p ≤ 1000, is the number of solutions maximised?
*
*/
object Problem039 {
  import math._

  /** Counts the integer-sided right triangles {a, b, c} (a < b < c = p - a - b)
    * whose perimeter is exactly `p`. */
  def nbSolutions(p: Int): Int =
    (1 to p - 2).map { a =>
      (a + 1 to p - a - 1).count { b =>
        val c = p - a - b
        a * a + b * b == c * c
      }
    }.sum

  // Search all perimeters up to 1000 for the one with the most solutions.
  def main(args: Array[String]): Unit = Answer {
    (1 to 1000).maxBy(nbSolutions)
  }
}
|
nicolaspayette/project-euler
|
src/main/scala/ca/uqam/euler/nicolas/Problem039.scala
|
Scala
|
mit
| 590 |
// IDE resolver-test fixture: the inline /* resolved / path / type */ markers are
// assertions consumed by the test harness — do not edit or move them.
import collection.mutable.Buffer
import collection.mutable.{ArrayBuffer => Buffer}
println(/* resolved: false */ ArrayBuffer.getClass)
println(classOf[/* resolved: false */ ArrayBuffer])
println(/* path: scala.collection.mutable.Buffer, type: org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject */ Buffer.getClass)
println(classOf[/* path: scala.collection.mutable.Buffer */ Buffer])
|
ilinum/intellij-scala
|
testdata/resolve2/import/alias/clash/RenameValue2.scala
|
Scala
|
apache-2.0
| 400 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.history.yarn.integration
import java.io.{ByteArrayInputStream, FileNotFoundException, IOException}
import java.net.URI
import com.sun.jersey.api.client.{ClientHandlerException, ClientResponse, UniformInterfaceException}
import org.apache.spark.deploy.history.yarn.rest.{JerseyBinding, UnauthorizedRequestException}
import org.apache.spark.deploy.history.yarn.testtools.AbstractYarnHistoryTests
/**
* Unit test of how well the Jersey Binding works -especially some error handling logic
* Which can follow different paths
*/
class JerseyBindingSuite extends AbstractYarnHistoryTests {

// URI every translated exception is attributed to in these tests
val uriPath = "http://spark.apache.org"
val uri = new URI(uriPath)

/** Translates `ex` as if raised by a GET of [[uri]]. */
def translate(ex: Throwable): Throwable = {
JerseyBinding.translateException("GET", uri, ex)
}

/**
* Build a [[UniformInterfaceException]] with the given string body
* @param status status code
* @param body body message
* @param buffer buffer flag
* @return new instance
*/
def newUIE(status: Int, body: String, buffer: Boolean ): UniformInterfaceException = {
val response = new ClientResponse(status,
null,
new ByteArrayInputStream(body.getBytes("UTF-8")),
null)
new UniformInterfaceException(response, buffer)
}

/**
* If a [[ClientHandlerException]] contains an IOE, it
* is unwrapped and returned
*/
test("UnwrapIOEinClientHandler") {
val fnfe = new FileNotFoundException("/tmp")
val che = new ClientHandlerException(fnfe)
assertResult(fnfe) {
translate(che)
}
}

/**
* If a [[ClientHandlerException]] does not contains an IOE, it
* is wrapped, but the inner text is extracted
*/
test("BuildIOEinClientHandler") {
val npe = new NullPointerException("oops")
val che = new ClientHandlerException(npe)
val ex = translate(che)
assert(che === ex.getCause)
assertExceptionDetails(ex, "oops", uriPath)
}

/**
* If a [[ClientHandlerException]] does not contains an IOE, it
* is unwrapped and returned
*/
test("EmptyClientHandlerException") {
val che = new ClientHandlerException("che")
val ex = translate(che)
assert(che === ex.getCause)
assertExceptionDetails(ex, "che", uriPath)
}

/**
* If the URI passed into translating a CHE is null, no
* URI is printed
*/
test("Null URI for ClientHandlerException") {
val che = new ClientHandlerException("che")
val ex = JerseyBinding.translateException("POST", null, che)
assert(che === ex.getCause)
assertExceptionDetails(ex, "POST", "unknown")
}

test("UniformInterfaceException null response") {
// bufferResponseEntity must be false to avoid triggering NPE in constructor
val uie = new UniformInterfaceException("uae", null, false)
val ex = translate(uie)
assert(uie === ex.getCause)
assertExceptionDetails(ex, "uae", uriPath)
}

// 404 with no body maps to FileNotFoundException
test("UniformInterfaceException 404 no body response") {
val uie = newUIE(404, "", false)
val ex = translate(uie)
assert(uie === ex.getCause)
assert(ex.isInstanceOf[FileNotFoundException], s"not FileNotFoundException: $ex")
assertExceptionDetails(ex, uriPath, uriPath)
}

// 403 maps to UnauthorizedRequestException
test("UniformInterfaceException 403 forbidden") {
val uie = newUIE(403, "forbidden", false)
val ex = translate(uie)
assert(uie === ex.getCause)
assert(ex.isInstanceOf[UnauthorizedRequestException], s"not UnauthorizedRequestException: $ex")
assertExceptionDetails(ex, "Forbidden", uriPath)
}

// other server errors map to a plain IOException carrying the status code
test("UniformInterfaceException 500 response") {
val uie = newUIE(500, "internal error", false)
val ex = translate(uie)
assert(uie === ex.getCause)
assert(ex.isInstanceOf[IOException], s"not IOException: $ex")
assertExceptionDetails(ex, "500", uriPath)
}
}
|
steveloughran/spark-timeline-integration
|
yarn-timeline-history/src/test/scala/org/apache/spark/deploy/history/yarn/integration/JerseyBindingSuite.scala
|
Scala
|
apache-2.0
| 4,604 |
package mm4s.api
import akka.NotUsed
import akka.actor.ActorSystem
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport
import akka.http.scaladsl.marshalling._
import akka.http.scaladsl.model.headers.`Set-Cookie`
import akka.http.scaladsl.model.{HttpMethods, HttpRequest, HttpResponse, MessageEntity}
import akka.http.scaladsl.unmarshalling.Unmarshal
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Flow, Source}
import spray.json._
/**
* User related API components
*
* @see [[https://github.com/mattermost/platform/blob/master/api/user.go]]
*/
object Users {
import Streams._
import UserModels._
import UserProtocols._

/** POST /users/create — builds the request stream creating a new user. */
def create(createUser: CreateUser)(implicit system: ActorSystem) = {
request("/users/create") { r =>
Marshal(createUser).to[MessageEntity].map(r.withMethod(HttpMethods.POST).withEntity)
}
}

/** GET /users/profiles with the session token attached.
  * NOTE(review): the `team` parameter is currently unused in the body —
  * confirm whether it should scope the request. */
def list(team: String, token: String)(implicit system: ActorSystem): Source[HttpRequest, NotUsed] = {
get("/users/profiles").map(withAuth(token))
}

/** POST /users/login — authenticates by username/password/team name. */
def login(byUsername: LoginByUsername)(implicit system: ActorSystem) = {
request("/users/login") { r =>
Marshal(byUsername).to[MessageEntity].map(r.withMethod(HttpMethods.POST).withEntity)
}
}

/** Turns a login response into a [[UserModels.LoggedIn]] session by pairing the
  * first Set-Cookie header's value with the unmarshalled login details. */
def extractSession()(implicit system: ActorSystem, mat: ActorMaterializer) = {
Flow[HttpResponse].mapAsync(1) { r =>
val cookie = r.headers.collect { case `Set-Cookie`(x) ⇒ x }.head.value
Unmarshal(r).to[LoginDetails].map(d => LoggedIn(cookie, d))
}
}
}
/** Request/response payload models for the user API. */
object UserModels {
case class User(id: String, username: String)
case class CreateUser(username: String, password: String, email: String, team_id: String)
case class UserCreated(id: String, username: String, email: String, team_id: String)
case class LoginByUsername(username: String, password: String, name: String /*team name*/)
case class LoginDetails(id: String, team_id: String, username: String, email: String)
// An authenticated session: the auth token plus the login details it belongs to.
trait Session {
def details: LoginDetails
def token: String
}
case class LoggedIn(token: String, details: LoginDetails) extends Session
case class LoggedInToChannel(token: String, channelId: String, details: LoginDetails) extends Session
}
/** spray-json formats for the [[UserModels]] case classes (field-count based). */
object UserProtocols extends DefaultJsonProtocol with SprayJsonSupport {
import UserModels._
implicit val UserFormat: RootJsonFormat[User] = jsonFormat2(User)
implicit val CreateUserFormat: RootJsonFormat[CreateUser] = jsonFormat4(CreateUser)
implicit val UserCreatedFormat: RootJsonFormat[UserCreated] = jsonFormat4(UserCreated)
implicit val LoginByUsernameFormat: RootJsonFormat[LoginByUsername] = jsonFormat3(LoginByUsername)
implicit val LoginDetailsFormat: RootJsonFormat[LoginDetails] = jsonFormat4(LoginDetails)
implicit val LoggedInFormat: RootJsonFormat[LoggedIn] = jsonFormat2(LoggedIn)
}
|
jw3/mm4s
|
api/src/main/scala/mm4s/api/Users.scala
|
Scala
|
apache-2.0
| 2,825 |
package com.nekopiano.scala.sandbox
import akka.actor.{Actor, ActorRef, ActorSystem, Props}
/**
* Created on 3/30/16.
*/
/** Demo of fire-and-forget (!), ask (?) and a small actor hierarchy. */
object AkkaTest extends App {
val system = ActorSystem("system")
val actor = system.actorOf(Props[HelloActor])
// fire-and-forget; "Hi" matches no case in HelloActor and is dropped
actor ! "Hello"
actor ! "Hi"

import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._
import scala.util.{Success,Failure}
import scala.concurrent.ExecutionContext.Implicits.global

implicit val timeout = Timeout(5 seconds)
val reply = actor ? "How are you?"
reply.onSuccess{
case msg:String => println("reply from actor: " + msg)
}
// "How's it going?" matches no case, so the ask times out -> Failure branch
val reply2 = actor ? "How's it going?"
reply2.onComplete{
case Success(msg: String) => println("reply from actor: " + msg)
case Failure(e) => println("Message Failure e:" + e)
case others => println("An unexpected result: " + others)
}

// Actor hierarchy
val superActor = system.actorOf(Props[SupervisorActor], "supervisorActor")
superActor ! "Hello"
superActor ! MessageForChild("Hello")
// NOTE(review): SupervisorActor replies to ChildMessage with an ActorSelection,
// so this ActorRef match may never fire — confirm intended behaviour.
val reply3 = superActor ? ChildMessage
reply3.onSuccess{
case actor:ActorRef => println(actor)
}
}
/** Minimal actor: prints on "Hello", replies to "How are you?".
  * Any other message is unhandled and goes to dead letters.
  */
class HelloActor extends Actor{
  def receive = {
    case "Hello" => println("World")
    case "How are you?" => sender ! "I'm fine thank you!"
  }
}
/** Leaf actor that just logs any String it receives. */
class ChildActor extends Actor {
  def receive = {
    case message:String => println("A message in a child: " + message)
  }
}
/** Parent actor that creates a child named "childActor" on start, forwards
  * [[MessageForChild]] payloads to it, and answers [[ChildMessage]] with the
  * child's ActorSelection.
  */
class SupervisorActor extends Actor {
  // Child is created once when this actor starts; selections below resolve to it.
  override def preStart = context.actorOf(Props[ChildActor], "childActor")
  def receive = {
    case message:String => println("A message in a supervisor: " + message)
    case MessageForChild(message:String) => context.actorSelection("childActor") ! message
    // NOTE(review): replies with an ActorSelection, not an ActorRef.
    case ChildMessage => sender ! context.actorSelection("childActor")
  }
}
/** Message whose payload the supervisor forwards verbatim to its child actor. */
case class MessageForChild(message:String)
/** Marker message asking the supervisor for its child's ActorSelection.
  * Declared as a case object rather than `case class ChildMessage()` because it
  * carries no data and every visible use site (`superActor ? ChildMessage` and
  * `case ChildMessage =>`) already treats it as a singleton value.
  */
case object ChildMessage
|
lamusique/ExcelPasswordFinder
|
src/test/scala/com/nekopiano/scala/sandbox/AkkaTest.scala
|
Scala
|
apache-2.0
| 1,890 |
package com.markglh.blog
import java.io.File
import com.typesafe.config.ConfigFactory
import org.http4s.server.ServerApp
import org.http4s.server.blaze.BlazeBuilder
import scala.concurrent.ExecutionContext
/** http4s entry point: binds the aggregator service on 0.0.0.0:80. */
object Bootstrap extends ServerApp {
  implicit val executionContext = ExecutionContext.global
  // This looks to (an optional) boot-configuration.conf first, then falls back to application.conf for any values not found
  // The idea is we can easily define an env specific config at runtime using volume mounts at $APP_CONF
  lazy val config = ConfigFactory
    .parseFile(new File(s"${sys.env.getOrElse("APP_CONF", ".")}/boot-configuration.conf"))
    .withFallback(ConfigFactory.load())
  // NOTE(review): port 80 is hard-coded; downstream service hosts come from config.
  override def server(args: List[String]) = BlazeBuilder.bindHttp(80, "0.0.0.0")
    .mountService(AggregatorService.routes(config.getString("tracking.service.host"),
      config.getString("beacon.service.host")), "/")
    .start
}
|
markglh/composing-microservices-with-sbt-docker
|
aggregator-service/src/main/scala/com/markglh/blog/Bootstrap.scala
|
Scala
|
apache-2.0
| 935 |
package org.psesd.srx.shared.core.sif
import java.net.URI
import org.psesd.srx.shared.core.exceptions.{ArgumentInvalidException, ArgumentNullOrEmptyOrWhitespaceException}
import org.psesd.srx.shared.core.extensions.TypeExtensions._
/** Represents a SIF-specific URI.
*
* @version 1.0
* @since 1.0
* @author David S. Dennison (iTrellis, LLC)
* @author Stephen Pugmire (iTrellis, LLC)
**/
/** Companion for [[SifUri]]: construction and validation helpers. */
object SifUri {
  /** Constructs a [[SifUri]] from the given string (throws if invalid). */
  def apply(sifUri: String): SifUri = new SifUri(sifUri)

  /** Returns true when the candidate parses as a URI with both a scheme and a host.
    *
    * @param sifUri the candidate URI string (may be null/empty).
    */
  def isValid(sifUri: String): Boolean = {
    if (sifUri.isNullOrEmpty) {
      false
    } else {
      try {
        val check = URI.create(sifUri.trim)
        !check.getScheme.isNullOrEmpty &&
          !check.getHost.isNullOrEmpty
      } catch {
        // Only treat non-fatal parse failures as "invalid"; fatal errors
        // (OutOfMemoryError, InterruptedException, ...) must propagate rather
        // than be silently swallowed by a catch-all Throwable handler.
        case scala.util.control.NonFatal(_) => false
      }
    }
  }
}
/** Parses a SIF URI into scheme, host, matrix parameters (zoneId/contextId),
  * and path parameters (service/serviceObject).
  *
  * @throws ArgumentNullOrEmptyOrWhitespaceException if `sifUri` is null or empty.
  * @throws ArgumentInvalidException if `sifUri` fails [[SifUri.isValid]].
  */
class SifUri(sifUri: String) {
  if (sifUri.isNullOrEmpty) {
    throw new ArgumentNullOrEmptyOrWhitespaceException("sifUri")
  }
  if (!SifUri.isValid(sifUri)) {
    throw new ArgumentInvalidException("sifUri")
  }
  // Safe to reference `uri` here even though the val is declared below:
  // toString is a def, evaluated only after construction completes.
  override def toString: String = {
    uri.toString
  }
  /** Holds the URI. **/
  private val uri = URI.create(sifUri)
  /** The URI scheme name. **/
  val scheme: String = uri.getScheme
  /** The URI host name. **/
  val host: String = uri.getHost
  /** Holds the URI path segments. **/
  private val path: Array[String] = uri.getPath.split('/')
  /** Holds the matrix parameters.
    *
    * @note If the matrix parameter key (lowercased) is '''not''' ''zoneid'' or ''contextid''
    *       then the corresponding value is `None`.
    **/
  private val matrixParams: MatrixParams = {
    // Matrix params are ;key=value pairs appended to the last path segment;
    // drop(1) skips the segment text itself, malformed pairs are ignored.
    val params: Map[String, String] = path.last.split(';').drop(1).foldLeft(Map[String, String]()) {
      (map, pair) =>
        val split = pair.split('=')
        split.length match {
          case 2 => map + (split.head.toLowerCase -> split.last)
          case _ => map
        }
    }
    new MatrixParams(
      if (params.contains(SifMatrixParameter.ZoneId.toString.toLowerCase)) Option(params(SifMatrixParameter.ZoneId.toString.toLowerCase)) else None,
      if (params.contains(SifMatrixParameter.ContextId.toString.toLowerCase)) Option(params(SifMatrixParameter.ContextId.toString.toLowerCase)) else None
    )
  }
  /** The HostedZone zone ID value or `None`. **/
  val zoneId: Option[String] = matrixParams.zoneId
  /** The HostedZone context ID value or `None`. **/
  val contextId: Option[String] = matrixParams.contextId
  /** Holds the first two path segments with any matrix parameters stripped. **/
  private val pathParams: PathParams = {
    // drop(1) skips the empty segment before the leading '/'.
    val params: List[String] = path.drop(1).foldLeft(List[String]()) { (list, value) =>
      if (value.contains(";")) list :+ value.split(';').head else list :+ value
    }
    new PathParams(
      if (params.headOption.isEmpty) None else Option(params.headOption.orNull),
      if (params.length < 2 || params.drop(1).headOption.isEmpty) None else Option(params.drop(1).headOption.orNull)
    )
  }
  /** The SIF service name or `None`. **/
  val service: Option[String] = pathParams.service
  /** The SIF service object name or `None`. **/
  val serviceObject: Option[String] = pathParams.serviceObject
  /** Holds parsed matrix parameter values.
    *
    * @param zoneId a zone ID value or `None`.
    * @param contextId a context ID value or `None`.
    **/
  private case class MatrixParams(zoneId: Option[String], contextId: Option[String])
  /** Holds parsed path parameter values (''service'' and ''service object'').
    *
    * @param service the ''service'' URI segment or `None`.
    * @param serviceObject the ''service object'' URI segment or `None`.
    **/
  private case class PathParams(service: Option[String], serviceObject: Option[String])
}
|
PSESD/srx-shared-core
|
src/main/scala/org/psesd/srx/shared/core/sif/SifUri.scala
|
Scala
|
mit
| 3,663 |
/* sbt -- Simple Build Tool
* Copyright 2010 Mark Harrah
*/
package sbt
package inc
import java.io.File
import java.util.zip.ZipFile
import Function.const
/** Utilities to locate which classpath entry (directory or jar) defines a class. */
object Locate
{
  /** Curried predicate: given a classpath entry, tells whether it defines a class name. */
  type DefinesClass = File => String => Boolean

  /** Right(src) provides the value for the found class.
   * Left(true) means that the class was found, but it had no associated value.
   * Left(false) means that the class was not found. */
  def value[S](classpath: Seq[File], get: File => String => Option[S]): String => Either[Boolean, S] =
  {
    // Stream keeps the per-entry lookups lazy: entries are only consulted as needed.
    val gets = classpath.toStream.map(getValue(get))
    className => find(className, gets)
  }

  /** Returns the first result for `name` that is not Left(false), or Left(false) if none. */
  def find[S](name: String, gets: Stream[String => Either[Boolean, S]]): Either[Boolean, S] =
    if(gets.isEmpty)
      Left(false)
    else
      gets.head(name) match
      {
        case Left(false) => find(name, gets.tail)
        case x => x
      }

  /** Returns a function that searches the provided class path for
   * a class name and returns the entry that defines that class.*/
  def entry(classpath: Seq[File], f: DefinesClass): String => Option[File] =
  {
    val entries = classpath.toStream.map { entry => (entry, f(entry)) }
    // collectFirst replaces the deprecated postfix `collect { ... } headOption;` form
    // and stops at the first matching entry.
    className => entries.collectFirst { case (entry, defines) if defines(className) => entry }
  }

  /** For a directory entry, the concrete .class file of `className`; for a jar, the jar itself. */
  def resolve(f: File, className: String): File = if(f.isDirectory) classFile(f, className) else f

  /** Wraps `get` for one entry so misses on that entry yield Left(false). */
  def getValue[S](get: File => String => Option[S])(entry: File): String => Either[Boolean, S] =
  {
    val defClass = definesClass(entry)
    val getF = get(entry)
    className => if(defClass(className)) getF(className).toRight(true) else Left(false)
  }

  /** Predicate telling whether `entry` (directory or existing jar) defines a class.
   * A non-existent entry defines nothing. */
  def definesClass(entry: File): String => Boolean =
    if(entry.isDirectory)
      directoryDefinesClass(entry)
    else if(entry.exists)
      jarDefinesClass(entry)
    else
      const(false)

  /** Reads the jar's table of contents once and answers membership queries from a Set. */
  def jarDefinesClass(entry: File): String => Boolean =
  {
    // JavaConverters (explicit .asScala) replaces the deprecated implicit JavaConversions.
    import scala.collection.JavaConverters._
    val jar = new ZipFile(entry, ZipFile.OPEN_READ)
    val entries = try { jar.entries.asScala.map(e => toClassName(e.getName)).toSet } finally { jar.close() }
    entries.contains _
  }

  /** Converts a zip entry path ("a/b/C.class") to a class name ("a.b.C"). */
  def toClassName(entry: String): String =
    entry.stripSuffix(ClassExt).replace('/', '.')

  val ClassExt = ".class"

  /** Predicate for a directory entry: the corresponding .class file must exist. */
  def directoryDefinesClass(entry: File): String => Boolean =
    className => classFile(entry, className).isFile

  /** The File where `className`'s class file would live under `baseDir`. */
  def classFile(baseDir: File, className: String): File =
  {
    val (pkg, name) = components(className)
    val dir = subDirectory(baseDir, pkg)
    new File(dir, name + ClassExt)
  }

  /** Resolves nested package directories under `base` (foldLeft replaces deprecated `/:`). */
  def subDirectory(base: File, parts: Seq[String]): File =
    parts.foldLeft(base)( (b, p) => new File(b, p) )

  /** Splits a fully-qualified class name into (package segments, simple name). */
  def components(className: String): (Seq[String], String) =
  {
    assume(!className.isEmpty)
    val parts = className.split("\\.")
    if(parts.length == 1) (Nil, parts(0)) else (parts.init, parts.last)
  }
}
|
ornicar/xsbt
|
compile/inc/Locate.scala
|
Scala
|
bsd-3-clause
| 2,755 |
package com.wellfactored.restless.play.json
import com.wellfactored.restless.query.QueryAST.Path
import org.scalatest.{FlatSpec, Matchers, WordSpecLike}
import play.api.libs.json.{JsObject, JsString, Json}
/** Unit tests for Selection.selectT: ordering by key, result limiting,
  * field projection, empty-object filtering, and de-duplication.
  */
class Selection$Spec extends WordSpecLike with Matchers {
  import Selection._
  // Fixture with optional fields so projections can drop them.
  case class Foo(id: Long, s: Option[String] = None, i: Option[Int] = None)
  implicit val fooW = Json.writes[Foo]
  "selectT" should {
    "sort by id" in {
      val foos = Seq(Foo(2), Foo(1))
      val expected = Seq(Foo(1), Foo(2)).map(Json.toJson(_))
      selectT(foos, None, None, None, None)(_.id) shouldBe expected
    }
    "limit number of results" in {
      val foos = (1 to 50).map(Foo(_))
      val expected = foos.take(10).map(Json.toJson(_))
      selectT(foos, None, None, Some(10), None)(_.id) shouldBe expected
    }
    "not limit number of results" in {
      val foos = (1 to 50).map(Foo(_))
      val expected = foos.map(Json.toJson(_))
      selectT(foos, None, None, None, None)(_.id) shouldBe expected
    }
    "project the right fields" in {
      // Projection keeps id and s; the i field must be absent from the output.
      val foos = (1 to 50).map(n => Foo(n, Some(s"s$n"), Some(3)))
      val projection = List(Path("id"), Path("s"))
      val expected = (1 to 50).map(n => Foo(n, Some(s"s$n"), None)).map(Json.toJson(_))
      selectT(foos, None, Some(projection), None, None)(_.id) shouldBe expected
    }
    "filter empty objects" in {
      // Odd ids have no s value, so their projection is {} and must be dropped.
      val foos = (1 to 10).map(i => Foo(i, if (i % 2 == 0) Some(s"s$i") else None, None))
      val projection = List(Path("s"))
      val expected = (1 to 10).flatMap(i => if (i % 2 == 0) Some(JsObject(Seq("s" -> JsString(s"s$i")))) else None)
      val results = selectT(foos, None, Some(projection), None, None)(_.id)
      results shouldBe expected
    }
    "de-duplicate results" in {
      // All ten rows project to the same object; only one should survive.
      val foos = (1 to 10).map(Foo(_, Some("s"), None))
      val projection = List(Path("s"))
      val expected = Vector(JsObject(Seq("s" -> JsString("s"))))
      val results = selectT(foos, None, Some(projection), None, None)(_.id)
      results shouldBe expected
    }
  }
}
|
WellFactored/restless
|
play-json/src/test/scala/com/wellfactored/restless/play/json/Selection$Spec.scala
|
Scala
|
mit
| 2,068 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package synthesis
package rules
import leon.utils.SeqUtils._
import solvers._
import purescala.Path
import purescala.Expressions._
import purescala.Common._
import purescala.Definitions._
import purescala.Types._
import purescala.TypeOps._
import purescala.ExprOps._
import purescala.DefOps._
import purescala.Constructors._
/** Synthesis rule that decomposes a problem by calling a higher-order function
  * in scope whose only missing argument is its function-typed parameter, then
  * synthesizes that closure as a sub-problem.
  */
case object HOFDecomp extends Rule("HOFDecomp") {
  def instantiateOn(implicit hctx: SearchContext, p: Problem): Traversable[RuleInstantiation] = {
    // Look for HOFs to call that are only missing a HOF argument
    val fd = hctx.functionContext
    val program = hctx.program
    val tpe = tupleTypeWrap(p.xs.map(_.getType))
    val recursives = program.callGraph.transitiveCallers(hctx.functionContext) + fd
    val solverf = SolverFactory.getFromSettings(hctx, program).withTimeout(1000L)
    // Exclude synthetic/inner/nondeterministic functions and anything that could recurse back here.
    def toExclude(fd: FunDef) = {
      fd.isSynthetic ||
      fd.isInner ||
      !fd.body.exists(isDeterministic) ||
      recursives(fd)
    }
    def isHOFParam(vd: ValDef): Boolean = vd.getType.isInstanceOf[FunctionType]
    def isHOF(fd: FunDef): Boolean = fd.params.exists(isHOFParam)
    // Produces all rule instantiations for one candidate HOF `fd`.
    def getCandidates(fd: FunDef): Seq[RuleInstantiation] = {
      val free = fd.tparams.map(_.tp)
      instantiation_<:(fd.returnType, tpe) match {
        case Some(tpsMap1) =>
          /* Finding compatible calls:
           * Example candidate:
           *  map[T, Int](List[T], (T => Int): List[Int] where T is still free
           *
           * We now need to infer T based on ''as''
           */
          val tfd = fd.typed(free.map(tp => tpsMap1.getOrElse(tp, tp)))
          val hofParams = tfd.params.filter(isHOFParam)
          // Only one HO-parameter allowed, for now
          if (hofParams.size != 1) {
            return Nil
          }
          val hofId = FreshIdentifier("F", hofParams.head.getType, true)
          /* Given a function 'map(l: List[T], f: T => B): List[B]' found in
           * scope, we extract the HO-parameter f, and make sure 'normal'
           * arguments l, are either directly mapped to an existing input, or
           * to a free variable.
           *
           * We first instantiate type params according to return value.
           */
          var freeVariables = Set[Identifier]()
          val altsPerArgs = tfd.params.zipWithIndex.map { case (vd, i) =>
            if (isHOFParam(vd)) {
              // Only one possibility for the HOF argument
              Seq(hofId)
            } else {
              // For normal arguments, we either map to a free variable (and
              // then ask solver for a model), or to an input
              val optFree = if (i > 0) {
                // Hack: First argument is the most important, we should not
                // obtain its value from a model. We don't want:
                // Nil.fold(..)
                Some(FreshIdentifier("v", vd.getType, true))
              } else {
                None
              }
              freeVariables ++= optFree
              // Note that this is an over-approximation, since
              // Int <: T and
              // Bool <: T can both be true in two distinct calls to
              // canBeSubtypeOf with T free
              //
              // We refine later.
              val compatibleInputs = {
                p.as.filter(a => instantiation_>:(vd.getType, a.getType).nonEmpty)
              }
              compatibleInputs ++ optFree
            }
          }
          //println("-"*80)
          //println("Considering function: "+tfd.asString)
          val asSet = p.as.toSet
          val calls = cartesianProduct(altsPerArgs).flatMap { vs =>
            /*
             * We want at least one input to be used.
             */
            if (vs.exists(v => asSet(v))) {
              /*
               * We then instantiate remaining type params based on available
               * arguments.
               */
              val argsTpe = tupleTypeWrap(vs.map(_.getType))
              val paramsTpe = tupleTypeWrap(tfd.params.map(_.getType))
              //println(s"${paramsTpe.asString} >: ${argsTpe.asString} (${stillFree.map(_.asString).mkString(", ")})")
              // Check that all arguments are compatible, together.
              val paramsTpeInst = instantiateType(paramsTpe, tpsMap1)
              instantiation_<:(paramsTpeInst, argsTpe) match {
                case Some(tpsMap2) =>
                  val tpsMap = tpsMap1 ++ tpsMap2
                  val tfd = fd.typed(free.map(tp => tpsMap.getOrElse(tp, tp)))
                  val hofId2 = instantiateType(hofId, tpsMap)
                  val vs2 = vs.map { v => subst(hofId -> hofId2.toVariable, v.toVariable) }
                  Some((FunctionInvocation(tfd, vs2), hofId2))
                case _ =>
                  None
              }
            } else {
              None
            }
          }
          val results = for ((c, hofId) <- calls) yield {
            println("-"*80)
            println("Considering call: "+c.asString)
            /* All variables that are used for the call are considered as
             * captured, they are not closed over within the closure.
             *
             * The rationale is that if the variable is used for the HOF call,
             * it is likely not needed within the closure as well. For example:
             *
             * list.foldLeft(x, { (a: Int, b: Int) => ...do not close over x or list... })
             */
            val captured = variablesOf(c)
            val free = captured & freeVariables
            val env = p.as.filterNot(captured)
            /* Instead of using our hofId directly, like:
             * F: A => B
             * we use a function Fgen(env): A => B
             */
            val fgen = FreshIdentifier("Fgen", FunctionType(env.map(_.getType), hofId.getType))
            val fgenCall = Application(fgen.toVariable, env.map(_.toVariable))
            val paramC = subst(hofId -> fgenCall, c)
            /* Build constraints to detect if HOF call is compatible with
             * tests. Env specializes the calls.
             *
             * We check if there exists a model for Fgen(env) that satisfies
             * all tests. This translates to checking if a model exists for F
             * given a specific env.
             */
            val cnstrs = p.eb.valids.collect {
              case InOutExample(ins, outs) =>
                equality(substAll(p.as.zip(ins).toMap, paramC), tupleWrap(outs))
            }
            val cnstr = andJoin(cnstrs)
            //println("Constraint: "+cnstr.asString)
            val solver = solverf.getNewSolver()
            try {
              solver.assertCnstr(cnstr)
              solver.check match {
                case Some(true) =>
                  val model = solver.getModel
                  //println("Model: "+model.asString)
                  val freeValuations = free.flatMap { id =>
                    model.get(id).map { m => id -> m }
                  }.toMap
                  /* We extract valuations from ''fgen'' which gives us a model
                   * for F per env. */
                  val tests = model.get(fgen) match {
                    case Some(FiniteLambda(envToF, _, _)) =>
                      envToF.flatMap {
                        case (envValuation, FiniteLambda(values, _, _)) =>
                          values.flatMap {
                            case (ins, out) =>
                              //println("Test:")
                              //println(s"$ins ---> $out")
                              /* Given a model with Fgen(env*)(X) -> Y,
                               * we check if we can use ''(env*, X) -> Y'' as
                               * in/out test for the closure. This is done by
                               * making sure we don't over-commit with this
                               * test (Y really is the only acceptable output)
                               */
                              val solver2 = solverf.getNewSolver()
                              solver2.assertCnstr(substAll(freeValuations, cnstr))
                              val f = Application(fgen.toVariable, envValuation)
                              solver2.assertCnstr(not(equality(Application(f, ins), out)))
                              val isUnique = solver2.check.contains(false)
                              //println("IsUnique? "+isUnique)
                              //if (!isUnique) {
                              // println("Alternative: "+solver2.getModel.asString)
                              //}
                              solverf.reclaim(solver2)
                              if (isUnique) {
                                Some(InOutExample(envValuation ++ ins, Seq(out)))
                              } else {
                                None
                              }
                          }
                      }
                    case res =>
                      //println("Model of "+fgen+": "+res)
                      Nil
                  }
                  val eb = ExamplesBank(tests, Nil)
                  println(eb.asString("Tests:"))
                  // Heuristic: we don't want to synthesize HOFs with only one test, those are trivial and uninteresting
                  if (tests.size > 1) {
                    val cAssigned = substAll(freeValuations, c)
                    val (ls, xs) = hofId.getType match {
                      case FunctionType(froms, to) =>
                        (froms.toList.map(tpe => FreshIdentifier("a", tpe, true)), List(FreshIdentifier("x", to, true)))
                    }
                    val as = env ++ ls
                    // TODO: collect pc that concerns ''env''
                    val subs = List(
                      Problem(as, BooleanLiteral(true), Path.empty, BooleanLiteral(true), xs, eb)
                    )
                    val onSuccess: List[Solution] => Option[Solution] = {
                      case List(sol) =>
                        if (sol.pre == BooleanLiteral(true)) {
                          val term = subst(hofId -> Lambda(ls.map(ValDef), sol.term), cAssigned)
                          Some(Solution(BooleanLiteral(true), sol.defs, term, sol.isTrusted))
                        } else {
                          None
                        }
                      case _ =>
                        None
                    }
                    val desc = {
                      val hole = FreshIdentifier("_", hofId.getType).toVariable
                      "Call HOF function "+subst(hofId -> hole, cAssigned).asString
                    }
                    Some(decomp(subs, onSuccess, desc))
                  } else {
                    None
                  }
                case Some(false) =>
                  println("UNSAT")
                  None
                case None =>
                  println("UNKNOWN")
                  None
              }
            } finally {
              solverf.reclaim(solver)
            }
          }
          results.flatten
        case None =>
          Nil
      }
    }
    visibleFunDefsFromMain(program).filter(isHOF).filterNot(toExclude).toSeq.sortBy(_.id).flatMap(getCandidates)
  }
}
|
epfl-lara/leon
|
src/main/scala/leon/synthesis/rules/HOFDecomp.scala
|
Scala
|
gpl-3.0
| 11,433 |
package com.socrata.soql.exceptions
import scala.collection.compat.immutable.LazyList
import scala.util.parsing.input.Position
import scala.reflect.ClassTag
import com.rojoma.json.v3.ast._
import com.rojoma.json.v3.util.{SimpleHierarchyCodecBuilder, InternalTag, AutomaticJsonCodecBuilder}
import com.rojoma.json.v3.codec.{JsonEncode, JsonDecode, DecodeError}
import com.rojoma.json.v3.matcher._
import com.socrata.soql.environment.{TypeName, FunctionName, ColumnName}
import com.socrata.soql.parsing.SoQLPosition
import com.socrata.soql.parsing.RecursiveDescentParser.{Reader, ParseException}
import com.socrata.soql.parsing.RecursiveDescentParser
/** Root of all user-facing SoQL errors; the exception message embeds the source
  * position's longString so the offending token is visible in logs.
  */
sealed abstract class SoQLException(m: String, p: Position) extends RuntimeException(m + ":\\n" + p.longString) {
  def position: Position
}
/** JSON codecs for the SoQLException hierarchy, keyed by a "type" tag. */
object SoQLException {
  // Decodes a name serialized either as a bare string or as a
  // [caseFolded, name] pair; only the second element of a pair is used.
  private def nameDecode[T](v: JValue, f: String => T) =
    v match {
      case JString(s) => Right(f(s))
      case JArray(Seq(JString(_), JString(s))) => Right(f(s))
      case otherArray : JArray => Left(DecodeError.InvalidValue(otherArray))
      case other => Left(DecodeError.join(Seq(
        DecodeError.InvalidType(got = other.jsonType, expected = JString),
        DecodeError.InvalidType(got = other.jsonType, expected = JArray))))
    }
  private implicit object ColumnNameCodec extends JsonEncode[ColumnName] with JsonDecode[ColumnName] {
    def encode(v: ColumnName) = JsonEncode.toJValue(v.caseFolded, v.name)
    def decode(v: JValue) = nameDecode(v, ColumnName)
  }
  private implicit object FunctionNameCodec extends JsonEncode[FunctionName] with JsonDecode[FunctionName] {
    def encode(v: FunctionName) = JsonEncode.toJValue(v.caseFolded, v.name)
    def decode(v: JValue) = nameDecode(v, FunctionName)
  }
  private implicit object TypeNameCodec extends JsonEncode[TypeName] with JsonDecode[TypeName] {
    def encode(v: TypeName) = JsonEncode.toJValue(v.caseFolded, v.name)
    def decode(v: JValue) = nameDecode(v, TypeName)
  }
  // Positions round-trip as {row, column, text}; only the first line of
  // longString is kept as text.
  private implicit object PositionCodec extends JsonEncode[Position] with JsonDecode[Position] {
    private val row = Variable[Int]()
    private val col = Variable[Int]()
    private val text = Variable[String]()
    private val pattern =
      PObject(
        "row" -> row,
        "column" -> col,
        "text" -> text
      )
    def encode(p: Position) =
      pattern.generate(row := p.line, col := p.column, text := p.longString.split('\\n')(0))
    def decode(v: JValue) =
      pattern.matches(v).map { results =>
        SoQLPosition(row(results), col(results), text(results), col(results))
      }
  }
  private implicit object CharCodec extends JsonEncode[Char] with JsonDecode[Char] {
    def encode(c: Char) = JString(c.toString)
    def decode(v: JValue) = v match {
      case JString(s) if s.length == 1 => Right(s.charAt(0))
      case other: JString => Left(DecodeError.InvalidValue(other))
      case other => Left(DecodeError.InvalidType(expected = JString, got = other.jsonType))
    }
  }
  // Small sugar so each branch below registers one codec for both directions.
  private implicit class AugCodec(shc: SimpleHierarchyCodecBuilder[SoQLException]) {
    def and[T <: SoQLException : ClassTag](tag: String, codec: JsonEncode[T] with JsonDecode[T]) =
      shc.branch[T](tag)(codec, codec, implicitly)
  }
  implicit val jCodec = new JsonEncode[SoQLException] with JsonDecode[SoQLException] {
    // ugggh
    private val rawCodec = SimpleHierarchyCodecBuilder[SoQLException](InternalTag("type")).
      and("bad-parse", AutomaticJsonCodecBuilder[BadParse]).
      and("reserved-table-alias", AutomaticJsonCodecBuilder[ReservedTableAlias]).
      // AggregateCheckException
      and("aggregate-in-ungrouped-context", AutomaticJsonCodecBuilder[AggregateInUngroupedContext]).
      and("column-not-in-group-bys", AutomaticJsonCodecBuilder[ColumnNotInGroupBys]).
      // AliasAnalysisException
      and("repeated-exclusion", AutomaticJsonCodecBuilder[RepeatedException]).
      and("duplicate-alias", AutomaticJsonCodecBuilder[DuplicateAlias]).
      and("no-such-column", AutomaticJsonCodecBuilder[NoSuchColumn]).
      and("no-such-table", AutomaticJsonCodecBuilder[NoSuchTable]).
      and("circular-alias", AutomaticJsonCodecBuilder[CircularAliasDefinition]).
      // LexerException
      and("unexpected-escape", AutomaticJsonCodecBuilder[UnexpectedEscape]).
      and("bad-unicode-escape", AutomaticJsonCodecBuilder[BadUnicodeEscapeCharacter]).
      and("unicode-character-out-of-range", AutomaticJsonCodecBuilder[UnicodeCharacterOutOfRange]).
      and("unexpected-character", AutomaticJsonCodecBuilder[UnexpectedCharacter]).
      and("unexpected-eof", AutomaticJsonCodecBuilder[UnexpectedEOF]).
      and("unterminated-string", AutomaticJsonCodecBuilder[UnterminatedString]).
      // TypecheckException
      and("no-such-function", AutomaticJsonCodecBuilder[NoSuchFunction]).
      and("type-mismatch", AutomaticJsonCodecBuilder[TypeMismatch]).
      and("ambiguous-call", AutomaticJsonCodecBuilder[AmbiguousCall]).
      and("number-of-columns-mismatch", AutomaticJsonCodecBuilder[NumberOfColumnsMismatch]).
      and("type-of-columns-mismatch", AutomaticJsonCodecBuilder[TypeOfColumnsMismatch]).
      and("function-requires-window-info", AutomaticJsonCodecBuilder[FunctionRequiresWindowInfo]).
      and("function-does-not-accept-window-info", AutomaticJsonCodecBuilder[FunctionDoesNotAcceptWindowInfo]).
      and("non-boolean-where", AutomaticJsonCodecBuilder[NonBooleanWhere]).
      and("non-groupable-group-by", AutomaticJsonCodecBuilder[NonGroupableGroupBy]).
      and("non-boolean-having", AutomaticJsonCodecBuilder[NonBooleanHaving]).
      and("unorderable-order-by", AutomaticJsonCodecBuilder[UnorderableOrderBy]).
      // QueryOperationException
      and("right-side-of-chain-query-must-be-leaf", AutomaticJsonCodecBuilder[RightSideOfChainQueryMustBeLeaf]).
      build
    // Encoding additionally injects a human-readable "english" field;
    // decoding ignores it (rawCodec only looks at the tagged fields).
    def encode(e: SoQLException) = {
      val JObject(fields) = rawCodec.encode(e)
      JObject(fields.toMap + ("english" -> JString(e.getMessage)))
    }
    def decode(v: JValue) = rawCodec.decode(v)
  }
}
/** Generic parse failure; subclasses below carry the parser state for richer messages. */
case class BadParse(message: String, position: Position) extends SoQLException(message, position)
object BadParse {
  /** Parser stopped at a token it did not expect; message lists the alternatives. */
  class ExpectedToken(val reader: Reader)
      extends BadParse(ExpectedToken.msg(reader), reader.first.position)
      with ParseException
  {
    // Re-expose the constructor's position as the abstract member.
    override val position = super.position
  }
  object ExpectedToken {
    private def msg(reader: Reader) = {
      RecursiveDescentParser.expectationsToEnglish(reader.alternates, reader.first)
    }
  }
  /** Right-hand side of a query pipe was a compound query. */
  class ExpectedLeafQuery(val reader: Reader)
      extends BadParse(ExpectedLeafQuery.msg(reader), reader.first.position)
      with ParseException
  {
    override val position = super.position
  }
  object ExpectedLeafQuery {
    private def msg(reader: Reader) = {
      "Expected a non-compound query on the right side of a pipe operator"
    }
  }
  /** A `*` selection appeared after non-star selections. */
  class UnexpectedStarSelect(val reader: Reader)
      extends BadParse(UnexpectedStarSelect.msg(reader), reader.first.position)
      with ParseException
  {
    override val position = super.position
  }
  object UnexpectedStarSelect {
    private def msg(reader: Reader) = {
      "Star selections must come at the start of the select-list"
    }
  }
  /** A `:*` selection appeared after a user `*` selection. */
  class UnexpectedSystemStarSelect(val reader: Reader)
      extends BadParse(UnexpectedSystemStarSelect.msg(reader), reader.first.position)
      with ParseException
  {
    override val position = super.position
  }
  object UnexpectedSystemStarSelect {
    private def msg(reader: Reader) = {
      "System column star selections must come before user column star selections"
    }
  }
}
case class ReservedTableAlias(alias: String, position: Position) extends SoQLException("Reserved table alias", position)
// Errors from the aggregate checker: aggregates used outside GROUP BY context.
sealed trait AggregateCheckException extends SoQLException
case class AggregateInUngroupedContext(function: FunctionName, clause: String, position: Position) extends SoQLException("Cannot use aggregate function `" + function + "' in " + clause, position) with AggregateCheckException
case class ColumnNotInGroupBys(column: ColumnName, position: Position) extends SoQLException("Column `" + column + "' not in group bys", position) with AggregateCheckException
// Errors from alias analysis: duplicate/circular aliases and unknown names.
sealed trait AliasAnalysisException extends SoQLException
case class RepeatedException(name: ColumnName, position: Position) extends SoQLException("Column `" + name + "' has already been excluded", position) with AliasAnalysisException // this should be called RepeatedExclusion
case class DuplicateAlias(name: ColumnName, position: Position) extends SoQLException("There is already a column named `" + name + "' selected", position) with AliasAnalysisException
case class NoSuchColumn(name: ColumnName, position: Position) extends SoQLException("No such column `" + name + "'", position) with AliasAnalysisException with TypecheckException
case class NoSuchTable(qualifier: String, position: Position) extends SoQLException("No such table `" + qualifier + "'", position) with AliasAnalysisException with TypecheckException
case class CircularAliasDefinition(name: ColumnName, position: Position) extends SoQLException("Circular reference while defining alias `" + name + "'", position) with AliasAnalysisException
// Errors raised while tokenizing raw SoQL text.
sealed trait LexerException extends SoQLException
case class UnexpectedEscape(char: Char, position: Position) extends SoQLException("Unexpected escape character", position) with LexerException
case class BadUnicodeEscapeCharacter(char: Char, position: Position) extends SoQLException("Bad character in unicode escape", position) with LexerException
case class UnicodeCharacterOutOfRange(value: Int, position:Position) extends SoQLException("Unicode character out of range", position) with LexerException
case class UnexpectedCharacter(char: Char, position: Position) extends SoQLException("Unexpected character", position) with LexerException
case class UnexpectedEOF(position: Position) extends SoQLException("Unexpected end of input", position) with LexerException
case class UnterminatedString(position: Position) extends SoQLException("Unterminated string", position) with LexerException
// Errors raised while typechecking an analyzed query.
sealed trait TypecheckException extends SoQLException
case class NoSuchFunction(name: FunctionName, arity: Int, position: Position) extends SoQLException("No such function `" + name + "/" + arity + "'", position) with TypecheckException
case class TypeMismatch(name: FunctionName, actual: TypeName, position: Position) extends SoQLException("Cannot pass a value of type `" + actual + "' to function `" + name + "'", position) with TypecheckException
case class AmbiguousCall(name: FunctionName, position: Position) extends SoQLException("Ambiguous call to `" + name + "'", position) with TypecheckException
case class NumberOfColumnsMismatch(leftNumberOfColumns: Int, rightNumberOfColumns: Int, position: Position) extends SoQLException(s"Two selects must have the same number of columns: ${leftNumberOfColumns}, ${rightNumberOfColumns}", position) with TypecheckException
case class TypeOfColumnsMismatch(leftType: String, rightType: String, position: Position) extends SoQLException(s"Two selects must have the same column type: ${leftType}, ${rightType}", position) with TypecheckException
case class FunctionRequiresWindowInfo(name: FunctionName, position: Position) extends SoQLException(s"Function ${name} requires window information", position) with TypecheckException
case class FunctionDoesNotAcceptWindowInfo(name: FunctionName, position: Position) extends SoQLException(s"Function ${name} does not accept window information", position) with TypecheckException
case class NonBooleanWhere(typ: TypeName, position: Position) extends SoQLException("Cannot filter by an expression of type `" + typ + "'", position) with TypecheckException
case class NonGroupableGroupBy(typ: TypeName, position: Position) extends SoQLException("Cannot group by an expression of type `" + typ + "'", position) with TypecheckException
// NOTE(review): message below reuses the WHERE wording ("Cannot filter by") for HAVING — confirm intentional.
case class NonBooleanHaving(typ: TypeName, position: Position) extends SoQLException("Cannot filter by an expression of type `" + typ + "'", position) with TypecheckException
case class UnorderableOrderBy(typ: TypeName, position: Position) extends SoQLException("Cannot order by an expression of type `" + typ + "'", position) with TypecheckException
// Errors raised when combining queries (chaining).
sealed trait QueryOperationException extends SoQLException
case class RightSideOfChainQueryMustBeLeaf(position: Position) extends SoQLException("Right side of a chain query must be a leaf query.", position) with QueryOperationException
// This class represents a problem that end-users should not be able
// to cause (e.g., finding a Hole node during typechecking)
sealed abstract class SoQLLogicException(message: String) extends Exception(message)
case class UnexpectedJoinFunc() extends SoQLLogicException("Unexpected join function")
case class UnexpectedHole() extends SoQLLogicException("Unexpected hole")
|
socrata-platform/soql-reference
|
soql-analyzer/src/main/scala/com/socrata/soql/exceptions/Exceptions.scala
|
Scala
|
apache-2.0
| 12,965 |
package controllers
import anorm.{Row, SQL}
import play.api.Play.current
import play.api.db.DB
import play.api.libs.json.{JsString, JsUndefined, JsValue, Json}
import play.api.mvc.{Action, Controller}
import utils.Resource
/** Result of validating a single json request parameter. */
trait ValidationResult
/** A parameter that passed validation, with its extracted string value. */
class ValidationSuccess(val name: String, val value: String) extends ValidationResult
/** A parameter that failed validation; carries a human-readable reason. */
trait ValidationFailure extends ValidationResult {
  val failureMessage: String
}
// Messages below use idiomatic s-interpolation instead of `+` concatenation;
// the rendered text is byte-identical to the previous form.
/** The named parameter was absent from the request body. */
class NotProvided(val name: String) extends ValidationFailure {
  val failureMessage: String = s"Missing json object parameter '$name'"
}
/** The supplied json value has the wrong type for this parameter. */
class InvalidType(val name: String, val value: JsValue) extends ValidationFailure {
  val failureMessage: String = s"Value '$value' supplied for $name is invalid"
}
/** Numeric value below the unsigned-byte range. */
class ByteTooSmall(val name: String, val actual: String) extends ValidationFailure {
  val failureMessage: String = s"$name number '$actual' is too low, try 0 <= $name < 256"
}
/** Numeric value above the unsigned-byte range. */
class ByteTooLarge(val name: String, val actual: String) extends ValidationFailure {
  val failureMessage: String = s"$name number '$actual' is too high, try 0 <= $name < 256"
}
/** Numeric value below the short range. */
class ShortTooSmall(val name: String, val actual: String) extends ValidationFailure {
  val failureMessage: String = s"$name number '$actual' is too low, try 0 <= $name < 32768"
}
/** Numeric value above the short range. */
class ShortTooLarge(val name: String, val actual: String) extends ValidationFailure {
  val failureMessage: String = s"$name number '$actual' is too high, try 0 <= $name < 32768"
}
/** Numeric value below the int range. */
class IntTooSmall(val name: String, val actual: String) extends ValidationFailure {
  val failureMessage: String = s"$name number '$actual' is too low, try 0 <= $name < ${Int.MaxValue}"
}
/** A sequence of values was supplied where exactly one was expected. */
class ExpectedOneValue(val name: String, val actual: Seq[String]) extends ValidationFailure {
  val failureMessage: String = s"Found sequence of values ${actual.mkString(",")} rather than a single value for '$name'"
}
/**
 * Generic JSON/SQL CRUD controller. Concrete resources supply the backing
 * table name, the accepted parameters with their validators, and a row
 * serializer; this trait provides the create/read/update/delete actions.
 *
 * NOTE(security): queries are assembled by string concatenation. String
 * values are only protected by the quote-doubling in `allStringsValidator`,
 * so every validator MUST fully sanitize the values it accepts. Anorm's
 * parameterized queries would be preferable where the query shape allows.
 */
trait REST extends Controller {
  /** Accepted parameter names, each mapped to a validator (name, rawValue) => result. */
  protected val parameters: Map[String, (String, String) => ValidationResult]
  /** Name of the database table backing this resource. */
  protected val tableName: String
  /** Serializes a single database row to JSON. */
  protected def single(row: Row): JsValue

  // --- Controller routes ---

  /** POST: validates the JSON body, inserts a row, and echoes the stored row back. */
  def create = Action { request =>
    request.body.asJson.map { json =>
      val (successes: List[ValidationSuccess], failures: List[ValidationFailure]) =
        allValidationResultsFromBody(json)
      if (failures.isEmpty) {
        val insertQuery = formInsertQuery(successes)
        try {
          DB.withConnection { implicit c =>
            SQL(insertQuery).executeInsert() match {
              case Some(id) =>
                // Re-read the row so the client sees any database-generated fields.
                formatUnique(SQL(getByIdQuery(id))()) match {
                  case Some(json) => Ok(json)
                  case None => InternalServerError(Resource.errorStructure(List("Create failed for an unknown reason")))
                }
              case None => InternalServerError(Resource.errorStructure(List("Create failed for an unknown reason")))
            }
          }
        } catch {
          case e: Exception => BadRequest(Resource.errorStructure(List(e.getMessage)))
        }
      } else {
        BadRequest(Resource.errorStructure(failures.map(fail => fail.failureMessage)))
      }
    }.getOrElse {
      BadRequest(Resource.errorStructure(List("Expecting Json data, try setting the 'Content-Type' header to 'application/json'")))
    }
  }

  /** Updates whichever columns are present (and valid) in the JSON body for row `id`. */
  def update(id: Int) = Action { request =>
    request.body.asJson.map { json =>
      val (successes: List[ValidationSuccess], failures: List[ValidationFailure]) =
        allValidationResultsFromBody(json)
      if (!successes.isEmpty) {
        val updateQuery = formUpdateQuery(id, successes)
        try {
          DB.withConnection { implicit c =>
            SQL(updateQuery).executeUpdate() match {
              case 0 => NotFound(Resource.errorStructure(List("Not found with id "+id)))
              case _ => formatUnique(SQL(getByIdQuery(id))()) match {
                case Some(json) => Ok(json)
                case None => InternalServerError(Resource.errorStructure(List("Unable to retrieve updated row")))
              }
            }
          }
        } catch {
          case e: Exception => BadRequest(Resource.errorStructure(List(e.getMessage)))
        }
      } else {
        BadRequest(Resource.errorStructure("Unable to find one parameter specified correctly - nothing to update" :: failures.map(fail => fail.failureMessage)))
      }
    }.getOrElse {
      // Bug fix: the closing quote was missing from 'application/json' in this message.
      BadRequest(Resource.errorStructure(List("Expecting Json data, try setting the 'Content-Type' header to 'application/json'")))
    }
  }

  /** Lists rows, filtered by whichever query-string parameters validate successfully. */
  def getWithParams = Action { request =>
    val (successes: List[ValidationSuccess], _) =
      allValidationResultsFromQueryParams(request.queryString)
    DB.withConnection { implicit c =>
      Ok(formatMany(SQL(formSelectQuery(successes))()))
    }
  }

  /** Fetches a single row by primary key. */
  def getById(id: Int) = Action {
    DB.withConnection { implicit c =>
      formatUnique(SQL(getByIdQuery(id))()) match {
        case Some(json) => Ok(json)
        case None => NotFound(Resource.errorStructure(List("Not found with id "+id)))
      }
    }
  }

  /** Deletes a single row by primary key. */
  def delete(id: Int) = Action {
    DB.withConnection { implicit c =>
      SQL(deleteByIdQuery(id)).executeUpdate() match {
        case 0 => NotFound(Resource.errorStructure(List("Not found with id "+id)))
        case _ => Ok(Resource.successStructureWithoutData)
      }
    }
  }

  // --- Validation ---

  /** Splits a set of validation results into (successes, failures). */
  private def separateIntoSuccessesAndFailures(validationResults: Set[ValidationResult]) =
    validationResults.foldLeft[(List[ValidationSuccess], List[ValidationFailure])]((Nil, Nil)) {
      case ((succs, fails), success: ValidationSuccess) => (success :: succs, fails)
      case ((succs, fails), failure: ValidationFailure) => (succs, failure :: fails)
    }

  /** Validates every declared parameter against the query string; missing or
    * multi-valued parameters become failures.
    * (Renamed from the typo'd `allVaidationResultsFromQueryParams`; private, so no external callers.) */
  private def allValidationResultsFromQueryParams(queryParams: Map[String, Seq[String]]) = {
    val validationResults = parameters.keySet.map { parameter =>
      queryParams.get(parameter) match {
        case Some(Seq(value)) => parameters(parameter)(parameter, value)
        case Some(list) => new ExpectedOneValue(parameter, list)
        case None => new NotProvided(parameter)
      }
    }
    separateIntoSuccessesAndFailures(validationResults)
  }

  /** Validates every declared parameter against the JSON request body. */
  private def allValidationResultsFromBody(json: JsValue) = {
    val validationResults = parameters.keySet.map { parameter =>
      (json \\ parameter) match {
        case string: JsString => parameters(parameter)(parameter, string.value)
        case undef: JsUndefined => new NotProvided(parameter)
        case value => new InvalidType(parameter, value)
      }
    }
    separateIntoSuccessesAndFailures(validationResults)
  }

  /** Runs `check` on the integer value of `raw`. Robustness fix: a non-numeric
    * value used to throw NumberFormatException (surfacing as a 500); it now
    * becomes an ordinary validation failure. */
  private def checkedInt(name: String, raw: String)(check: Int => ValidationResult): ValidationResult =
    try check(raw.toInt)
    catch {
      case _: NumberFormatException => new ValidationFailure {
        val failureMessage: String = "Value '" + raw + "' supplied for " + name + " is not a number"
      }
    }

  /** Accepts integers in [0, 255]. */
  protected def validateByte(name: String, number: String): ValidationResult = checkedInt(name, number) {
    case num if num < 0 => new ByteTooSmall(name, num.toString)
    case num if num > 255 => new ByteTooLarge(name, num.toString)
    case num => new ValidationSuccess(name, num.toString)
  }

  /** Accepts integers in [0, 32767]. */
  protected def validateShort(name: String, number: String): ValidationResult = checkedInt(name, number) {
    case num if num < 0 => new ShortTooSmall(name, num.toString)
    case num if num > 32767 => new ShortTooLarge(name, num.toString)
    case num => new ValidationSuccess(name, num.toString)
  }

  /** Accepts non-negative integers. */
  protected def validateInt(name: String, number: String): ValidationResult = checkedInt(name, number) {
    case num if num < 0 => new IntTooSmall(name, num.toString)
    case num => new ValidationSuccess(name, num.toString)
  }

  /** Accepts any parseable floating-point number. */
  protected def validateFloat(name: String, number: String): ValidationResult =
    try new ValidationSuccess(name, number.toFloat.toString)
    catch {
      case _: NumberFormatException => new ValidationFailure {
        val failureMessage: String = "Value '" + number + "' supplied for " + name + " is not a number"
      }
    }

  /** Accepts any string; doubles embedded single quotes and wraps the value in
    * quotes so it can be spliced into SQL. */
  protected def allStringsValidator(name: String, value: String) =
    new ValidationSuccess(name, "'" + value.replaceAll("'","''") + "'")

  // --- Resource formatting ---

  /** Wraps zero-or-one rows: None when absent, success JSON when present. */
  private def formatUnique(rows: Seq[Row]): Option[JsValue] =
    rows.toList match {
      case Nil => None
      case List(value) => Some(Resource.successStructureWithData(single(value)))
      case _ => throw new IllegalStateException("Expecting 0 or 1 elements!")
    }

  /** Wraps any number of rows as a JSON array. */
  private def formatMany(rows: Seq[Row]): JsValue =
    Resource.successStructureWithData(Json.toJson(rows.map(single)))

  // --- Query construction (values are pre-quoted by the validators; see trait note) ---

  private def formInsertQuery(params: Seq[ValidationSuccess]) =
    "INSERT INTO " + tableName +
      " (" + params.map(_.name).mkString(",") + ") VALUES (" + params.map(_.value).mkString(",") + ");"

  private def formSelectQuery(params: Seq[ValidationSuccess]) =
    if (params.isEmpty) "SELECT * FROM " + tableName + ";"
    else "SELECT * FROM " + tableName + " WHERE " + params.map(p => p.name + "=" + p.value).mkString(" AND ") + ";"

  private def getByIdQuery(id: Long) =
    "SELECT * FROM " + tableName + " WHERE id=" + id + ";"

  private def formUpdateQuery(id: Int, params: Seq[ValidationSuccess]) =
    "UPDATE " + tableName + " SET " + params.map(p => p.name + "=" + p.value).mkString(",") + " WHERE id=" + id + ";"

  /** Bug fix: previously deleted from the hard-coded "generations" table
    * instead of this resource's `tableName`. */
  private def deleteByIdQuery(id: Long) =
    "DELETE FROM " + tableName + " WHERE id=" + id + ";"
}
|
ishakir/PokeStat
|
app/controllers/REST.scala
|
Scala
|
mit
| 9,689 |
/* sbt -- Simple Build Tool
* Copyright 2008, 2009, 2010, 2011 Mark Harrah
*/
package xsbt.boot
import Pre._
import BootConfiguration.{ CompilerModuleName, JAnsiVersion, LibraryModuleName }
import java.io.File
import java.net.{ URL, URLClassLoader, URI }
import java.util.concurrent.Callable
import scala.collection.immutable.List
import scala.annotation.tailrec
import ConfigurationStorageState._
/** Parsed launcher command line: remaining program arguments plus whether `--locate` was requested. */
class LauncherArguments(val args: List[String], val isLocate: Boolean)
/** Entry logic for the sbt launcher: loads the boot configuration, then either locates a server or launches the configured application. */
object Launch {
/** Launches using the current working directory as the base directory. */
def apply(arguments: LauncherArguments): Option[Int] = apply((new File("")).getAbsoluteFile, arguments)
/** Finds and loads the launch configuration, then runs in locate or launch mode.
* Returns the application's exit code, or None when no explicit exit is requested. */
def apply(currentDirectory: File, arguments: LauncherArguments): Option[Int] = {
val (configLocation, newArgs2, state) = Configuration.find(arguments.args, currentDirectory)
// The configuration is either a pre-parsed serialized form or a properties file still needing parsing and variable resolution.
val config = state match {
case SerializedFile => LaunchConfiguration.restore(configLocation)
case PropertiesFile => parseAndInitializeConfig(configLocation, currentDirectory)
}
if (arguments.isLocate) {
if (!newArgs2.isEmpty) {
// TODO - Print the arguments without exploding proguard size.
System.err.println("Warning: --locate option ignores arguments.")
}
locate(currentDirectory, config)
} else {
// First check to see if there are java system properties we need to set. Then launch the application.
updateProperties(config)
launch(run(Launcher(config)))(makeRunConfig(currentDirectory, config, newArgs2))
}
}
/** Locate a server, print where it is, and exit. */
def locate(currentDirectory: File, config: LaunchConfiguration): Option[Int] = {
config.serverConfig match {
case Some(_) =>
val uri = ServerLocator.locate(currentDirectory, config)
System.out.println(uri.toASCIIString)
Some(0)
case None => sys.error(s"${config.app.groupID}-${config.app.main} is not configured as a server.")
}
}
/**
* Some hackery to allow sys.props to be configured via a file. If this launch config has
* a valid file configured, we load the properties and apply them to this jvm.
*/
def updateProperties(config: LaunchConfiguration): Unit = {
config.serverConfig match {
case Some(config) =>
config.jvmPropsFile match {
case Some(file) if file.exists =>
try setSystemProperties(readProperties(file))
catch {
case e: Exception => throw new RuntimeException(s"Unable to load server properties file: ${file}", e)
}
case _ =>
}
case None =>
}
}
/** Parses the configuration *and* runs the initialization code that will remove variable references. */
def parseAndInitializeConfig(configLocation: URL, currentDirectory: File): LaunchConfiguration =
{
// NOTE(review): the base directory `bd` is computed but unused here — confirm intended.
val (parsed, bd) = parseConfiguration(configLocation, currentDirectory)
resolveConfig(parsed)
}
/** Parse configuration and return it and the baseDirectory of the launch. */
def parseConfiguration(configLocation: URL, currentDirectory: File): (LaunchConfiguration, File) =
Find(Configuration.parse(configLocation, currentDirectory), currentDirectory)
/** Sets up the Initialize object so we can fill in system properties in the configuration */
def resolveConfig(parsed: LaunchConfiguration): LaunchConfiguration =
{
// Set up initialize.
val propertiesFile = parsed.boot.properties
import parsed.boot.{ enableQuick, promptCreate, promptFill }
// Optionally prompt the user to create or fill in the boot properties file before resolving.
if (isNonEmpty(promptCreate) && !propertiesFile.exists)
Initialize.create(propertiesFile, promptCreate, enableQuick, parsed.appProperties)
else if (promptFill)
Initialize.fill(propertiesFile, parsed.appProperties)
parsed.logging.debug("Parsed configuration: " + parsed)
val resolved = ResolveValues(parsed)
resolved.logging.debug("Resolved configuration: " + resolved)
resolved
}
/** Create run configuration we'll use to launch the app. */
def makeRunConfig(currentDirectory: File, config: LaunchConfiguration, arguments: List[String]): RunConfiguration =
new RunConfiguration(config.getScalaVersion, config.app.toID, currentDirectory, arguments)
/** The actual mechanism used to run a launched application. */
def run(launcher: xsbti.Launcher)(config: RunConfiguration): xsbti.MainResult =
{
import config._
val appProvider: xsbti.AppProvider = launcher.app(app, orNull(scalaVersion)) // takes ~40 ms when no update is required
val appConfig: xsbti.AppConfiguration = new AppConfiguration(toArray(arguments), workingDirectory, appProvider)
// TODO - Jansi probably should be configurable via some other mechanism...
JAnsi.install(launcher.topLoader)
try {
val main = appProvider.newMain()
// On FullReload, optionally wipe the boot directory before rethrowing so the caller can restart cleanly.
try { withContextLoader(appProvider.loader)(main.run(appConfig)) }
catch { case e: xsbti.FullReload => if (e.clean) delete(launcher.bootDirectory); throw e }
} finally {
JAnsi.uninstall(launcher.topLoader)
}
}
/** Interprets the application's result: exit with a code, continue (None), or reboot (relaunch with the new parameters). */
final def launch(run: RunConfiguration => xsbti.MainResult)(config: RunConfiguration): Option[Int] =
{
run(config) match {
case e: xsbti.Exit => Some(e.code)
case c: xsbti.Continue => None
case r: xsbti.Reboot => launch(run)(new RunConfiguration(Option(r.scalaVersion), r.app, r.baseDirectory, r.arguments.toList))
case x => throw new BootException("Invalid main result: " + x + (if (x eq null) "" else " (class: " + x.getClass + ")"))
}
}
/** Runs `eval` with `loader` as the thread context class loader, restoring the previous loader afterwards. */
private[this] def withContextLoader[T](loader: ClassLoader)(eval: => T): T =
{
val oldLoader = Thread.currentThread.getContextClassLoader
Thread.currentThread.setContextClassLoader(loader)
try { eval } finally { Thread.currentThread.setContextClassLoader(oldLoader) }
}
// Cache of classes for lookup later.
val ServerMainClass = classOf[xsbti.ServerMain]
val AppMainClass = classOf[xsbti.AppMain]
}
/** Immutable description of a single launch: the Scala version to use (None = application default), the application id, its working directory, and its program arguments. */
final class RunConfiguration(val scalaVersion: Option[String], val app: xsbti.ApplicationID, val workingDirectory: File, val arguments: List[String])
import BootConfiguration.{ appDirectoryName, baseDirectoryName, extractScalaVersion, ScalaDirectoryName, TestLoadScalaClasses, ScalaOrg }
/** Implementation of xsbti.Launcher: manages the boot directory, retrieves Scala and application modules (via the project's Update/Ivy machinery), and builds their class loaders. */
class Launch private[xsbt] (val bootDirectory: File, val lockBoot: Boolean, val ivyOptions: IvyOptions) extends xsbti.Launcher {
import ivyOptions.{ checksums => checksumsList, classifiers, repositories }
bootDirectory.mkdirs
// Caches ScalaProviders keyed by (organization, version); the extra String is the retrieval "reason" used on a cache miss.
private val scalaProviders = new Cache[(String, String), String, xsbti.ScalaProvider]((x, y) => getScalaProvider(x._1, x._2, y))
def getScala(version: String): xsbti.ScalaProvider = getScala(version, "")
def getScala(version: String, reason: String): xsbti.ScalaProvider = getScala(version, reason, ScalaOrg)
def getScala(version: String, reason: String, scalaOrg: String) = scalaProviders((scalaOrg, version), reason)
def app(id: xsbti.ApplicationID, version: String): xsbti.AppProvider = app(id, Option(version))
def app(id: xsbti.ApplicationID, scalaVersion: Option[String]): xsbti.AppProvider =
getAppProvider(id, scalaVersion, false)
// Hides launcher-internal classes from the application's view of the parent loader.
val bootLoader = new BootFilteredLoader(getClass.getClassLoader)
// On Windows outside Cygwin, JAnsi is layered on top of the boot loader.
val topLoader = if (isWindows && !isCygwin) jansiLoader(bootLoader) else bootLoader
val updateLockFile = if (lockBoot) Some(new File(bootDirectory, "sbt.boot.lock")) else None
def globalLock: xsbti.GlobalLock = Locks
def ivyHome = orNull(ivyOptions.ivyHome)
def ivyRepositories = (repositories: List[xsbti.Repository]).toArray
// Repositories visible to applications: everything except boot-only ones.
def appRepositories = ((repositories filterNot (_.bootOnly)): List[xsbti.Repository]).toArray
def isOverrideRepositories: Boolean = ivyOptions.isOverrideRepositories
def checksums = checksumsList.toArray[String]
// JAnsi needs to be shared between Scala and the application so there aren't two competing versions
/** Builds a loader containing the JAnsi library, retrieving the module first if it is not already on disk.
* NOTE(review): the `parent` parameter and local `configuration` appear unused — the loader is built over bootLoader directly; confirm intended. */
def jansiLoader(parent: ClassLoader): ClassLoader =
{
val id = AppID("org.fusesource.jansi", "jansi", JAnsiVersion, "", toArray(Nil), xsbti.CrossValue.Disabled, array())
val configuration = makeConfiguration(ScalaOrg, None)
val jansiHome = appDirectory(new File(bootDirectory, baseDirectoryName(ScalaOrg, None)), id)
val module = appModule(id, None, false, "jansi")
def makeLoader(): ClassLoader = {
val urls = toURLs(wrapNull(jansiHome.listFiles(JarFilter)))
val loader = new URLClassLoader(urls, bootLoader)
checkLoader(loader, module, "org.fusesource.jansi.internal.WindowsSupport" :: Nil, loader)
}
val existingLoader =
if (jansiHome.exists)
try Some(makeLoader()) catch { case e: Exception => None }
else
None
existingLoader getOrElse {
update(module, "")
makeLoader()
}
}
/** Returns `ifValid` when every class in `testClasses` loads through `loader`; otherwise reports the module as corrupt. */
def checkLoader[T](loader: ClassLoader, module: ModuleDefinition, testClasses: Seq[String], ifValid: T): T =
{
val missing = getMissing(loader, testClasses)
if (missing.isEmpty)
ifValid
else
module.retrieveCorrupt(missing)
}
private[this] def makeConfiguration(scalaOrg: String, version: Option[String]): UpdateConfiguration =
new UpdateConfiguration(bootDirectory, ivyOptions.ivyHome, scalaOrg, version, repositories, checksumsList)
/** Resolves the application module under the boot lock (see `locked`). */
final def getAppProvider(id: xsbti.ApplicationID, explicitScalaVersion: Option[String], forceAppUpdate: Boolean): xsbti.AppProvider =
locked(new Callable[xsbti.AppProvider] { def call = getAppProvider0(id, explicitScalaVersion, forceAppUpdate) })
// Resolves the app from disk or via update; if the resolved loader is missing classes and the
// module was not freshly retrieved, retries exactly once with a forced update (the tail call).
@tailrec private[this] final def getAppProvider0(id: xsbti.ApplicationID, explicitScalaVersion: Option[String], forceAppUpdate: Boolean): xsbti.AppProvider =
{
val app = appModule(id, explicitScalaVersion, true, "app")
/** Replace the version of an ApplicationID with the given one, if set. */
def resolveId(appVersion: Option[String], id: xsbti.ApplicationID) = appVersion map { v =>
import id._
AppID(groupID(), name(), v, mainClass(), mainComponents(), crossVersionedValue(), classpathExtra())
} getOrElse id
val baseDirs = (resolvedVersion: Option[String]) => (base: File) => appBaseDirs(base, resolveId(resolvedVersion, id))
def retrieve() = {
val (appv, sv) = update(app, "")
val scalaVersion = strictOr(explicitScalaVersion, sv)
new RetrievedModule(true, app, sv, appv, baseDirs(appv)(scalaHome(ScalaOrg, scalaVersion)))
}
val retrievedApp =
if (forceAppUpdate)
retrieve()
else
existing(app, ScalaOrg, explicitScalaVersion, baseDirs(None)) getOrElse retrieve()
val scalaVersion = getOrError(strictOr(explicitScalaVersion, retrievedApp.detectedScalaVersion), "No Scala version specified or detected")
val scalaProvider = getScala(scalaVersion, "(for " + id.name + ")")
val resolvedId = resolveId(retrievedApp.resolvedAppVersion, id)
val (missing, appProvider) = checkedAppProvider(resolvedId, retrievedApp, scalaProvider)
if (missing.isEmpty)
appProvider
else if (retrievedApp.fresh)
app.retrieveCorrupt(missing)
else
getAppProvider0(resolvedId, explicitScalaVersion, true)
}
/** Base directory holding the given Scala organization/version inside the boot directory. */
def scalaHome(scalaOrg: String, scalaVersion: Option[String]): File = new File(bootDirectory, baseDirectoryName(scalaOrg, scalaVersion))
/** Directory holding the given application under the corresponding Scala home. */
def appHome(id: xsbti.ApplicationID, scalaVersion: Option[String]): File = appDirectory(scalaHome(ScalaOrg, scalaVersion), id)
/** Builds an AppProvider and reports which of its main classes (if any) fail to load. */
def checkedAppProvider(id: xsbti.ApplicationID, module: RetrievedModule, scalaProvider: xsbti.ScalaProvider): (Iterable[String], xsbti.AppProvider) =
{
val p = appProvider(id, module, scalaProvider, appHome(id, Some(scalaProvider.version)))
val missing = getMissing(p.loader, id.mainClass :: Nil)
(missing, p)
}
// Serializes boot-directory mutations on the update lock file (no lock file => no locking).
private[this] def locked[T](c: Callable[T]): T = Locks(orNull(updateLockFile), c)
def getScalaProvider(scalaOrg: String, scalaVersion: String, reason: String): xsbti.ScalaProvider =
locked(new Callable[xsbti.ScalaProvider] { def call = getScalaProvider0(scalaOrg, scalaVersion, reason) })
// Resolves a Scala distribution from disk if present and loadable; otherwise retrieves it via update.
private[this] final def getScalaProvider0(scalaOrg: String, scalaVersion: String, reason: String) =
{
val scalaM = scalaModule(scalaOrg, scalaVersion)
val (scalaHome, lib) = scalaDirs(scalaM, scalaOrg, scalaVersion)
val baseDirs = lib :: Nil
def provider(retrieved: RetrievedModule): xsbti.ScalaProvider = {
val p = scalaProvider(scalaVersion, retrieved, topLoader, lib)
checkLoader(p.loader, retrieved.definition, TestLoadScalaClasses, p)
}
existing(scalaM, scalaOrg, Some(scalaVersion), _ => baseDirs) flatMap { mod =>
try Some(provider(mod))
catch { case e: Exception => None }
} getOrElse {
val (_, scalaVersion) = update(scalaM, reason)
provider(new RetrievedModule(true, scalaM, scalaVersion, baseDirs))
}
}
/** Looks for an already-retrieved copy of `module` on disk, optionally restricted to a specific Scala version's directory. */
def existing(module: ModuleDefinition, scalaOrg: String, explicitScalaVersion: Option[String], baseDirs: File => List[File]): Option[RetrievedModule] =
{
val filter = new java.io.FileFilter {
val explicitName = explicitScalaVersion.map(sv => baseDirectoryName(scalaOrg, Some(sv)))
def accept(file: File) = file.isDirectory && explicitName.forall(_ == file.getName)
}
val retrieved = wrapNull(bootDirectory.listFiles(filter)) flatMap { scalaDir =>
val appDir = directory(scalaDir, module.target)
if (appDir.exists)
new RetrievedModule(false, module, extractScalaVersion(scalaDir), baseDirs(scalaDir)) :: Nil
else
Nil
}
retrieved.headOption
}
/** Directory on disk where an update target lives relative to its Scala directory. */
def directory(scalaDir: File, target: UpdateTarget): File = target match {
case _: UpdateScala => scalaDir
case ua: UpdateApp => appDirectory(scalaDir, ua.id.toID)
}
/** The application directory plus the locations of its declared main components. */
def appBaseDirs(scalaHome: File, id: xsbti.ApplicationID): List[File] =
{
val appHome = appDirectory(scalaHome, id)
val components = componentProvider(appHome)
appHome :: id.mainComponents.map(components.componentLocation).toList
}
def appDirectory(base: File, id: xsbti.ApplicationID): File =
new File(base, appDirectoryName(id, File.separator))
/** Returns (scalaHome, libDirectory) for the given Scala organization and version. */
def scalaDirs(module: ModuleDefinition, scalaOrg: String, scalaVersion: String): (File, File) =
{
val scalaHome = new File(bootDirectory, baseDirectoryName(scalaOrg, Some(scalaVersion)))
val libDirectory = new File(scalaHome, ScalaDirectoryName)
(scalaHome, libDirectory)
}
/** Wraps a retrieved application module as an xsbti.AppProvider, resolving its entry point lazily. */
def appProvider(appID: xsbti.ApplicationID, app: RetrievedModule, scalaProvider0: xsbti.ScalaProvider, appHome: File): xsbti.AppProvider =
new xsbti.AppProvider {
import Launch.{ ServerMainClass, AppMainClass }
val scalaProvider = scalaProvider0
val id = appID
def mainClasspath = app.fullClasspath
lazy val loader = app.createLoader(scalaProvider.loader)
// TODO - For some reason we can't call this from vanilla scala. We get a
// no such method exception UNLESS we're in the same project.
lazy val entryPoint: Class[T] forSome { type T } =
{
val c = Class.forName(id.mainClass, true, loader)
if (classOf[xsbti.AppMain].isAssignableFrom(c)) c
else if (PlainApplication.isPlainApplication(c)) c
else if (ServerApplication.isServerApplication(c)) c
else sys.error(s"${c} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have one of these static methods:\\n" +
" * void main(String[] args)\\n * int main(String[] args)\\n * xsbti.Exit main(String[] args)\\n")
}
// Deprecated API. Remove when we can.
def mainClass: Class[T] forSome { type T <: xsbti.AppMain } = entryPoint.asSubclass(AppMainClass)
// Adapts whichever entry-point style the class uses to the xsbti.AppMain interface.
def newMain(): xsbti.AppMain = {
if (ServerApplication.isServerApplication(entryPoint)) ServerApplication(this)
else if (PlainApplication.isPlainApplication(entryPoint)) PlainApplication(entryPoint)
else if (AppMainClass.isAssignableFrom(entryPoint)) mainClass.newInstance
else throw new IncompatibleClassChangeError(s"Main class ${entryPoint.getName} is not an instance of xsbti.AppMain, xsbti.ServerMain nor does it have a valid `main` method.")
}
lazy val components = componentProvider(appHome)
}
def componentProvider(appHome: File) = new ComponentProvider(appHome, lockBoot)
/** Wraps a retrieved Scala distribution as an xsbti.ScalaProvider. */
def scalaProvider(scalaVersion: String, module: RetrievedModule, parentLoader: ClassLoader, scalaLibDir: File): xsbti.ScalaProvider = new xsbti.ScalaProvider {
def launcher = Launch.this
def version = scalaVersion
lazy val loader = module.createLoader(parentLoader)
def compilerJar = new File(scalaLibDir, CompilerModuleName + ".jar")
def libraryJar = new File(scalaLibDir, LibraryModuleName + ".jar")
def jars = module.fullClasspath
def app(id: xsbti.ApplicationID) = Launch.this.app(id, Some(scalaVersion))
}
/** Module definition for retrieving an application (optionally with classifiers). */
def appModule(id: xsbti.ApplicationID, scalaVersion: Option[String], getClassifiers: Boolean, tpe: String): ModuleDefinition = new ModuleDefinition(
configuration = makeConfiguration(ScalaOrg, scalaVersion),
target = new UpdateApp(Application(id), if (getClassifiers) Value.get(classifiers.app) else Nil, tpe),
failLabel = id.name + " " + id.version,
extraClasspath = id.classpathExtra
)
/** Module definition for retrieving a Scala distribution. */
def scalaModule(org: String, version: String): ModuleDefinition = new ModuleDefinition(
configuration = makeConfiguration(org, Some(version)),
target = new UpdateScala(Value.get(classifiers.forScala)),
failLabel = "Scala " + version,
extraClasspath = array()
)
/** Returns the resolved appVersion (if this was an App), as well as the scalaVersion. */
def update(mm: ModuleDefinition, reason: String): (Option[String], Option[String]) =
{
val result = (new Update(mm.configuration))(mm.target, reason)
if (result.success) result.appVersion -> result.scalaVersion else mm.retrieveFailed
}
}
/** Factory methods for creating configured Launch instances. */
object Launcher {
/** Creates a launcher with default Ivy options over the given repositories. */
def apply(bootDirectory: File, repositories: List[Repository.Repository]): xsbti.Launcher =
apply(bootDirectory, IvyOptions(None, Classifiers(Nil, Nil), repositories, BootConfiguration.DefaultChecksums, false))
def apply(bootDirectory: File, ivyOptions: IvyOptions): xsbti.Launcher =
apply(bootDirectory, ivyOptions, GetLocks.find)
/** Creates a boot-locked launcher whose global lock is the one provided. */
def apply(bootDirectory: File, ivyOptions: IvyOptions, locks: xsbti.GlobalLock): xsbti.Launcher =
new Launch(bootDirectory, true, ivyOptions) {
override def globalLock = locks
}
/** Creates a launcher from a fully parsed and resolved launch configuration. */
def apply(explicit: LaunchConfiguration): xsbti.Launcher =
new Launch(explicit.boot.directory, explicit.boot.lock, explicit.ivyConfiguration)
/** Resolves the application described by the configuration found on the classpath. */
def defaultAppProvider(baseDirectory: File): xsbti.AppProvider = getAppProvider(baseDirectory, Configuration.configurationOnClasspath)
/** Parses and resolves the configuration at `configLocation`, then resolves its application. */
def getAppProvider(baseDirectory: File, configLocation: URL): xsbti.AppProvider =
{
val parsed = ResolvePaths(Configuration.parse(configLocation, baseDirectory), baseDirectory)
Initialize.process(parsed.boot.properties, parsed.appProperties, Initialize.selectQuick)
val config = ResolveValues(parsed)
val launcher = apply(config)
launcher.app(config.app.toID, orNull(config.getScalaVersion))
}
}
/** Stores and retrieves application components as subdirectories of `baseDirectory`. */
class ComponentProvider(baseDirectory: File, lockBoot: Boolean) extends xsbti.ComponentProvider {
  /** Directory holding the files of component `id`. */
  def componentLocation(id: String): File = new File(baseDirectory, id)

  /** The plain files currently defining component `id` (empty when undefined). */
  def component(id: String) = wrapNull(componentLocation(id).listFiles).filter(_.isFile)

  /** Creates component `id` from `files`; fails if the component already exists. */
  def defineComponent(id: String, files: Array[File]) = {
    val target = componentLocation(id)
    if (!target.exists)
      Copy(files.toList, target)
    else
      throw new BootException("Cannot redefine component. ID: " + id + ", files: " + files.mkString(","))
  }

  /** Copies `files` into component `id`, creating it if needed. */
  def addToComponent(id: String, files: Array[File]): Boolean =
    Copy(files.toList, componentLocation(id))

  /** Lock file guarding component updates, or null when boot locking is disabled (for the Java interface). */
  def lockFile = if (lockBoot) ComponentProvider.lockFile(baseDirectory) else null // null for the Java interface
}
/** Companion holding the shared location of the component lock file. */
object ComponentProvider {
  private val LockName = "sbt.components.lock"

  /** Lock file used to serialize component updates; ensures `baseDirectory` exists first. */
  def lockFile(baseDirectory: File) = {
    baseDirectory.mkdirs()
    new File(baseDirectory, LockName)
  }
}
|
jaceklaskowski/sbt
|
launch/src/main/scala/xsbt/boot/Launch.scala
|
Scala
|
bsd-3-clause
| 20,267 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.benchmark
import scala.collection.mutable.ListBuffer
import org.apache.spark.benchmark.Benchmark
import org.apache.spark.sql.Column
import org.apache.spark.sql.SaveMode.Overwrite
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
/**
* Synthetic benchmark for interval functions.
* To run this benchmark:
* {{{
* 1. without sbt:
* bin/spark-submit --class <this class> --jars <spark core test jar> <sql core test jar>
* 2. build/sbt "sql/test:runMain <this class>"
* 3. generate result:
* SPARK_GENERATE_BENCHMARK_FILES=1 build/sbt "sql/test:runMain <this class>"
* Results will be written to "benchmarks/IntervalBenchmark-results.txt".
* }}}
*/
/** Micro-benchmark for casting strings to intervals (see the file header for run instructions). */
object IntervalBenchmark extends SqlBasedBenchmark {
  import spark.implicits._

  /** Evaluates `exprs` over `cardinality` rows with whole-stage codegen enabled,
    * discarding the output through the "noop" datasource. */
  private def doBenchmark(cardinality: Long, exprs: Column*): Unit = {
    withSQLConf(SQLConf.WHOLESTAGE_CODEGEN_ENABLED.key -> "true") {
      val projected = spark.range(0, cardinality, 1, 1).select(exprs: _*)
      projected.write.format("noop").mode(Overwrite).save()
    }
  }

  /** Registers one named case evaluating `exprs` over `cardinality` rows. */
  private def addCase(
      benchmark: Benchmark,
      cardinality: Long,
      name: String,
      exprs: Column*): Unit = {
    benchmark.addCase(name, numIters = 3) { _ =>
      doBenchmark(cardinality, exprs: _*)
    }
  }

  /** Builds an interval-like string column: optional "interval" prefix, a year count
    * derived from `id`, then the given unit fragments, joined by spaces. */
  private def buildString(withPrefix: Boolean, units: Seq[String] = Seq.empty): Column = {
    val prefix = if (withPrefix) "interval" else ""
    val pieces = lit(prefix) :: ($"id" % 10000).cast("string") :: lit("years") :: units.map(lit).toList
    concat_ws(" ", pieces: _*)
  }

  /** Registers two cases casting the built string to an interval:
    * with and without the leading "interval" keyword. */
  private def addCase(benchmark: Benchmark, cardinality: Long, units: Seq[String]): Unit = {
    for (withPrefix <- Seq(true, false)) {
      val castExpr = buildString(withPrefix, units).cast("interval")
      val note = if (withPrefix) "w/ interval" else "w/o interval"
      benchmark.addCase(s"${units.length + 1} units $note", numIters = 3) { _ =>
        doBenchmark(cardinality, castExpr)
      }
    }
  }

  override def runBenchmarkSuite(mainArgs: Array[String]): Unit = {
    val N = 1000000
    val timeUnits = Seq(
      "13 months", " 1 months",
      "100 weeks", "9 days", "12 hours", "- 3 hours",
      "5 minutes", "45 seconds", "123 milliseconds", "567 microseconds")
    val unitsSoFar = ListBuffer[String]()
    val benchmark = new Benchmark("cast strings to intervals", N, output = output)
    // The first 2 cases are used to show the overhead of preparing the interval string.
    addCase(benchmark, N, "prepare string w/ interval", buildString(true, timeUnits))
    addCase(benchmark, N, "prepare string w/o interval", buildString(false, timeUnits))
    addCase(benchmark, N, unitsSoFar) // Only years
    // Grow the unit list one fragment at a time, benchmarking each prefix of timeUnits.
    timeUnits.foreach { unit =>
      unitsSoFar += unit
      addCase(benchmark, N, unitsSoFar)
    }
    benchmark.run()
  }
}
|
caneGuy/spark
|
sql/core/src/test/scala/org/apache/spark/sql/execution/benchmark/IntervalBenchmark.scala
|
Scala
|
apache-2.0
| 3,775 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.recommendation
import scala.collection.JavaConversions._
import scala.util.Random
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
import org.jblas._
import org.apache.spark.mllib.util.LocalSparkContext
object ALSSuite {
  /** Java-friendly wrapper around [[generateRatings]] returning a java.util.List of ratings. */
  def generateRatingsAsJavaList(
      users: Int,
      products: Int,
      features: Int,
      samplingRate: Double,
      implicitPrefs: Boolean): (java.util.List[Rating], DoubleMatrix, DoubleMatrix) = {
    val (sampled, trueRatings, truePrefs) =
      generateRatings(users, products, features, samplingRate, implicitPrefs)
    (seqAsJavaList(sampled), trueRatings, truePrefs)
  }

  /** Generates a synthetic rating matrix of known rank `features` plus a sampled
    * subset of its entries. Returns (sampled ratings, full true ratings,
    * true preferences — null unless `implicitPrefs`). */
  def generateRatings(
      users: Int,
      products: Int,
      features: Int,
      samplingRate: Double,
      implicitPrefs: Boolean = false): (Seq[Rating], DoubleMatrix, DoubleMatrix) = {
    val rng = new Random(42)
    // Uniform values in [-1, 1). NOTE: the call order below fixes the RNG stream,
    // so it must not be rearranged.
    def uniformMatrix(rows: Int, cols: Int) =
      new DoubleMatrix(rows, cols, Array.fill(rows * cols)(rng.nextDouble() * 2 - 1): _*)
    val userMatrix = uniformMatrix(users, features)
    val productMatrix = uniformMatrix(features, products)
    val (trueRatings, truePrefs) =
      if (implicitPrefs) {
        // Raw counts in [0, 9]; the binary preference is 1.0 wherever the count is positive.
        val raw = new DoubleMatrix(users, products,
          Array.fill(users * products)(rng.nextInt(10).toDouble): _*)
        val prefs = new DoubleMatrix(users, products, raw.data.map(v => if (v > 0) 1.0 else 0.0): _*)
        (raw, prefs)
      } else {
        (userMatrix.mmul(productMatrix), null)
      }
    // Keep each (user, product) pair with probability samplingRate.
    val sampledRatings =
      for {
        u <- 0 until users
        p <- 0 until products
        if rng.nextDouble() < samplingRate
      } yield Rating(u, p, trueRatings.get(u, p))
    (sampledRatings, trueRatings, truePrefs)
  }
}
// End-to-end tests for ALS matrix factorization: factorize a synthetic matrix
// of known rank and check that the learned factors reproduce it.
class ALSSuite extends FunSuite with LocalSparkContext {
  test("rank-1 matrices") {
    testALS(50, 100, 1, 15, 0.7, 0.3)
  }
  test("rank-1 matrices bulk") {
    testALS(50, 100, 1, 15, 0.7, 0.3, false, true)
  }
  test("rank-2 matrices") {
    testALS(100, 200, 2, 15, 0.7, 0.3)
  }
  test("rank-2 matrices bulk") {
    testALS(100, 200, 2, 15, 0.7, 0.3, false, true)
  }
  test("rank-1 matrices implicit") {
    testALS(80, 160, 1, 15, 0.7, 0.4, true)
  }
  test("rank-1 matrices implicit bulk") {
    testALS(80, 160, 1, 15, 0.7, 0.4, true, true)
  }
  test("rank-2 matrices implicit") {
    testALS(100, 200, 2, 15, 0.7, 0.4, true)
  }
  test("rank-2 matrices implicit bulk") {
    testALS(100, 200, 2, 15, 0.7, 0.4, true, true)
  }
  /**
   * Test if we can correctly factorize R = U * P where U and P are of known rank.
   *
   * @param users number of users
   * @param products number of products
   * @param features number of features (rank of problem)
   * @param iterations number of iterations to run
   * @param samplingRate what fraction of the user-product pairs are known
   * @param matchThreshold max difference allowed to consider a predicted rating correct
   * @param implicitPrefs flag to test implicit feedback
   * @param bulkPredict flag to test bulk prediction
   */
  def testALS(users: Int, products: Int, features: Int, iterations: Int,
    samplingRate: Double, matchThreshold: Double, implicitPrefs: Boolean = false,
    bulkPredict: Boolean = false)
  {
    val (sampledRatings, trueRatings, truePrefs) = ALSSuite.generateRatings(users, products,
      features, samplingRate, implicitPrefs)
    val model = implicitPrefs match {
      case false => ALS.train(sc.parallelize(sampledRatings), features, iterations)
      case true => ALS.trainImplicit(sc.parallelize(sampledRatings), features, iterations)
    }
    // Reassemble the learned user-factor matrix from the (id, vector) pairs.
    val predictedU = new DoubleMatrix(users, features)
    for ((u, vec) <- model.userFeatures.collect(); i <- 0 until features) {
      predictedU.put(u, i, vec(i))
    }
    // Same for the product factors.
    val predictedP = new DoubleMatrix(products, features)
    for ((p, vec) <- model.productFeatures.collect(); i <- 0 until features) {
      predictedP.put(p, i, vec(i))
    }
    // Either multiply the factors locally, or exercise the model's bulk
    // predict API over every (user, product) pair.
    val predictedRatings = bulkPredict match {
      case false => predictedU.mmul(predictedP.transpose)
      case true =>
        val allRatings = new DoubleMatrix(users, products)
        val usersProducts = for (u <- 0 until users; p <- 0 until products) yield (u, p)
        val userProductsRDD = sc.parallelize(usersProducts)
        model.predict(userProductsRDD).collect().foreach { elem =>
          allRatings.put(elem.user, elem.product, elem.rating)
        }
        allRatings
    }
    if (!implicitPrefs) {
      // Explicit feedback: every entry must lie within matchThreshold of truth.
      for (u <- 0 until users; p <- 0 until products) {
        val prediction = predictedRatings.get(u, p)
        val correct = trueRatings.get(u, p)
        if (math.abs(prediction - correct) > matchThreshold) {
          fail("Model failed to predict (%d, %d): %f vs %f\\ncorr: %s\\npred: %s\\nU: %s\\n P: %s".format(
            u, p, correct, prediction, trueRatings, predictedRatings, predictedU, predictedP))
        }
      }
    } else {
      // For implicit prefs we use the confidence-weighted RMSE to test (ref Mahout's tests)
      var sqErr = 0.0
      var denom = 0.0
      for (u <- 0 until users; p <- 0 until products) {
        val prediction = predictedRatings.get(u, p)
        val truePref = truePrefs.get(u, p)
        val confidence = 1 + 1.0 * trueRatings.get(u, p)
        val err = confidence * (truePref - prediction) * (truePref - prediction)
        sqErr += err
        denom += 1
      }
      val rmse = math.sqrt(sqErr / denom)
      if (math.abs(rmse) > matchThreshold) {
        fail("Model failed to predict RMSE: %f\\ncorr: %s\\npred: %s\\nU: %s\\n P: %s".format(
          rmse, truePrefs, predictedRatings, predictedU, predictedP))
      }
    }
  }
}
|
dotunolafunmiloye/spark
|
mllib/src/test/scala/org/apache/spark/mllib/recommendation/ALSSuite.scala
|
Scala
|
apache-2.0
| 6,554 |
package lila.hub
import akka.actor._
import com.softwaremill.macwire._
import com.typesafe.config.Config
import play.api.Configuration
// One wrapper type per remote actor dependency, so wiring mistakes surface as
// type errors instead of messages sent to the wrong ActorSelection.
object actors {
  trait Actor {
    val actor: ActorSelection
    // Forward `!` so callers can tell the wrapper directly: wrapper ! msg
    val ! = actor ! _
  }
  case class GameSearch(actor: ActorSelection) extends Actor
  case class ForumSearch(actor: ActorSelection) extends Actor
  case class TeamSearch(actor: ActorSelection) extends Actor
  case class Fishnet(actor: ActorSelection) extends Actor
  case class TournamentApi(actor: ActorSelection) extends Actor
  case class Bookmark(actor: ActorSelection) extends Actor
  case class Shutup(actor: ActorSelection) extends Actor
  case class Notification(actor: ActorSelection) extends Actor
  case class Timeline(actor: ActorSelection) extends Actor
  case class Report(actor: ActorSelection) extends Actor
  case class Renderer(actor: ActorSelection) extends Actor
  case class Captcher(actor: ActorSelection) extends Actor
}
// Wires up typed ActorSelection handles for the application's actors; the
// actual actor names are read from the "hub" configuration block.
@Module
final class Env(
    appConfig: Configuration,
    system: ActorSystem
) {
  import actors._
  private val config = appConfig.get[Config]("hub")
  val gameSearch = GameSearch(select("actor.game.search"))
  val renderer = Renderer(select("actor.renderer"))
  val captcher = Captcher(select("actor.captcher"))
  val forumSearch = ForumSearch(select("actor.forum.search"))
  val teamSearch = TeamSearch(select("actor.team.search"))
  val fishnet = Fishnet(select("actor.fishnet"))
  val tournamentApi = TournamentApi(select("actor.tournament.api"))
  val timeline = Timeline(select("actor.timeline.user"))
  val bookmark = Bookmark(select("actor.bookmark"))
  val report = Report(select("actor.report"))
  val shutup = Shutup(select("actor.shutup"))
  val notification = Notification(select("actor.notify"))
  // Looks up the configured actor name and resolves it under the user guardian.
  private def select(name: String) =
    system.actorSelection("/user/" + config.getString(name))
}
|
luanlv/lila
|
modules/hub/src/main/Env.scala
|
Scala
|
mit
| 1,962 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package scalaguide.detailed.filters.csp
// #scala-csp-dynamic-action
package controllers {
import akka.stream.Materializer
import javax.inject._
import play.api.mvc._
import play.filters.csp._
import scala.concurrent.ExecutionContext
// Custom CSP action
  // ActionBuilder that rebuilds the CSP header on each use, appending the asset
  // cache's current hash digests to the script-src directive.
  class AssetAwareCSPActionBuilder @Inject() (bodyParsers: PlayBodyParsers,
                                              cspConfig: CSPConfig,
                                              assetCache: AssetCache)(
      implicit
      override protected val executionContext: ExecutionContext,
      override protected val mat: Materializer)
      extends CSPActionBuilder {
    override def parser: BodyParser[AnyContent] = bodyParsers.default
    // processor with dynamically generated config
    override protected def cspResultProcessor: CSPResultProcessor = {
      // Only script-src is modified; every other directive passes through.
      // NOTE(review): no separator between the existing directive value and the
      // first digest — presumably this should be value + " " + digests unless
      // CSPDirective values carry a trailing space; confirm.
      val modifiedDirectives: Seq[CSPDirective] = cspConfig.directives.map {
        case CSPDirective(name, value) if name == "script-src" =>
          CSPDirective(name, value + assetCache.cspDigests.mkString(" "))
        case csp: CSPDirective =>
          csp
      }
      CSPResultProcessor(CSPProcessor(cspConfig.copy(directives = modifiedDirectives)))
    }
  }
// Dummy class that can have a dynamically changing list of csp-hashes
class AssetCache {
def cspDigests: Seq[String] = {
Seq(
"sha256-HELLO",
"sha256-WORLD"
)
}
}
  // Example controller whose index action emits the dynamically built CSP header.
  class HomeController @Inject()(cc: ControllerComponents,
                                 myCSPAction: AssetAwareCSPActionBuilder)
      extends AbstractController(cc) {
    def index() = myCSPAction {
      Ok("I have an asset aware header!")
    }
  }
}
import com.google.inject.AbstractModule
// Guice module registering the CSP helpers as eager singletons so the action
// builder is available for injection at startup.
class CSPModule extends AbstractModule {
  override def configure(): Unit = {
    bind(classOf[controllers.AssetCache]).asEagerSingleton()
    bind(classOf[controllers.AssetAwareCSPActionBuilder]).asEagerSingleton()
  }
}
// #scala-csp-dynamic-action
|
Shenker93/playframework
|
documentation/manual/working/commonGuide/filters/code/scalaguide/detailed/filters/csp/DynamicCSPAction.scala
|
Scala
|
apache-2.0
| 2,037 |
// Wei Chen - Random Cut Tree
// 2022-03-04
package com.scalaml.algorithm
/** Random Cut Tree anomaly detector: recursively cuts the data along a random
  * column (chosen with probability proportional to the column's value range) at
  * a uniformly random point; anomalous points isolate at shallow depth.
  */
class RandomCutTree() extends Abnormal {
    val algoname: String = "RandomCutTree"
    val version: String = "0.1"

    // Maximum recursion depth of the tree; configurable via `config`.
    var maxLayer = 5
    // Root of the trained tree; null until `train` succeeds.
    var tree: DecisionNode = null

    /** Resets the model to its untrained default state.
      * Fix: previously `clear()` only reset `maxLayer` and left the trained
      * tree in place, so a "cleared" model kept serving stale predictions.
      */
    override def clear(): Boolean = {
        maxLayer = 5
        tree = null
        true
    }

    /** Configures hyper-parameters. Supported key: "maxLayer" (a Double,
      * truncated to Int). Returns false (and logs) on a malformed value.
      */
    override def config(paras: Map[String, Any]): Boolean = try {
        maxLayer = paras.getOrElse("maxLayer", 5.0).asInstanceOf[Double].toInt
        true
    } catch { case e: Exception =>
        Console.err.println(e)
        false
    }

    /** Recursively builds the tree over `data` (rows = points, cols = features). */
    private def buildtree(data: Array[Array[Double]], layer: Int = 0): DecisionNode = {
        val dataSize: Int = data.size
        val columnSize: Int = data.head.size
        // (min, max) per column.
        val colMinMax = (0 until columnSize).map { col =>
            val colData = data.map(d => d(col))
            (colData.min, colData.max)
        }
        // Pick a cut column with probability proportional to its value range:
        // draw a point in [0, total range) and walk the columns until it falls
        // inside one of them.
        val baseSum = colMinMax.foldLeft(0.0)((a, b) => a + b._2 - b._1)
        var baseValue: Double = baseSum * scala.util.Random.nextDouble()
        var bestColumn: Int = 0
        for (col <- 0 until columnSize) {
            val (colMin, colMax) = colMinMax(col)
            val colRange = colMax - colMin
            if (baseValue > 0 && colRange > baseValue) {
                bestColumn = col
            }
            baseValue -= colRange
        }
        // Uniform cut point within the chosen column's range.
        val (minV, maxV) = colMinMax(bestColumn)
        val value = (maxV - minV) * scala.util.Random.nextDouble() + minV
        val (tData, fData) = data.partition { d =>
            d(bestColumn) >= value
        }
        if (tData.size > 0 && fData.size > 0 && layer < maxLayer) {
            // Both sides non-empty and depth budget left: keep cutting.
            val tnode = buildtree(tData, layer + 1)
            val fnode = buildtree(fData, layer + 1)
            new DecisionNode(bestColumn, value, tnode, fnode)
        } else new DecisionNode(0, 0, null, null, layer)  // leaf records its depth
    }

    /** Trains on `data`; returns false (and logs) on failure. */
    override def train(data: Array[Array[Double]]): Boolean = try {
        tree = buildtree(data)
        true
    } catch { case e: Exception =>
        Console.err.println(e)
        false
    }

    /** Scores each point via the trained tree.
      * NOTE(review): throws NullPointerException when called before a
      * successful `train` (tree is null) — confirm callers guard this.
      */
    override def predict(x: Array[Array[Double]]): Array[Double] = x.map(xi => tree.predict(xi))
}
|
Wei-1/Scala-Machine-Learning
|
src/main/scala/algorithm/abnormal/RandomCutTree.scala
|
Scala
|
mit
| 2,174 |
package io.livingston.ditto
import com.twitter.util.{Await, Future}
import com.typesafe.scalalogging.LazyLogging
// Entry point: loads one responder per configured protocol, then parks the
// main thread forever (the responders do the actual work).
object Ditto extends App with DittoSettings with LazyLogging {
  // yamlConfigs comes from DittoSettings: (protocol name -> yaml config) pairs.
  yamlConfigs.foreach { case (protocol, yaml) =>
    LoadResponder.load(protocol, yaml)
  }
  // Future.never never completes, so this blocks indefinitely.
  Await.ready(Future.never)
}
|
scottlivingston/ditto
|
src/main/scala/io/livingston/ditto/Ditto.scala
|
Scala
|
mit
| 302 |
import scala.quoted._
// Regression test (dotty i8045): splicing an Expr built from a path-dependent
// qctx.tasty.Tree inside a quote must typecheck.
object Test
  def run(using qctx: QuoteContext)(tree: qctx.tasty.Tree): Unit =
    '{ ${ makeExpr(tree) } + 1 }
  def makeExpr(using qctx: QuoteContext)(tree: qctx.tasty.Tree): Expr[Int] = ???
|
som-snytt/dotty
|
tests/pos/i8045.scala
|
Scala
|
apache-2.0
| 217 |
package org.http4s
import scala.collection.concurrent.TrieMap
import scalaz._
import org.http4s.parser.Rfc2616BasicRules
import org.http4s.util.{Writer, Renderable}
import Method.Semantics
/**
* An HTTP method.
*
* @see [http://tools.ietf.org/html/rfc7231#section-4 RFC7321, Section 4]
* @see [http://www.iana.org/assignments/http-methods/http-methods.xhtml IANA HTTP Method Registry]
*/
sealed abstract case class Method private (name: String) extends Renderable with Semantics {
  // Renders the method as its raw token name (e.g. "GET") into the writer.
  final override def render(writer: Writer): writer.type = writer << name
}
object Method extends MethodInstances {
  // Caller-relevant semantics of a method per RFC 7231 §4.2: safe methods are
  // read-only; idempotent methods may be retried.
  sealed trait Semantics {
    def isIdempotent: Boolean
    def isSafe: Boolean
  }
  object Semantics {
    trait Default extends Semantics {
      def isIdempotent: Boolean = false
      def isSafe: Boolean = false
    }
    trait Idempotent extends Semantics {
      def isIdempotent: Boolean = true
      def isSafe: Boolean = false
    }
    trait Safe extends Semantics {
      def isIdempotent: Boolean = true
      def isSafe: Boolean = true
    }
  }
  // Type tags for a method allowing a body or not
  sealed trait PermitsBody extends Method
  sealed trait NoBody extends Method
  // Resolves a method token: registered methods are returned as-is; unknown
  // tokens are validated against RFC 2616's token grammar and wrapped with
  // Default semantics, failing with a ParseFailure on an invalid token.
  def fromString(s: String): ParseResult[Method] =
    registry.getOrElse(s, Rfc2616BasicRules.token(s).bimap(
      e => ParseFailure("Invalid method", e.details),
      new Method(_) with Semantics.Default
    ))
  import Semantics._
  // Lookups will usually be on fromString, so we store it wrapped in a \/-
  private val registry = TrieMap[String, \/-[Method]]()
  private def register[M <: Method](method: M): method.type = {
    registry(method.name) = \/-(method)
    method
  }
  // Snapshot of all registered methods.
  def registered: Iterable[Method] = registry.readOnlySnapshot().values.map(_.b)
  // TODO: find out the rest of the body permissions. http://www.iana.org/assignments/http-methods/http-methods.xhtml#methods
  val ACL = register(new Method("ACL") with Idempotent)
  val `BASELINE-CONTROL` = register(new Method("BASELINE-CONTROL") with Idempotent)
  val BIND = register(new Method("BIND") with Idempotent)
  val CHECKIN = register(new Method("CHECKIN") with Idempotent)
  val CHECKOUT = register(new Method("CHECKOUT") with Idempotent)
  val CONNECT = register(new Method("CONNECT") with Default with NoBody)
  val COPY = register(new Method("COPY") with Idempotent)
  val DELETE = register(new Method("DELETE") with Idempotent with NoBody)
  val GET = register(new Method("GET") with Safe with NoBody)
  val HEAD = register(new Method("HEAD") with Safe with NoBody)
  val LABEL = register(new Method("LABEL") with Idempotent with PermitsBody)
  val LINK = register(new Method("LINK") with Idempotent)
  val LOCK = register(new Method("LOCK") with Default)
  val MERGE = register(new Method("MERGE") with Idempotent)
  val MKACTIVITY = register(new Method("MKACTIVITY") with Idempotent)
  val MKCALENDAR = register(new Method("MKCALENDAR") with Idempotent)
  val MKCOL = register(new Method("MKCOL") with Idempotent)
  val MKREDIRECTREF = register(new Method("MKREDIRECTREF") with Idempotent)
  val MKWORKSPACE = register(new Method("MKWORKSPACE") with Idempotent)
  val MOVE = register(new Method("MOVE") with Idempotent)
  val OPTIONS = register(new Method("OPTIONS") with Safe with PermitsBody)
  val ORDERPATCH = register(new Method("ORDERPATCH") with Idempotent)
  val PATCH = register(new Method("PATCH") with Default with PermitsBody)
  val POST = register(new Method("POST") with Default with PermitsBody)
  val PROPFIND = register(new Method("PROPFIND") with Safe)
  val PROPPATCH = register(new Method("PROPPATCH") with Idempotent)
  val PUT = register(new Method("PUT") with Idempotent with PermitsBody)
  val REBIND = register(new Method("REBIND") with Idempotent)
  val REPORT = register(new Method("REPORT") with Safe)
  val SEARCH = register(new Method("SEARCH") with Safe)
  val TRACE = register(new Method("TRACE") with Safe with PermitsBody)
  val UNBIND = register(new Method("UNBIND") with Idempotent)
  val UNCHECKOUT = register(new Method("UNCHECKOUT") with Idempotent)
  val UNLINK = register(new Method("UNLINK") with Idempotent)
  val UNLOCK = register(new Method("UNLOCK") with Idempotent)
  val UPDATE = register(new Method("UPDATE") with Idempotent)
  val UPDATEREDIRECTREF = register(new Method("UPDATEREDIRECTREF") with Idempotent)
  val `VERSION-CONTROL` = register(new Method("VERSION-CONTROL") with Idempotent)
}
trait MethodInstances {
  // scalaz Show + Equal instance for Method (mixed into Method's companion so
  // it is found in implicit scope).
  implicit val MethodInstances = new Show[Method] with Equal[Method] {
    override def shows(f: Method): String = f.toString
    override def equal(a1: Method, a2: Method): Boolean = a1 == a2
  }
}
|
hvesalai/http4s
|
core/src/main/scala/org/http4s/Method.scala
|
Scala
|
apache-2.0
| 5,484 |
package com.cave.metrics.data.evaluator
import java.security.MessageDigest
import org.joda.time.LocalTime
import scala.concurrent.duration._
import scala.util.parsing.combinator.JavaTokenParsers
/** Combinator grammar for CAVE alert expressions, e.g.
  * "metric[tag:v].avg.5m > 3 at least 2 times delayed by 1m".
  * Built on JavaTokenParsers; alternatives are ordered so that the most
  * specific form is tried first.
  */
trait AlertParser extends JavaTokenParsers {
  // Parser accepting any aggregator name defined in Aggregator.values.
  def anyAggregator = (
    for {
      value <- Aggregator.values
    } yield value.toString: Parser[String]
  ).reduce(_ | _)
  // Comparison operators; longer symbols are listed before their prefixes
  // ("<=" before "<") so alternation matches greedily.
  object Operator extends Enumeration {
    type Operator = Value
    val LessThan, LessThanOrEqual, GreaterThan, GreaterThanOrEqual, Equal, NotEqual = Value
    private val symbols = Seq(
      LessThanOrEqual -> "<=",
      LessThan -> "<",
      GreaterThanOrEqual -> ">=",
      GreaterThan -> ">",
      Equal -> "==",
      NotEqual -> "!="
    )
    def byName(name: String): Operator =
      symbols.find(_._2 == name).map(_._1).getOrElse(sys.error(s"Unknown symbol $name"))
    def anyValue = symbols.map(f => f._2: Parser[String]).reduce(_ | _)
  }
  import Operator._
  import com.cave.metrics.data.evaluator.Aggregator._
  // AST for the sources an alert can compare.
  sealed trait Source
  case class ValueSource(value: Double) extends Source
  case class MetricSource(metric: String, tags: Map[String, String]) extends Source {
    // Canonical key: metric name plus sorted "key.value" tag pairs.
    override def toString = metric + tags.toSeq.sortBy(_._1).map { case (key, value) => key + "." + value}.mkString("__", ".", "")
  }
  case class AggregatedSource(metricSource: MetricSource, aggregator: Aggregator, duration: FiniteDuration) extends Source {
    // Stable identifier: SHA-512 hex digest of metric/aggregator/duration key.
    override def toString: String = {
      val key = metricSource + "__" + aggregator.toString + "__" + duration.toSeconds
      val md = MessageDigest.getInstance("SHA-512")
      md.update(key.getBytes())
      md.digest().map("%02x" format _).mkString
    }
  }
  case class FactoredSource(source: Source, factor: Double) extends Source
  // AST for whole alert conditions.
  sealed trait AlertEntity
  case class SimpleAlert(sourceLeft: Source, operator: Operator, sourceRight: Source, times: Int, delay: FiniteDuration) extends AlertEntity
  case class MissingDataAlert(metricSource: MetricSource, duration: FiniteDuration) extends AlertEntity
  /** *** Parsers *****/
  def operator: Parser[Operator] = Operator.anyValue ^^ Operator.byName
  def aggregator: Parser[Aggregator] = anyAggregator ^^ Aggregator.withName
  def valueSource: Parser[ValueSource] = floatingPointNumber ^^ {
    case num => ValueSource(num.toDouble)
  }
  // Identifier: letter followed by letters, digits, '_', '.' or '-'.
  def word: Parser[String] = """[a-zA-Z][_a-zA-Z0-9.-]*""".r
  def metricTag: Parser[(String, String)] = (word <~ ":") ~ word ^^ {
    case name ~ value => name -> value
  }
  def metricTags: Parser[Map[String, String]] = repsep(metricTag, ",") ^^ {
    case list => list.toMap
  }
  def metricSourceWithTags: Parser[MetricSource] = word ~ ("[" ~> metricTags <~ "]") ^^ {
    case metric ~ tagMap => MetricSource(metric, tagMap)
  }
  def metricSourceWithoutTags: Parser[MetricSource] = word ^^ {
    case metric => MetricSource(metric, Map.empty[String, String])
  }
  def metricSource = metricSourceWithTags | metricSourceWithoutTags
  // Durations: integer plus unit suffix (seconds/minutes/hours/days).
  def duration: Parser[FiniteDuration] = wholeNumber ~ ("s" | "m" | "h" | "d") ^^ {
    case time ~ "s" => time.toInt.seconds
    case time ~ "m" => time.toInt.minutes
    case time ~ "h" => time.toInt.hours
    case time ~ "d" => time.toInt.days
  }
  // Daily schedule times: "@H", "@H:M", "@H:M:S".
  def dailyHours: Parser[LocalTime] = ("@" ~> wholeNumber) ^^ {
    case hours => new LocalTime(hours.toInt, 0)
  }
  def dailyMinutes: Parser[LocalTime] = ("@" ~> wholeNumber) ~ (":" ~> wholeNumber) ^^ {
    case hours ~ minutes => new LocalTime(hours.toInt, minutes.toInt)
  }
  def dailySeconds: Parser[LocalTime] = ("@" ~> wholeNumber) ~ (":" ~> wholeNumber) ~ (":" ~> wholeNumber) ^^ {
    case hours ~ minutes ~ seconds => new LocalTime(hours.toInt, minutes.toInt, seconds.toInt)
  }
  // Most specific first: H:M:S before H:M before H.
  def daily: Parser[LocalTime] = dailySeconds | dailyMinutes | dailyHours
  def anyPeriod = duration | daily
  def repeater: Parser[Int] = "at least" ~> wholeNumber <~ "times" ^^ {
    case num => num.toInt
  }
  def delay: Parser[FiniteDuration] = "delayed by" ~> duration ^^ {
    case duration => duration
  }
  def aggregatedSource: Parser[AggregatedSource] = metricSource ~ ("." ~> aggregator) ~ ("." ~> duration) ^^ {
    case met ~ agg ~ dur => AggregatedSource(met, agg, dur)
  }
  def anySimpleSource: Parser[Source] = valueSource | aggregatedSource | metricSource
  def factoredSourceLeft: Parser[FactoredSource] = (floatingPointNumber <~ "*") ~ anySimpleSource ^^ {
    case factor ~ source => FactoredSource(source, factor.toDouble)
  }
  def factoredSourceRight: Parser[FactoredSource] = anySimpleSource ~ ("*" ~> floatingPointNumber) ^^ {
    case source ~ factor => FactoredSource(source, factor.toDouble)
  }
  def anySource: Parser[Source] = factoredSourceRight | factoredSourceLeft | anySimpleSource
  def missingDataAlert: Parser[MissingDataAlert] = metricSource ~ ("missing for" ~> duration) ^^ {
    case source ~ d => MissingDataAlert(source, d)
  }
  def simpleAlert: Parser[SimpleAlert] = anySource ~ operator ~ anySource ^^ {
    case sourceLeft ~ op ~ sourceRight => SimpleAlert(sourceLeft, op, sourceRight, 1, 0.minutes)
  }
  def simpleAlertWithRepeater: Parser[SimpleAlert] = anySource ~ operator ~ anySource ~ repeater ^^ {
    case sourceLeft ~ op ~ sourceRight ~ num => SimpleAlert(sourceLeft, op, sourceRight, num, 0.minutes)
  }
  def simpleAlertWithDelay: Parser[SimpleAlert] = anySource ~ operator ~ anySource ~ delay ^^ {
    case sourceLeft ~ op ~ sourceRight ~ delay => SimpleAlert(sourceLeft, op, sourceRight, 1, delay)
  }
  def simpleAlertWithRepeaterAndDelay: Parser[SimpleAlert] = anySource ~ operator ~ anySource ~ repeater ~ delay ^^ {
    case sourceLeft ~ op ~ sourceRight ~ num ~ delay => SimpleAlert(sourceLeft, op, sourceRight, num, delay)
  }
  // order is important here: look for the more complex case first
  def anyAlert: Parser[AlertEntity] = missingDataAlert | simpleAlertWithRepeaterAndDelay | simpleAlertWithRepeater | simpleAlertWithDelay | simpleAlert
}
|
gilt/cave
|
core/src/main/scala/com/cave/metrics/data/evaluator/AlertParser.scala
|
Scala
|
mit
| 5,970 |
package keycloakapi
import play.api.libs.functional.syntax._
import play.api.libs.json.Reads._
import play.api.libs.json._
// A user record as exposed by the Keycloak admin REST API, flattened from the
// nested "attributes" arrays (see the companion object's Reads).
case class KeycloakUser(
    firstname: String,
    lastname: String,
    email: String,
    systemId: String,
    campusId: String,
    degreeAbbrev: Option[String],
    registrationId: Option[String]
)
object KeycloakUser {
  // Reads the first element of a JSON string array at `path`.
  // NOTE(review): `_.head` throws on an empty array instead of yielding a
  // JsError — confirm Keycloak never returns empty attribute arrays here.
  def firstStringOf(path: JsPath): Reads[String] =
    path.read[List[String]].map(_.head)
  // As above, but tolerates a missing path or empty array by producing None.
  def firstStringOrNullOf(path: JsPath): Reads[Option[String]] =
    path.readNullable[List[String]].map(_.flatMap(_.headOption))
  // Maps the Keycloak payload (top-level fields + "attributes" arrays) onto
  // the flat KeycloakUser case class.
  implicit val reads: Reads[KeycloakUser] = (
    (JsPath \\ "firstName").read[String] and
    (JsPath \\ "lastName").read[String] and
    (JsPath \\ "email").read[String] and
    firstStringOf(JsPath \\ "attributes" \\ "systemId") and
    firstStringOf(JsPath \\ "attributes" \\ "campusId") and
    firstStringOrNullOf(JsPath \\ "attributes" \\ "degreeAbbrev") and
    firstStringOrNullOf(JsPath \\ "attributes" \\ "registrationId")
  )(KeycloakUser.apply _)
}
|
THK-ADV/lwm-reloaded
|
app/keycloakapi/KeycloakUser.scala
|
Scala
|
mit
| 1,038 |
import sbt.Keys._
import sbt._
object Dependencies {
  // Single AWS SDK for Java v1 version shared by every aws-java-sdk artifact.
  val AwsSdkVersion = "1.11.774"
  // sbt settings fragment adding the AWS service clients used for
  // CloudFormation deployments, plus scalatest for the Test configuration.
  val AwsCloudformationDeps = Seq(
    libraryDependencies ++= Seq(
      "com.amazonaws" % "aws-java-sdk-cloudformation" % AwsSdkVersion,
      "com.amazonaws" % "aws-java-sdk-sts" % AwsSdkVersion,
      "com.amazonaws" % "aws-java-sdk-s3" % AwsSdkVersion,
      "com.amazonaws" % "aws-java-sdk-ecr" % AwsSdkVersion,
      "com.amazonaws" % "aws-java-sdk-lambda" % AwsSdkVersion,
      "com.amazonaws" % "aws-java-sdk-api-gateway" % AwsSdkVersion,
      "com.amazonaws" % "aws-java-sdk-ecs" % AwsSdkVersion,
      "com.amazonaws" % "aws-java-sdk-autoscaling" % AwsSdkVersion,
      "com.amazonaws" % "aws-java-sdk-applicationautoscaling" % AwsSdkVersion,
      "org.scalatest" %% "scalatest" % "3.1.1" % Test
    )
  )
}
|
PigumerGroup/sbt-aws-cloudformation
|
project/Dependencies.scala
|
Scala
|
mit
| 817 |
package jigg.util
/*
Copyright 2013-2015 Hiroshi Noji
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
object LogUtil {
  /** Runs `body` while printing `message`, then "done [x.x sec]".
    * For nested measurements use [[multipleTrack]].
    *
    * TODO: Integrate track and multipleTrack to automatically choose indent and
    * appropriate format. Currently track(beginMessage, ...) "manually" handles
    * the indent level.
    */
  def track[A](message: String)(body: => A): A =
    track(message, "done", 0) { body }

  /** Like [[track]] but opens a brace block, so nested `track` output nests
    * visually inside it.
    */
  def multipleTrack[A](message: String)(body: => A): A =
    track(message + " {\\n", "}", 0) { body }

  /** Prints `beginMessage` at the given indent level, evaluates `body`, then
    * prints `endMessage` with the elapsed wall-clock seconds appended.
    */
  def track[A](beginMessage: String, endMessage: String, indent: Int)(body: => A): A = {
    var level = 0
    while (level < indent) {
      System.out.print("  ")
      level += 1
    }
    System.out.print(beginMessage)
    val (result, seconds) = recordTime { body }
    System.out.println(endMessage + " [%.1f sec]".format(seconds))
    result
  }

  /** Evaluates `body`, returning its result together with the elapsed
    * wall-clock time in seconds.
    */
  def recordTime[A](body: => A): (A, Double) = {
    val startedAt = System.currentTimeMillis
    val value = body
    val elapsed = (System.currentTimeMillis - startedAt).toDouble / 1000.0
    (value, elapsed)
  }
}
|
sakabar/jigg
|
src/main/scala/jigg/util/LogUtil.scala
|
Scala
|
apache-2.0
| 1,992 |
package leo.modules.calculus
import leo.Out
import leo.datastructures.Literal.Side
import leo.datastructures._
import leo.modules.HOLSignature.{LitTrue, o}
import leo.modules.output.{SZS_CounterTheorem, SZS_EquiSatisfiable, SZS_Theorem}
import scala.annotation.tailrec
////////////////////////////////////////////////////////////////
////////// Extensionality
////////////////////////////////////////////////////////////////
/** Functional extensionality: an equation between function-typed terms is
  * replaced by the equation of the terms applied to fresh variables (positive
  * polarity) or Skolem terms (negative polarity).
  */
object FuncExt extends CalculusRule {
  final val name = "func_ext"
  final val inferenceStatus = SZS_EquiSatisfiable
  type ExtLits = Literal
  type OtherLits = Literal
  // Eligible iff the literal is an equation between function-typed terms.
  final def canApply(l: Literal): Boolean = l.equational && l.left.ty.isFunType
  // Partitions `lits` into (eligible, rest); the flag is true iff any literal
  // is eligible.
  final def canApply(lits: Seq[Literal]): (Boolean, Seq[ExtLits], Seq[OtherLits]) = {
    var can = false
    var extLits:Seq[Literal] = Vector()
    var otherLits: Seq[Literal] = Vector()
    val literals = lits.iterator
    while (literals.hasNext) {
      val l = literals.next()
      if (canApply(l)) {
        extLits = extLits :+ l
        can = true
      } else {
        otherLits = otherLits :+ l
      }
    }
    (can, extLits, otherLits)
  }
  final def canApply(cl: Clause): (Boolean, Seq[ExtLits], Seq[OtherLits]) = canApply(cl.lits)
  // Applies func_ext over ALL argument positions at once: fresh variables for
  // positive literals, Skolem terms (over all currently existing vars) for
  // negative ones.
  final def applyExhaust(lit: Literal, vargen: FreshVarGen)(implicit sig: Signature): Literal = {
    assert(lit.left.ty.isFunType, "Trying to apply func ext on non fun-ty literal")
    assert(lit.equational, "Trying to apply func ext on non-eq literal")
    val funArgTys = lit.left.ty.funParamTypes
    if (lit.polarity) {
      val newVars = funArgTys.map {ty => vargen(ty)}
      val appliedLeft = Term.mkTermApp(lit.left, newVars).betaNormalize
      val appliedRight = Term.mkTermApp(lit.right, newVars).betaNormalize
      assert(Term.wellTyped(appliedLeft), s"[FuncExt]: Positive polarity left result not well typed: ${appliedLeft.pretty(sig)}")
      assert(Term.wellTyped(appliedRight), s"[FuncExt]: Positive polarity right result not well typed: ${appliedRight.pretty(sig)}")
      Literal.mkOrdered(appliedLeft, appliedRight, true)(sig)
    } else {
      val skTerms = funArgTys.map(skTerm(_, vargen.existingVars, vargen.existingTyVars)(sig))
      val appliedLeft = Term.mkTermApp(lit.left, skTerms).betaNormalize
      val appliedRight = Term.mkTermApp(lit.right, skTerms).betaNormalize
      assert(Term.wellTyped(appliedLeft), s"[FuncExt]: Negative polarity left result not well typed: ${appliedLeft.pretty(sig)}")
      assert(Term.wellTyped(appliedRight), s"[FuncExt]: Negative polarity right result not well typed: ${appliedRight.pretty(sig)}")
      Literal.mkOrdered(appliedLeft, appliedRight, false)(sig)
    }
  }
  // One-step variant: applies func_ext to the FIRST argument position only.
  final def applyNew(lit: Literal, vargen: FreshVarGen)(implicit sig: Signature): Literal = {
    assert(lit.left.ty.isFunType, "Trying to apply func ext on non fun-ty literal")
    assert(lit.equational, "Trying to apply func ext on non-eq literal")
    val argType = lit.left.ty._funDomainType
    if (lit.polarity) {
      val newVar = vargen(argType)
      val appliedLeft = Term.mkTermApp(lit.left, newVar).betaNormalize
      val appliedRight = Term.mkTermApp(lit.right, newVar).betaNormalize
      assert(Term.wellTyped(appliedLeft), s"[FuncExt]: Positive polarity left result not well typed: ${appliedLeft.pretty(sig)}")
      assert(Term.wellTyped(appliedRight), s"[FuncExt]: Positive polarity right result not well typed: ${appliedRight.pretty(sig)}")
      Literal.mkOrdered(appliedLeft, appliedRight, true)(sig)
    } else {
      val newSkArg = skTerm(argType, vargen.existingVars, vargen.existingTyVars)(sig)
      val appliedLeft = Term.mkTermApp(lit.left, newSkArg).betaNormalize
      val appliedRight = Term.mkTermApp(lit.right, newSkArg).betaNormalize
      assert(Term.wellTyped(appliedLeft), s"[FuncExt]: Negative polarity left result not well typed: ${appliedLeft.pretty(sig)}")
      assert(Term.wellTyped(appliedRight), s"[FuncExt]: Negative polarity right result not well typed: ${appliedRight.pretty(sig)}")
      Literal.mkOrdered(appliedLeft, appliedRight, false)(sig)
    }
  }
  // Like applyExhaust, but Skolem terms close only over `initFV` (the clause's
  // free vars before any func_ext step), keeping Skolem terms minimal.
  // NOTE(review): largely duplicates applyExhaust except for the FV set passed
  // to skTerm — candidate for consolidation.
  final def apply(lit: Literal, vargen: leo.modules.calculus.FreshVarGen, initFV: Seq[(Int, Type)])(implicit sig: Signature): Literal = {
    assert(lit.left.ty.isFunType, "Trying to apply func ext on non fun-ty literal")
    assert(lit.equational, "Trying to apply func ext on non-eq literal")
    val funArgTys = lit.left.ty.funParamTypes
    if (lit.polarity) {
      val newVars = funArgTys.map {ty => vargen(ty)}
      val appliedLeft = Term.mkTermApp(lit.left, newVars).betaNormalize
      val appliedRight = Term.mkTermApp(lit.right, newVars).betaNormalize
      assert(Term.wellTyped(appliedLeft), s"[FuncExt]: Positive polarity left result not well typed: ${appliedLeft.pretty(sig)}")
      assert(Term.wellTyped(appliedRight), s"[FuncExt]: Positive polarity right result not well typed: ${appliedRight.pretty(sig)}")
      Literal.mkOrdered(appliedLeft, appliedRight, true)(sig)
    } else {
      val skTerms = funArgTys.map(leo.modules.calculus.skTerm(_, initFV, vargen.existingTyVars)(sig)) //initFV: We only use the
      // free vars that were existent at the very beginning, i.e. simulating
      // that we apply func_ext to all negative literals first
      // in order to minimize the FVs inside the sk-term
      val appliedLeft = Term.mkTermApp(lit.left, skTerms).betaNormalize
      val appliedRight = Term.mkTermApp(lit.right, skTerms).betaNormalize
      assert(Term.wellTyped(appliedLeft), s"[FuncExt]: Negative polarity left result not well typed: ${appliedLeft.pretty(sig)}")
      assert(Term.wellTyped(appliedRight), s"[FuncExt]: Negative polarity right result not well typed: ${appliedRight.pretty(sig)}")
      Literal.mkOrdered(appliedLeft, appliedRight, false)(sig)
    }
  }
  // Applies the initFV variant uniformly to a sequence of literals.
  final def apply(vargen: leo.modules.calculus.FreshVarGen, lits: Seq[Literal])(implicit sig: Signature): Seq[Literal] = {
    val initFV = vargen.existingVars
    lits.map(apply(_,vargen, initFV)(sig))
  }
}
/** Boolean extensionality: each equation between Boolean-typed terms is
  * expanded into the two clause variants encoding its equivalence (positive
  * polarity) resp. exclusive-or (negative polarity).
  */
object BoolExt extends CalculusRule {
  final val name = "bool_ext"
  final val inferenceStatus = SZS_Theorem
  type ExtLits = Seq[Literal]
  type OtherLits = Seq[Literal]
  // Eligible iff the literal is an equation between Boolean-typed terms.
  final def canApply(l: Literal): Boolean = l.equational && l.left.ty == o
  // Partitions the clause's literals into (eligible, rest).
  final def canApply(cl: Clause): (Boolean, ExtLits, OtherLits) = {
    var can = false
    var extLits:Seq[Literal] = Vector()
    var otherLits: Seq[Literal] = Vector()
    val lits = cl.lits.iterator
    while (lits.hasNext) {
      val l = lits.next()
      if (canApply(l)) {
        extLits = extLits :+ l
        can = true
      } else {
        otherLits = otherLits :+ l
      }
    }
    (can, extLits, otherLits)
  }
  // Expands every eligible literal, taking the cartesian product of the two
  // variants per literal over the remaining literals.
  final def apply(cl: Clause, extLits: ExtLits, otherLits: OtherLits): Set[Clause] = {
    var transformed = Set(otherLits)
    val extIt = extLits.iterator
    while (extIt.hasNext) {
      val extLit = extIt.next()
      val nu = apply(extLit)
      transformed = transformed.map(_ ++ nu._1) union transformed.map(_ ++ nu._2)
    }
    transformed.map(c => Clause.apply(c, cl.implicitlyBound, cl.typeVars)) //TODO CHECK THIS
  }
  // Splits one boolean equation into its two variant literal sequences.
  final def apply(l: Literal): (ExtLits, ExtLits) = {
    assert(l.equational, "Trying to apply bool ext on non-eq literal")
    assert(l.left.ty == o && l.right.ty == o, "Trying to apply bool ext on non-bool literal")
    if (l.polarity) {
      (Seq(Literal.mkLit(l.left, false), Literal.mkLit(l.right, true)), Seq(Literal.mkLit(l.left, true), Literal.mkLit(l.right, false)))
    } else {
      (Seq(Literal.mkLit(l.left, false), Literal.mkLit(l.right, false)), Seq(Literal.mkLit(l.left, true), Literal.mkLit(l.right, true)))
    }
  }
}
////////////////////////////////////////////////////////////////
////////// pre-Unification
////////////////////////////////////////////////////////////////
/** Shared scaffolding for unification rules: partitions a clause into
  * unification constraints and remaining literals.
  */
protected[calculus] abstract class AnyUni extends CalculusRule {
  final val inferenceStatus = SZS_Theorem
  type UniLits = Seq[(Term, Term)]
  type OtherLits = Seq[Literal]
  type UniResult = (Clause, (Unification#TermSubst, Unification#TypeSubst))
  // Subclasses decide which literals count as unification constraints.
  def canApply(l: Literal): Boolean
  // Partitions the clause's literals into (constraint pairs, rest).
  final def canApply(cl: Clause): (Boolean, UniLits, OtherLits) = {
    var can = false
    var uniLits: UniLits = Vector()
    var otherLits: OtherLits = Vector()
    val lits = cl.lits.iterator
    while (lits.hasNext) {
      val l = lits.next()
      if (canApply(l)) {
        uniLits = uniLits :+ (l.left, l.right)
        can = true
      } else {
        otherLits = otherLits :+ l
      }
    }
    (can, uniLits, otherLits)
  }
}
/** Huet-style pre-unification: enumerates unifiers up to `uniDepth`, keeping
  * unsolved flex-flex pairs as fresh negative literals.
  */
object PreUni extends AnyUni {
  final val name = "pre_uni"
  final def canApply(l: Literal): Boolean = l.uni
  final def apply(vargen: FreshVarGen, uniLits: UniLits,
                  otherLits: OtherLits, uniDepth: Int)(implicit sig: Signature): Iterator[UniResult] = {
    import leo.modules.myAssert
    Out.trace(s"Unification on:\\n\\t${uniLits.map(eq => eq._1.pretty(sig) + " = " + eq._2.pretty(sig)).mkString("\\n\\t")}")
    myAssert(uniLits.forall{case (l,r) => Term.wellTyped(l) && Term.wellTyped(r) && l.ty == r.ty})
    val result = HuetsPreUnification.unifyAll(vargen, uniLits, uniDepth).iterator
    result.map {case (subst, flexflex) =>
      // Unsolved flex-flex pairs are re-added as negated equations.
      val newLiteralsFromFlexFlex = flexflex.map(eq => Literal.mkNeg(eq._1, eq._2))
      val updatedOtherLits = otherLits.map(_.substituteOrdered(subst._1, subst._2)(sig)) // FIXME this one is slow
      val resultClause = Clause(updatedOtherLits ++ newLiteralsFromFlexFlex)
      (resultClause, subst)
    }
  }
}
object PatternUni extends AnyUni {
final val name = "pattern_uni"
final def canApply(l: Literal): Boolean =
l.uni && PatternUnification.isPattern(l.left) && PatternUnification.isPattern(l.right)
final def apply(vargen: FreshVarGen, uniLits: UniLits,
otherLits: OtherLits)(implicit sig: Signature): Option[UniResult] = {
import leo.modules.myAssert
Out.trace(s"Pattern unification on:\\n\\t${uniLits.map(eq => eq._1.pretty(sig) + " = " + eq._2.pretty(sig)).mkString("\\n\\t")}")
myAssert(uniLits.forall{case (l,r) => Term.wellTyped(l) && Term.wellTyped(r) && l.ty == r.ty})
val result = PatternUnification.unifyAll(vargen, uniLits, -1) // depth is dont care
if (result.isEmpty) {
Out.trace(s"Pattern unification failed.")
None
} else {
val subst = result.head._1
Out.trace(s"Pattern unification successful: ${subst._1.pretty}")
Out.trace(s"ty subst: ${subst._2.pretty}")
val updatedOtherLits = otherLits.map(_.substituteOrdered(subst._1, subst._2)(sig))
val resultClause = Clause(updatedOtherLits)
Some((resultClause, subst))
}
}
}
////////////////////////////////////////////////////////////////
////////// Choice
////////////////////////////////////////////////////////////////
  /**
    * Calculus rules dealing with choice: detection of clauses axiomatizing a
    * choice function ([[detectChoice]]), detection of applied choice operators
    * inside clauses ([[canApply]]) and instantiation of the choice axiom scheme
    * ([[apply]]). Marked equisatisfiable (not equivalence-preserving).
    */
  object Choice extends CalculusRule {
    final val name = "choice"
    final val inferenceStatus = SZS_EquiSatisfiable

    /**
      * Checks whether `clause` has the syntactic shape of a choice axiom,
      * i.e. a two-literal clause `[P w]^f ∨ [P (f P)]^t` (modulo eta) for some
      * variable-headed predicate `P` and variable `w`. Returns `Some(f)` with the
      * choice-function candidate if so, `None` otherwise.
      */
    final def detectChoice(clause: Clause): Option[Term] = {
      import leo.datastructures.Term.TermApp
      if (clause.lits.size == 2) {
        val lit1 = clause.lits.head
        val lit2 = clause.lits.tail.head
        // Identify the positive and the negative literal (null if missing).
        val posLit = if (lit1.polarity) lit1 else if (lit2.polarity) lit2 else null
        val negLit = if (!lit1.polarity) lit1 else if (!lit2.polarity) lit2 else null
        // Exactly one positive and one negative non-equational literal required.
        if (posLit == null || negLit == null || posLit.equational || negLit.equational) None
        else {
          val witnessTerm = negLit.left
          val choiceTerm = posLit.left
          witnessTerm match {
            // Negative literal must be `P w` with variable head P and (eta-)variable w.
            case TermApp(prop, Seq(witness)) if prop.isVariable && isVariableModuloEta(witness) =>
              choiceTerm match {
                // Positive literal must apply the *same* predicate P ...
                case TermApp(`prop`, Seq(arg0)) =>
                  val arg = arg0.etaContract
                  arg match {
                    // ... to `f P` (modulo eta); f is the choice function.
                    case TermApp(f, Seq(prop0)) if prop0.etaContract == prop.etaContract => Some(f)
                    case _ => None
                  }
                case _ => None
              }
            case _ => None
          }
        }
      } else
        None
    }

    /**
      * Collects, for every feasible subterm occurrence in `clause`, the arguments of
      * applied choice operators (either the built-in choice symbol or a registered
      * choice function from `choiceFuns`). The returned terms are candidates for
      * instantiating the choice axiom via [[apply]].
      */
    final def canApply(clause: Clause, choiceFuns: Map[Type, Set[Term]])(implicit sig: Signature): Set[Term] = {
      var result: Set[Term] = Set()
      val litIt = clause.lits.iterator
      while (litIt.hasNext) {
        val lit = litIt.next()
        // Scan the left side of every literal ...
        val leftOcc = lit.left.feasibleOccurrences
        val leftOccIt = leftOcc.keysIterator
        while (leftOccIt.hasNext) {
          val o = leftOccIt.next()
          // Also consider every application prefix of the occurrence.
          val occ0 = prefixApplications(o)
          occ0.foreach { occ =>
            leo.Out.trace(s"[Choice Rule] Current occurence: ${occ.pretty(sig)}")
            val findResult = findChoice(occ, choiceFuns, leftOcc(o).head)
            if (findResult != null) leo.Out.trace(s"[Choice Rule] Taken: ${findResult.pretty(sig)}")
            if (findResult != null)
              result = result + findResult
          }
        }
        // ... and the right side, if the literal is equational.
        // NOTE(review): unlike the left side, prefix applications are not
        // considered here — confirm whether this asymmetry is intended.
        if (lit.equational) {
          val rightOcc = lit.right.feasibleOccurrences
          val rightOccIt = rightOcc.keysIterator
          while (rightOccIt.hasNext) {
            val occ = rightOccIt.next()
            val findResult = findChoice(occ, choiceFuns, rightOcc(occ).head)
            if (findResult != null)
              result = result + findResult
          }
        }
      }
      result
    }

    /** Returns the argument of a detected choice application in `occ`, or null. */
    private final def findChoice(occ: Term, choiceFuns: Map[Type, Set[Term]], occPos: Position): Term =
      findChoice0(occ, choiceFuns, occPos, 0)

    /** Worker for [[findChoice]].
      * NOTE(review): `depth` is only ever 0 (this method never recurses), so the
      * `+depth` in the bound-variable check below is currently a no-op offset. */
    private final def findChoice0(occ: Term, choiceFuns: Map[Type, Set[Term]], occPos: Position, depth: Int): Term = {
      import leo.datastructures.Term.{Symbol, Bound,TermApp}
      import leo.modules.HOLSignature.{Choice => ChoiceSymb}
      occ match {
        // Built-in choice operator applied to an argument.
        case ChoiceSymb(arg) => arg
        // Unary application with a head of choice type (a -> o) -> a:
        case TermApp(hd, args) if compatibleType(hd.ty) && args.size == 1 =>
          val arg = args.head
          hd match {
            // Head is a variable bound outside the occurrence's abstractions,
            // i.e. a free "choice function" variable of the clause.
            case Bound(_,idx) if idx > occPos.abstractionCount+depth =>
              arg
            case Symbol(_) =>
              // hd.ty = (a -> o) -> a
              val choiceType =hd.ty._funDomainType._funDomainType
              // choiceType = a
              val choiceFuns0 = choiceFuns.getOrElse(choiceType, Set.empty)
              // Only accept symbols registered as choice functions for this type.
              if (choiceFuns0.contains(hd)) arg else null
            case _ => null/* skip */
          }
        case _ => null/* skip */
      }
    }

    /** True iff `ty` has the shape of a choice-operator type `(a -> o) -> a`. */
    private final def compatibleType(ty: Type): Boolean = {
      if (ty.isFunType) {
        val domain = ty._funDomainType
        val codomain = ty.codomainType
        if (domain.isFunType)
          if (domain._funDomainType == codomain && domain.codomainType == o) true
          else false
        else false
      } else false
    }

    /**
      * Instantiates the choice axiom for predicate `term` and choice function
      * `choiceFun`, producing the clause `[term y]^f ∨ [term (choiceFun term)]^t`
      * (both literals beta-normalized and eta-expanded).
      */
    final def apply(term: Term, choiceFun: Term): Clause = {
      // We don't need to adjust the free variables of `term` since there
      // is no variable capture (we create a fresh clause).
      val newVarIndex = if (term.looseBounds.isEmpty) 1 else term.looseBounds.max + 1
      val newVar: Term = Term.mkBound(term.ty._funDomainType, newVarIndex)
      // lit1: [term y]^f
      val lit1 = Literal.mkLit(Term.mkTermApp(term, newVar).betaNormalize.etaExpand, false)
      // lit2: [term (choicefun term)]^t
      val lit2 = Literal.mkLit(Term.mkTermApp(term, Term.mkTermApp(choiceFun, term)).betaNormalize.etaExpand, true)
      Clause(Vector(lit1, lit2))
    }
  }
  object SolveFuncSpec extends CalculusRule {
    import leo.datastructures.Term.{λ, mkBound}
    import leo.modules.HOLSignature.{Choice => ε, Impl, &, ===}
    import leo.modules.myAssert
    final val name: String = "solveFuncSpec"
    override final val inferenceStatus = SZS_Theorem
    // An argument term s_ij of a specification row.
    type Argument = Term
    // The result term t_i of a specification row.
    type Result = Term
    /**
      * Suppose we have a specification of a function F with
      * {{{F(s11,s12,...,s1J) = t1,
      * ...
      * F(sN1,sN2,...,sNJ) = tN}}},
      * represented as an input `((s_ij)_{1<=j<=J},t_i)_{1<=i<=N}`,
      * return the term
      * `λx_1....λ.x_J.ε(λy. ⋀_i<=N. (⋀_j<=J. x_j = s_ij) => y = t_i)`.
      *
      * This term represents the specification as a choice-term.
      *
      * @param funTy The type of the function `F`
      * @param spec The specification of the function `F`.
      * @return A choice term representing a function with specification `spec`
      */
    final def apply(funTy: Type, spec: Seq[(Seq[Argument], Result)])
                   (implicit sig: Signature): Term = {
      assert(spec.nonEmpty)
      val (paramTypes, resultType) = funTy.splitFunParamTypes
      val paramCount = paramTypes.size
      // Sanity: every specification row has the right arity, argument types and result type.
      myAssert(spec.forall(s => s._1.size == paramCount))
      myAssert(spec.forall(s => s._1.map(_.ty) == paramTypes))
      myAssert(spec.forall(s => s._2.ty == resultType))
      /* Result var is the y in `SOME y. p`, i.e.
       * ε(λy.p). De Bruijn index 1 = innermost binder. */
      val resultVar: Term = mkBound(resultType, 1)
      /* paramVar(i) is the i+1-th input variable for the term as in
       * `λx_1....λxi...λx_J.ε(λy. ...)`, 0 <= i < J.
       * x_1 is the outermost binder, hence the largest index paramCount+1;
       * the extra +1 accounts for the innermost λy binder. */
      def paramVar(i: Int): Term = mkBound(paramTypes(i), paramCount-i+1) // +1 b/c of y
      val specIt = spec.iterator
      /* Iteratively build-up `choiceTerm` as the conjunction over all spec rows. */
      var choiceTerm: Term = null
      while (specIt.hasNext) {
        val (args,res0) = specIt.next() // (sij_j,ti)
        // Lift loose bound variables of t_i past the J+1 new binders (x_1..x_J and y).
        val res = res0.lift(paramCount+1)
        val argsIt = args.iterator
        var i = 0
        var caseTerm: Term = null // a single input `⋀_j<=J. x_j = s_ij` for a fixed i
        while (argsIt.hasNext) {
          val arg0 = argsIt.next()
          // Lift s_ij past the new binders as well.
          val arg = arg0.lift(paramCount+1)
          if (caseTerm == null) {
            caseTerm = ===(paramVar(i), arg)
          } else {
            caseTerm = &(caseTerm, ===(paramVar(i), arg))
          }
          i = i+1
        }
        // One implication per row: (⋀_j x_j = s_ij) => y = t_i
        val caseTerm0: Term = Impl(caseTerm, ===(resultVar,res))
        if (choiceTerm == null) {
          choiceTerm = caseTerm0
        } else {
          choiceTerm = &(choiceTerm, caseTerm0)
        }
      }
      // Wrap the conjunction in ε(λy. ...) and abstract over the J parameters.
      val result: Term = λ(paramTypes)(ε(λ(resultType)(choiceTerm)))
      leo.Out.trace(s"[SolveFuncSpec] Result: ${result.pretty(sig)}")
      result
    }
  }
////////////////////////////////////////////////////////////////
////////// Inferences
////////////////////////////////////////////////////////////////
object PrimSubst extends CalculusRule {
type FlexHeads = Set[Term]
final val name = "prim_subst"
final val inferenceStatus = SZS_Theorem
final def canApply(cl: Clause): (Boolean, FlexHeads) = {
var can = false
var flexheads: FlexHeads = Set()
val lits = cl.lits.iterator
while (lits.hasNext) {
val l = lits.next()
if (l.flexHead) {
flexheads = flexheads + l.left.headSymbol
can = true
}
}
Out.trace(s"flexHeads: ${flexheads.map(_.pretty).mkString(",")}")
(can, flexheads)
}
final def apply(cl: Clause, flexHeads: FlexHeads, hdSymbs: Set[Term])(implicit sig: Signature): Set[(Clause, Subst)] = hdSymbs.flatMap {hdSymb =>
flexHeads.map {hd =>
val vargen = leo.modules.calculus.freshVarGen(cl)
val binding = leo.modules.calculus.partialBinding(vargen,hd.ty, hdSymb)
val subst = Subst.singleton(hd.fv.head._1, binding)
(cl.substituteOrdered(subst)(sig),subst)
}
}
}
/**
* Representation of an (ordered) equality factoring step.
* For details, see [[leo.modules.calculus.OrderedEqFac#apply]].
*/
object OrderedEqFac extends CalculusRule {
final val name = "eqfactor_ordered"
final val inferenceStatus = SZS_Theorem
/**
* Let `l = cl.lits(maxLitIndex)` and `l' = cl.lits(withLitIndex)` be literals
* called `maxLit` and `withLit` in the following.
* The method performs a single factoring step between `maxLit` and `withLit`.
* Unification constraints `c1 = [a = b]^f` and `c2 = [c = d]^f` are appended to the literal list,
* where `a` and `b` are the sides of the `maxLit` and `withLit`, respectively,
* according to `maxLitSide` and `withLitSide`. `c` and `d` are the remaining terms in those literals.
*
* @note Precondition:
* - `maxLit` and `withLit` have the same polarity.
* - `maxLitIndex != otherLitIndex`
* @note The rule does not validate that `maxLit` is indeed a maximal literal, i.e.
* this is not required for the soundness of the application.
*
* @param cl The clause in which the factoring step is performed
* @param maxLitIndex The index of the (maximal) literal `l`
* @param maxLitSide The side of the literal that is taken as the left side `s` of literal `l`
* @param withLitIndex The index of the literal `l'`
* @param withLitSide The side of the literal that is taken as the left side `t` of literal `l'`
* @param sig The signature
* @return A new clause containing of all literals of `cl` except for `maxLit` add two appended unification contraints
* `c1` and `c2`.
*/
final def apply(cl: Clause, maxLitIndex: Int, maxLitSide: Side,
withLitIndex: Int, withLitSide: Side)(implicit sig: Signature): Clause = {
assert(cl.lits.isDefinedAt(maxLitIndex))
assert(cl.lits.isDefinedAt(withLitIndex))
assert(maxLitIndex != withLitIndex)
val maxLit = cl.lits(maxLitIndex)
val withLit = cl.lits(withLitIndex)
assert(maxLit.polarity == withLit.polarity)
val (maxLitSide1, maxLitSide2) = Literal.getSidesOrdered(maxLit, maxLitSide)
val (withLitSide1, withLitSide2) = Literal.getSidesOrdered(withLit, withLitSide)
/* We cannot delete an element from the list, thats way we replace it by a trivially false literal,
* that is later eliminated using Simp. */
val lits_without_maxLit = cl.lits.updated(maxLitIndex, Literal.mkLit(LitTrue(),false))
val unification_task1: Literal = Literal.mkNegOrdered(maxLitSide1, withLitSide1)(sig)
val unification_task2: Literal = Literal.mkNegOrdered(maxLitSide2, withLitSide2)(sig)
val newlitsSimp = Simp.shallowSimp(lits_without_maxLit)(sig):+ unification_task1 :+ unification_task2
Clause(newlitsSimp)
}
}
  /**
   * Representation of an (ordered) paramodulation step.
   * For details, see [[leo.modules.calculus.OrderedParamod#apply]].
   */
  object OrderedParamod extends CalculusRule {
    final val name = "paramod_ordered"
    final val inferenceStatus = SZS_Theorem
    /**
      * Performs a paramodulation step on the given configuration.
      * @note It is assumed that both clauses have distinct variables. This must be ensured
      *       before using this method.
      * @note Preconditions:
      * - withClause.lits(withIndex).polarity == true
      * - withSide == right => !withClause.lits(withIndex).oriented || simulateResolution
      * - intoSide == right => !intoClause.lits(intoIndex).oriented || simulateResolution
      * - if `t` is the `intoSide` of intoClause.lits(intoIndex), then
      *   u.fv = intoClause.implicitlyBound where `u` is a subterm of `t`
      * @param withClause clause that contains the literal used for rewriting
      * @param withIndex index of literal `s=t` in `withClause` that is used for rewriting
      * @param withSide `left` or `right`, depending on which side of `s=t` we search in `into`
      * @param intoClause clause that is rewritten
      * @param intoIndex index of literal `l=r` in `intoClause` that is rewritten
      * @param intoSide side of `l=r` that is rewritten
      * @param intoPosition position in `side(l=r)` that is rewritten
      */
    final def apply(withClause: Clause, withIndex: Int, withSide: Literal.Side,
                    intoClause: Clause, intoIndex: Int, intoSide: Literal.Side, intoPosition: Position, intoSubterm: Term,
                    simulateResolution: Boolean = false)(implicit sig: Signature): Clause = {
      assert(withClause.lits.isDefinedAt(withIndex))
      assert(intoClause.lits.isDefinedAt(intoIndex))
      assert(withClause.lits(withIndex).polarity)
      assert(!(withSide == Literal.rightSide) || !withClause.lits(withIndex).oriented || simulateResolution)
      assert(!(intoSide == Literal.rightSide) || !intoClause.lits(intoIndex).oriented || simulateResolution)
      val withLiteral = withClause.lits(withIndex)
      // Rewrite rule `toFind -> replaceBy`, taken from the equational literal s=t.
      val (toFind, replaceBy) = if (withSide == Literal.leftSide) (withLiteral.left,withLiteral.right) else (withLiteral.right,withLiteral.left)
      Out.finest(s"toFind: ${toFind.pretty(sig)}")
      Out.finest(s"replaceBy: ${replaceBy.pretty(sig)}")
      /* We cannot delete an element from the list, that's why we replace it by a trivially
       * false literal that is later eliminated using Simp. All remaining literals are
       * eta-expanded. */
      val withLits_without_withLiteral = withClause.lits.updated(withIndex, Literal.mkLit(LitTrue(),false)).map(l =>
        Literal.mkLit(l.left.etaExpand, l.right.etaExpand, l.polarity, l.oriented)
      )
      Out.finest(s"withLits_without_withLiteral: \\n\\t${withLits_without_withLiteral.map(_.pretty(sig)).mkString("\\n\\t")}")
      /* NOTE(review): despite the name, no shifting is performed here — `shiftedIntoLits`
       * is simply `intoClause.lits`. Variable distinctness of the two clauses is a
       * documented precondition of this method; confirm callers guarantee it. */
      val shiftedIntoLits = intoClause.lits
      val intoLiteral = shiftedIntoLits(intoIndex)
      // findWithin = the side of l=r that is rewritten; otherSide = the untouched side.
      val (findWithin, otherSide) = Literal.getSidesOrdered(intoLiteral, intoSide)
      Out.finest(s"findWithin: ${findWithin.pretty(sig)}")
      Out.finest(s"otherSide (rewrittenIntolit right): ${otherSide.pretty(sig)}")
      Out.finest(s"rewrittenIntoLit left: ${findWithin.replaceAt(intoPosition,replaceBy.substitute(Subst.shift(intoPosition.abstractionCount))).betaNormalize.pretty(sig)}")
      /* Replace subterm at intoPosition by replaceBy, shifting replaceBy's loose bound
       * variables past the abstractions above that position. */
      val rewrittenIntoLit = Literal.mkOrdered(findWithin.replaceAt(intoPosition,replaceBy.substitute(Subst.shift(intoPosition.abstractionCount))).betaNormalize,otherSide,intoLiteral.polarity)(sig)
      /* Replace old literal in intoClause (at index intoIndex) by the new literal `rewrittenIntoLit`;
       * eta-expand all literals of the rewritten clause part. */
      val rewrittenIntoLits = shiftedIntoLits.updated(intoIndex, rewrittenIntoLit).map(l =>
        Literal.mkLit(l.left.etaExpand, l.right.etaExpand, l.polarity, l.oriented)
      )
      /* Unification literal between subterm of intoLiteral (in findWithin side) and the
       * left side of withLiteral — solving it justifies the rewrite. */
      Out.finest(s"withClause.maxImpBound: ${Clause.maxImplicitlyBound(withClause)}")
      Out.finest(s"intoSubterm: ${intoSubterm.pretty(sig)}")
      val unificationLit = Literal.mkNegOrdered(toFind.etaExpand, intoSubterm.etaExpand)(sig)
      Out.finest(s"unificationLit: ${unificationLit.pretty(sig)}")
      // Shallow-simplify (drops the placeholder literal) and append the constraint.
      val newlits_simp = Simp.shallowSimp(withLits_without_withLiteral ++ rewrittenIntoLits)(sig) :+ unificationLit
      val result = Clause(newlits_simp)
      Out.finest(s"result: ${result.pretty(sig)}")
      result
    }
  }
  /**
    * Marker rule recording that the input conjecture was negated during
    * preprocessing. It carries no computation of its own; only `name` and
    * `inferenceStatus` are used (presumably for proof output/SZS bookkeeping —
    * confirm against the proof-printing code).
    */
  object NegateConjecture extends CalculusRule {
    final val name: String = "neg_conjecture"
    final val inferenceStatus = SZS_CounterTheorem
  }
|
lex-lex/Leo-III
|
src/main/scala/leo/modules/calculus/Rules.scala
|
Scala
|
bsd-3-clause
| 26,483 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.