code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5-1M) |
---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.util
import org.apache.spark.unsafe.types.UTF8String
object NumberConverter {
/**
* Decode v into value[].
*
* @param v is treated as an unsigned 64-bit integer
* @param radix must be between MIN_RADIX and MAX_RADIX
*/
private def decode(v: Long, radix: Int, value: Array[Byte]): Unit = {
var tmpV = v
java.util.Arrays.fill(value, 0.asInstanceOf[Byte])
var i = value.length - 1
while (tmpV != 0) {
val q = java.lang.Long.divideUnsigned(tmpV, radix)
value(i) = (tmpV - q * radix).asInstanceOf[Byte]
tmpV = q
i -= 1
}
}
/**
* Convert value[] into a long. On overflow, return -1 (as MySQL does). If a
* negative digit is found, ignore the suffix starting there.
*
* @param radix must be between MIN_RADIX and MAX_RADIX
* @param fromPos is the first element that should be considered
* @return the result should be treated as an unsigned 64-bit integer.
*/
private def encode(radix: Int, fromPos: Int, value: Array[Byte]): Long = {
var v: Long = 0L
// bound will always be positive since radix >= 2
// Note that: -1 is equivalent to 11111111...1111 which is the largest unsigned long value
val bound = java.lang.Long.divideUnsigned(-1 - radix, radix)
var i = fromPos
while (i < value.length && value(i) >= 0) {
// if v < 0, its bit representation starts with 1, so v * radix will cause
// overflow since radix is at least 2
if (v < 0) {
return -1
}
// check whether v has reached the bound;
// if v is greater than bound, v * radix + radix will cause overflow.
if (v >= bound) {
// However, what we really need to check is whether v * radix + value(i) overflows.
// Because radix >= 2, (-1 - value(i)) / radix is positive (its bit representation
// starts with 0), so we can check for overflow simply by testing whether
// (-1 - value(i)) / radix < v
if (java.lang.Long.divideUnsigned(-1 - value(i), radix) < v) {
return -1
}
}
v = v * radix + value(i)
i += 1
}
v
}
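// Worked example (illustrative, not part of the original file): for radix = 10 the unsigned
// bound above is floor((2^64 - 1 - 10) / 10) = 1844674407370955160. Once v reaches that bound,
// v * 10 + digit may no longer fit in 64 unsigned bits, so the precise check with
// (-1 - value(i)) / radix decides whether to return -1.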
/**
* Convert the bytes in value[] to the corresponding chars.
*
* @param radix must be between MIN_RADIX and MAX_RADIX
* @param fromPos is the first nonzero element
*/
private def byte2char(radix: Int, fromPos: Int, value: Array[Byte]): Unit = {
var i = fromPos
while (i < value.length) {
value(i) = Character.toUpperCase(Character.forDigit(value(i), radix)).asInstanceOf[Byte]
i += 1
}
}
/**
* Convert the chars in value[] to the corresponding integers. If invalid
* character is found, convert it to -1 and ignore the suffix starting there.
*
* @param radix must be between MIN_RADIX and MAX_RADIX
* @param fromPos is the first nonzero element
*/
private def char2byte(radix: Int, fromPos: Int, value: Array[Byte]): Unit = {
var i = fromPos
while (i < value.length) {
value(i) = Character.digit(value(i), radix).asInstanceOf[Byte]
// if an invalid character is found, there is no need to convert the suffix starting there
if (value(i) == -1) {
return
}
i += 1
}
}
/**
 * Convert numbers between different number bases. If toBase > 0 the result is
 * unsigned, otherwise it is signed.
 * NB: This logic is borrowed from org.apache.hadoop.hive.ql.udf.UDFConv
 */
def convert(n: Array[Byte], fromBase: Int, toBase: Int): UTF8String = {
if (fromBase < Character.MIN_RADIX || fromBase > Character.MAX_RADIX
|| Math.abs(toBase) < Character.MIN_RADIX
|| Math.abs(toBase) > Character.MAX_RADIX) {
return null
}
if (n.length == 0) {
return null
}
var (negative, first) = if (n(0) == '-') (true, 1) else (false, 0)
// Copy the digits in the right side of the array
val temp = new Array[Byte](Math.max(n.length, 64))
var v: Long = -1
System.arraycopy(n, first, temp, temp.length - n.length + first, n.length - first)
char2byte(fromBase, temp.length - n.length + first, temp)
// Do the conversion by going through a 64 bit integer
v = encode(fromBase, temp.length - n.length + first, temp)
if (negative && toBase > 0) {
if (v < 0) {
v = -1
} else {
v = -v
}
}
if (toBase < 0 && v < 0) {
v = -v
negative = true
}
decode(v, Math.abs(toBase), temp)
// Find the first non-zero digit or the last digits if all are zero.
val firstNonZeroPos = {
val firstNonZero = temp.indexWhere( _ != 0)
if (firstNonZero != -1) firstNonZero else temp.length - 1
}
byte2char(Math.abs(toBase), firstNonZeroPos, temp)
var resultStartPos = firstNonZeroPos
if (negative && toBase < 0) {
resultStartPos = firstNonZeroPos - 1
temp(resultStartPos) = '-'
}
UTF8String.fromBytes(java.util.Arrays.copyOfRange(temp, resultStartPos, temp.length))
}
def toBinary(l: Long): Array[Byte] = {
val result = new Array[Byte](8)
result(0) = (l >>> 56 & 0xFF).toByte
result(1) = (l >>> 48 & 0xFF).toByte
result(2) = (l >>> 40 & 0xFF).toByte
result(3) = (l >>> 32 & 0xFF).toByte
result(4) = (l >>> 24 & 0xFF).toByte
result(5) = (l >>> 16 & 0xFF).toByte
result(6) = (l >>> 8 & 0xFF).toByte
result(7) = (l & 0xFF).toByte
result
}
def toBinary(i: Int): Array[Byte] = {
val result = new Array[Byte](4)
result(0) = (i >>> 24 & 0xFF).toByte
result(1) = (i >>> 16 & 0xFF).toByte
result(2) = (i >>> 8 & 0xFF).toByte
result(3) = (i & 0xFF).toByte
result
}
def toBinary(s: Short): Array[Byte] = {
val result = new Array[Byte](2)
result(0) = (s >>> 8 & 0xFF).toByte
result(1) = (s & 0xFF).toByte
result
}
def toBinary(s: Byte): Array[Byte] = {
val result = new Array[Byte](1)
result(0) = s
result
}
}
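// Illustrative sketch (not part of the original file): expected behaviour of the helpers above,
// assuming the MySQL-compatible CONV semantics documented on convert.
object NumberConverterExamples {
  def main(args: Array[String]): Unit = {
    // binary "100" to decimal
    println(NumberConverter.convert(UTF8String.fromString("100").getBytes, 2, 10)) // 4
    // hexadecimal "FF" to decimal
    println(NumberConverter.convert(UTF8String.fromString("FF").getBytes, 16, 10)) // 255
    // negative input converted to a signed (negative) target base
    println(NumberConverter.convert(UTF8String.fromString("-10").getBytes, 16, -10)) // -16
    // big-endian byte layout produced by toBinary
    println(NumberConverter.toBinary(0x01020304).mkString(",")) // 1,2,3,4
  }
}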
|
mahak/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/NumberConverter.scala
|
Scala
|
apache-2.0
| 6,760 |
package mdtags
class link(val link: String, val mdElement: MdElement) extends MdElement {
override def toMarkdown(listIndent: Int = 0): String = s"[${mdElement.toMarkdown()}](${link})"
override def convertToMarkup(implicit indentSpaces: Int): String =
"link(\\n" +
indent(indentSpaces, "link = " + formatMarkupString(link)) + ",\\n" +
indent(indentSpaces, "text = " + mdElement.convertToMarkup) + "\\n" +
")"
}
object link {
def apply(link: String, text: String): link = new link(link, text)
def apply(link: String): link = apply(link, link)
def apply(link: String, image: image): link = new link(link, image)
}
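// Illustrative usage (sketch, not part of the original file; assumes the mdtags package provides
// the implicit String => MdElement conversion that the String-based apply above already relies on):
//   link("https://example.com", "Example").toMarkdown()  // "[Example](https://example.com)"
//   link("https://example.com").toMarkdown()             // "[https://example.com](https://example.com)"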
|
timo-schmid/mdtags
|
src/main/scala/mdtags/link.scala
|
Scala
|
apache-2.0
| 647 |
package vedavaapi
import java.util.concurrent.TimeUnit
import java.util.regex.{Pattern, PatternSyntaxException}
import javax.ws.rs.Path
import akka.http.scaladsl.server.{Directives, Route}
import akka.util.Timeout
import io.swagger.annotations._
import sanskritnlp.transliteration.transliterator
import scala.concurrent.ExecutionContext
// Returns text/plain, so does not extend Json4sSupport trait unlike some other REST API services.
@Api(value = "/transliterations_v1")
@Path("/transliterations/v1")
class TransliteratorService()(implicit executionContext: ExecutionContext, requestTimeoutSecs: Int)
extends Directives {
// Actor ask timeout
implicit val timeout: Timeout = Timeout(requestTimeoutSecs, TimeUnit.SECONDS)
val route: Route = concat(transliterate)
// True only when the pattern compiles; a PatternSyntaxException yields false.
def regexValid(pattern: String): Boolean = {
try {
Pattern.compile(pattern) != null
} catch {
case _: PatternSyntaxException => false
}
}
final val USAGE_TIPS = "Click on Try it out!"
@Path("/{sourceScript}/{destScript}")
@ApiOperation(value = "Transliterate from sourceScript to destScript.", notes = USAGE_TIPS, nickname = "transliterate", httpMethod = "POST", produces = "text/plain", consumes = "application/x-www-form-urlencoded")
@ApiImplicitParams(Array(
new ApiImplicitParam(name = "sourceScript", value = "Name of the script used for the inputString value.",
allowableValues = "iast, iastDcs, as, optitrans, dev, gujarati, gurmukhi, kannada, telugu, malayalam, oriya, bengali, assamese", defaultValue = "dev",
required = true, dataType = "string", paramType = "path"),
new ApiImplicitParam(name = "destScript", value = "Name of the script to be used for the output of this API.",
allowableValues = "iast, iastDcs, as, optitrans, dev, gujarati, gurmukhi, kannada, telugu, malayalam, oriya, bengali, assamese", defaultValue = "optitrans",
required = true, dataType = "string", paramType = "path"),
new ApiImplicitParam(name = "inputString", value = "A text in the input script/ scheme.",
example = "छिन्नमस्ते नमस्ते",
required = true, dataType = "string", paramType = "form"),
))
@ApiResponses(Array(
new ApiResponse(code = 200, message = "Return podcast feed", response = classOf[String]),
new ApiResponse(code = 500, message = "Internal server error")
))
def transliterate: Route =
path("transliterations" / "v1" / Segment / Segment)(
(sourceScript: String, destScript: String) => {
formFields('inputString) { (inputString) =>
post & complete(transliterator.transliterate(in_str = inputString, sourceScheme = sourceScript, destScheme = destScript))
}
}
)
}
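// Illustrative request (sketch, not part of the original file): the transliterate route above
// expects a form-encoded POST; host and port below are assumptions.
//   curl -X POST -d 'inputString=नमस्ते' http://localhost:9090/transliterations/v1/dev/iast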
|
vedavaapi/scala-akka-http-server
|
src/main/scala/vedavaapi/TransliteratorService.scala
|
Scala
|
apache-2.0
| 2,736 |
package dynamite
import zio.test._
import zio.test.Assertion._
import Ast._
import dynamite.Ast.Projection.Aggregate._
import dynamite.Ast.Projection.FieldSelector._
object ParserSpec extends DefaultRunnableSpec {
def parse(query: String) = Parser.parse(query)
def parseSuccess(query: String) =
assert(parse(query))(isRight(anything))
def validate(query: String, expected: Command) =
assert(parse(query).left.map(_.getCause.getMessage))(
isRight(equalTo(expected))
)
def spec = suite("parser")(
test("parse wildcard fields")(
validate("select * from playlist", Select(Seq(All), "playlist"))
),
test("allow dashes and underscores in ident")(
validate(
"select * from playlist-legacy_1",
Select(Seq(All), "playlist-legacy_1")
)
),
test("parse a single field")(
validate("select id from playlist", Select(Seq(Field("id")), "playlist"))
),
test("allow multiple fields")(
validate(
"select id,name from playlist",
Select(Seq(Field("id"), Field("name")), "playlist")
)
),
test("tolerate whitespace between fields")(
validate(
"select id, name from playlist",
Select(Seq(Field("id"), Field("name")), "playlist")
)
),
test("tolerate whitespace before from")(
validate(
"select id, name from playlist",
Select(Seq(Field("id"), Field("name")), "playlist")
)
),
test("tolerate whitespace before table name")(
validate(
"select id, name from playlist",
Select(Seq("id", "name").map(Field), "playlist")
)
),
test("support filtering by a hash key")(
validate(
"select id, name from playlist where id = 'user-id-1'",
Select(
Seq("id", "name").map(Field),
"playlist",
Option(PrimaryKey(Key("id", StringValue("user-id-1")), None))
)
)
),
test("support filtering by a hash and sort key")(
validate(
"select id, name from playlist where userId = 1 and id = 'user-id-1'",
Select(
Seq("id", "name").map(Field),
"playlist",
Option(
PrimaryKey(
Key("userId", IntValue("1")),
Some(Key("id", StringValue("user-id-1")))
)
)
)
)
),
test("support double-quoted string as well")(
validate(
"select id, name from playlist where id = \\"user-id-1\\"",
Select(
Seq("id", "name").map(Field),
"playlist",
Option(PrimaryKey(Key("id", StringValue("user-id-1")), None))
)
)
),
test("support integer values ")(
validate(
"select id, name from playlist where id = 1",
Select(
Seq("id", "name").map(Field),
"playlist",
Option(PrimaryKey(Key("id", IntValue("1")), None))
)
)
),
test("support floating point values")(
validate(
"update playlist set duration = 1.1 where id = 1",
Update(
"playlist",
Seq("duration" -> FloatValue("1.1")),
PrimaryKey(Key("id", IntValue("1")), None)
)
)
),
test("support floating point values without leading integer")(
validate(
"update playlist set duration = .1 where id = 1",
Update(
"playlist",
Seq("duration" -> FloatValue(".1")),
PrimaryKey(Key("id", IntValue("1")), None)
)
)
),
test("support negative floating point values")(
validate(
"update playlist set duration = -.1 where id = 1",
Update(
"playlist",
Seq("duration" -> FloatValue("-.1")),
PrimaryKey(Key("id", IntValue("1")), None)
)
)
),
test("support negative int values")(
validate(
"update playlist set duration = -1 where id = 1",
Update(
"playlist",
Seq("duration" -> IntValue("-1")),
PrimaryKey(Key("id", IntValue("1")), None)
)
)
),
test("support updating object values")(
validate(
"""update playlist set meta = { "tags": ["rock", "metal"], "visibility": "private" } where id = 1""",
Update(
"playlist",
Seq(
"meta" -> ObjectValue(
Seq(
"tags" -> ListValue(
Seq(
StringValue("rock"),
StringValue("metal")
)
),
"visibility" -> StringValue("private")
)
)
),
PrimaryKey(Key("id", IntValue("1")), None)
)
)
),
test("support tolerate whitespace in object values")(
validate(
"""update playlist set meta = {"tags":["rock" ,"metal"], "visibility": "private"} where id = 1""",
Update(
"playlist",
Seq(
"meta" -> ObjectValue(
Seq(
"tags" -> ListValue(
Seq(
StringValue("rock"),
StringValue("metal")
)
),
"visibility" -> StringValue("private")
)
)
),
PrimaryKey(Key("id", IntValue("1")), None)
)
)
),
test("support array values ")(
validate(
"insert into playlists (id, tracks) values (1, [1,2,3])",
Insert(
"playlists",
Seq(
"id" -> IntValue("1"),
"tracks" -> ListValue(
Seq(
IntValue("1"),
IntValue("2"),
IntValue("3")
)
)
)
)
)
),
test("support empty arrays")(
validate(
"insert into playlists (id, tracks) values (1, [])",
Insert(
"playlists",
Seq(
"id" -> IntValue("1"),
"tracks" -> ListValue(Seq.empty)
)
)
)
),
test("tolerate missing spaces around values")(
validate(
"insert into playlists (id, tracks) values(1, [])",
Insert(
"playlists",
Seq(
"id" -> IntValue("1"),
"tracks" -> ListValue(Seq.empty)
)
)
)
),
test("tolerate spaces around limit")(
validate(
"select id, name from playlist limit 10",
Select(
Seq("id", "name").map(Field),
"playlist",
limit = Some(10)
)
)
),
test("allow selecting ascending order")(
validate(
"select id, name from playlist order by id asc limit 10",
Select(
Seq("id", "name").map(Field),
"playlist",
None,
Some(OrderBy("id", Some(Ascending))),
Some(10)
)
)
),
test("allow selecting descending order")(
validate(
"select id, name from playlist order by name desc limit 10",
Select(
Seq("id", "name").map(Field),
"playlist",
None,
Some(OrderBy("name", Some(Descending))),
Some(10)
)
)
),
test("allow updating a field")(
validate(
"update users set name = 'dave' where id = 'user-id-1'",
Update(
"users",
Seq("name" -> StringValue("dave")),
PrimaryKey(Key("id", StringValue("user-id-1")), None)
)
)
),
test("allow updating a field with hash and sort key")(
validate(
"update playlists set name = 'Chillax' where userId = 'user-id-1' and id = 1",
Update(
"playlists",
Seq("name" -> StringValue("Chillax")),
PrimaryKey(
Key("userId", StringValue("user-id-1")),
Some(Key("id", IntValue("1")))
)
)
)
),
test("allow deleting a record")(
validate(
"delete from playlists where userId = 'user-id-1' and id = 1",
Delete(
"playlists",
PrimaryKey(
Key("userId", StringValue("user-id-1")),
Some(Key("id", IntValue("1")))
)
)
)
),
test("allow an item to be inserted")(
validate(
"insert into playlists (userId, id) values ('user-id-1', 1)",
Insert(
"playlists",
Seq(
"userId" -> StringValue("user-id-1"),
"id" -> IntValue("1")
)
)
)
),
test("allow listing of tables")(
validate("show tables", ShowTables)
),
test("support multiline queries")(
validate(
"""
|insert into playlists
|(userId, id)
|values ('user-id-1', 1)
|""".stripMargin,
Insert(
"playlists",
Seq(
"userId" -> StringValue("user-id-1"),
"id" -> IntValue("1")
)
)
)
),
test("allow an explicit index to be used")(
validate(
"select * from playlists use index playlist-length-keys-only",
Select(
Seq(All),
"playlists",
useIndex = Some("playlist-length-keys-only")
)
)
),
test("fail on empty table name")(
assert(Parser.parse("select * from ").left.map(_.getMessage))(
isLeft(equalTo("Failed parsing query"))
)
),
test("support count")(
validate("select count(*) from playlist", Select(Seq(Count), "playlist"))
),
test("fail on unknown aggregate")(
assert(
Parser
.parse("select nonexistent(*) from playlist")
.left
.map(_.getMessage)
)(isLeft(equalTo("Failed parsing query")))
),
test("support describing tables")(
validate("describe table playlist", DescribeTable("playlist"))
),
test("ignore case")(
parseSuccess(
"SELECT * FROM playlists ORDER BY ID ASC LIMIT 1 USE INDEX playlist-length-keys-only"
) &&
parseSuccess(
"SELECT COUNT(*) FROM playlists ORDER BY ID DESC LIMIT 1 USE INDEX playlist-length-keys-only"
) &&
parseSuccess("INSERT INTO playlists (id, tracks) VALUES (1, [1,2,3])") &&
parseSuccess(
"UPDATE playlists SET name = 'Chillax' WHERE userId = 'user-id-1' AND id = 1"
) &&
parseSuccess("SHOW TABLES") &&
parseSuccess("DESCRIBE TABLE playlist") &&
parseSuccess(
"DELETE FROM playlists WHERE userId = 'user-id-1' AND id = 1"
) &&
parseSuccess("FORMAT TABULAR") &&
parseSuccess("FORMAT JSON") &&
parseSuccess("SHOW FORMAT")
),
test("support describing current format")(
validate("show format", ShowFormat)
),
test("support setting json format")(
validate("format json", SetFormat(Ast.Format.Json))
),
test("support setting tabular format")(
validate("format tabular", SetFormat(Ast.Format.Tabular))
),
test("support true")(
validate(
"""insert into playlists (userId, id, curated) values ("user-1", "id-1", true)""",
Insert(
"playlists",
Seq(
"userId" -> StringValue("user-1"),
"id" -> StringValue("id-1"),
"curated" -> BoolValue(true)
)
)
)
),
test("support false")(
validate(
"""insert into playlists (userId, id, curated) values ("user-1", "id-1", false)""",
Insert(
"playlists",
Seq(
"userId" -> StringValue("user-1"),
"id" -> StringValue("id-1"),
"curated" -> BoolValue(false)
)
)
)
),
test("create table")(
validate(
"""create table users(userId string)""",
CreateTable(
tableName = "users",
name = "userId",
typeName = "string",
ignoreExisting = false
)
)
),
test("create table if not exists")(
validate(
"""create table if not exists users(userId string)""",
CreateTable(
tableName = "users",
name = "userId",
typeName = "string",
ignoreExisting = true
)
)
)
)
}
|
joprice/dynamite
|
src/test/scala/dynamite/ParserSpec.scala
|
Scala
|
apache-2.0
| 12,288 |
package code.model.repository
import code.model.base.CrudExampleBaseRepository
object ProductRepository extends CrudExampleBaseRepository[code.model.Product]
|
rehei/crudible
|
crudible-lift-example/src/main/scala/code/model/repository/ProductRepository.scala
|
Scala
|
apache-2.0
| 159 |
package atlas
import atlas.ast.Node
import atlas.tokens.Token
import scala.collection.mutable
object Parser {
type Result = (Seq[Node], Seq[Token])
type Parsec = (Seq[Token]) => Result
def mkASTree(ts: Seq[Token]): Node =
parseTop(ts) match { case (Seq(node), Seq()) => node }
private def parseTop(ts: Seq[Token]): Result = {
val desc = "a top level expression"
val expr = any(desc, parseStatic, parseFun, one("Newline"))
val parser = seq(repPat(expr, "EOF"), one("EOF"))
val (ns, rm) = parser(ts)
(Seq(ast.Top(ns)(ts.head.pos)), rm)
}
private def parseFun(ts: Seq[Token]): Result = {
val fn = key("fn")
val name = one("Identifier")
val newl = one("Newline")
val body = dlist(parseStmt)
val params = plist(parseParam)
val parser = seq(fn, name, params, key(":"), parseType, newl, body)
val (Seq(ast.Identifier(nm)
, ast.List(ps)
, tp: ast.Type
, ast.List(bd))
, rs) = parser(ts)
val narrow = ps collect { case p: ast.Param => p }
(Seq(ast.Fun(nm, narrow, tp, bd)(ts.head.pos)), rs)
}
private def parseApp(ts: Seq[Token]): Result = {
val name = one("Identifier")
val args = plist(parseExpr)
val parser = seq(name, args)
val (Seq(ast.Identifier(nm), ast.List(as)), rs) = parser(ts)
(Seq(ast.App(nm, as)(ts.head.pos)), rs)
}
private def parseSubs(ts: Seq[Token]): Result = {
val name = one("Identifier")
val arg = seq(key("["), parseExpr, key("]"))
val parser = seq(name, arg)
val (Seq(ast.Identifier(nm), as), rs) = parser(ts)
(Seq(ast.Subscript(nm, as)(ts.head.pos)), rs)
}
private def parseCons(ts: Seq[Token]): Result = {
val typedef = seq(key("["), parseType, key("]"))
val parser = seq(typedef, plist(parseExpr))
val (Seq(tp, ast.List(args)), rs) = parser(ts)
(Seq(ast.Cons(tp, args)(ts.head.pos)), rs)
}
private def parseStmt(ts: Seq[Token]): Result = {
val exprStmt = seq(parseExpr, one("Newline"))
val callStmt = seq(parseApp, one("Newline"))
val newlStmt = one("Newline")
val passStmt = seq(key("pass"), one("Newline"))
val parser = any("a statement",
newlStmt,
exprStmt,
callStmt,
passStmt,
parseLet,
parseMut,
parseStatic,
parseAssign,
parseAssignSub,
parseFun,
parseCond,
parseWhile,
parseFor)
parser(ts)
}
private def parseParam(ts: Seq[Token]): Result = {
val parser = seq(one("Identifier"), key(":"), parseType)
val (Seq(ast.Identifier(nm), tp), rs) = parser(ts)
(Seq(ast.Param(nm, tp)(ts.head.pos)), rs)
}
private def parseListType(ts: Seq[Token]): Result = {
val parenL = key("[")
val parenR = key("]")
val parser = seq(parenL, parseType, parenR)
val (Seq(tp), rm) = parser(ts)
(Seq(ast.ListType(tp)(ts.head.pos)), rm)
}
// TODO: Add more types!
// e.g. tuples, list, maps, polymorphic
private def parseType(ts: Seq[Token]): Result = {
val simple = one("Identifier")
val others = rep(seq(key("->"), simple))
val simpTp = seq(simple, others)
val parser = any("a type", simpTp, parseListType)
val (types, rs) = parser(ts)
(Seq(ast.Type(types)(ts.head.pos)), rs)
}
private def parseWhile(ts: Seq[Token]): Result = {
val block = dlist(parseStmt)
val break = one("Newline")
val parser = seq(key("while"), parseExpr, break, block)
val (Seq(cond, ast.List(body)), rs) = parser(ts)
(Seq(ast.While(cond, body)(ts.head.pos)), rs)
}
private def parseFor(ts: Seq[Token]): Result = {
val block = dlist(parseStmt)
val name = one("Identifier")
val break = one("Newline")
val parser = seq(
key("for"),
name,
key("="),
parseExpr,
key("to"),
parseExpr,
break,
block)
val (Seq(ast.Identifier(nm), _, from, to, ast.List(body)), rs) = parser(ts)
(Seq(ast.For(nm, from, to, body)(ts.head.pos)), rs)
}
private def parseCond(ts: Seq[Token]): Result = {
val block = dlist(parseStmt)
val break = one("Newline")
val ifStmt = seq(key("if"), parseExpr, break, block)
val elifStmt = rep(parseElif)
val elseStmt = eat(parseElse)
val (Seq(cond, ast.List(body)), ts0) = ifStmt(ts)
val (elifNodes, ts1) = elifStmt(ts0)
val (elseNodes, ts2) = elseStmt(ts1)
(Seq(ast.Cond(cond, body, elifNodes ++ elseNodes)(ts.head.pos)), ts2)
}
private def parseElif(ts: Seq[Token]): Result = {
val block = dlist(parseStmt)
val break = one("Newline")
val parser = seq(key("elif"), parseExpr, break, block)
val (Seq(cond, ast.List(body)), rs) = parser(ts)
(Seq(ast.Elif(cond, body)(ts.head.pos)), rs)
}
private def parseElse(ts: Seq[Token]): Result = {
val block = dlist(parseStmt)
val break = one("Newline")
val parser = seq(key("else"), break, block)
val (Seq(ast.List(body)), rs) = parser(ts)
(Seq(ast.Else(body)(ts.head.pos)), rs)
}
private def parseStatic(ts: Seq[Token]): Result = {
val static = key("static")
val name = one("Identifier")
val colon = key(":")
val assign = key("=")
val newline = one("Newline")
val parser = seq(static, name, colon, parseType, assign, parseExpr, newline)
val (Seq(ast.Identifier(nm), tp, _, rv), rm) = parser(ts)
(Seq(ast.Static(nm, tp, rv)(ts.head.pos)), rm)
}
private def parseMut(ts: Seq[Token]): Result = {
val let = key("let")
val mut = key("mut")
val name = one("Identifier")
val assign = key("=")
val newline = one("Newline")
val parser = seq(let, mut, name, assign, parseExpr, newline)
val (Seq(ast.Identifier(nm), _, rv), rm) = parser(ts)
(Seq(ast.Mut(nm, rv)(ts.head.pos)), rm)
}
private def parseLet(ts: Seq[Token]): Result = {
val let = key("let")
val name = one("Identifier")
val assign = key("=")
val newline = one("Newline")
val parser = seq(let, name, assign, parseExpr, newline)
val (Seq(ast.Identifier(nm), _, rv), rm) = parser(ts)
(Seq(ast.Let(nm, rv)(ts.head.pos)), rm)
}
private def parseAssign(ts: Seq[Token]): Result = {
val name = one("Identifier")
val assign = any("an assignable operator", key("="), key("+="))
val newline = one("Newline")
val parser = seq(name, assign, parseExpr, newline)
val (Seq(ast.Identifier(nm), ast.Operator(op), rv), rm) = parser(ts)
(Seq(ast.Assign(nm, op, rv)(ts.head.pos)), rm)
}
private def parseAssignSub(ts: Seq[Token]): Result = {
val name = one("Identifier")
val assign = any("an assignable operator", key("="))
val newline = one("Newline")
val parser = seq(name, key("["), parseExpr, key("]"), assign, parseExpr, newline)
val (Seq(ast.Identifier(nm), index, ast.Operator(op), rv), rm) = parser(ts)
(Seq(ast.AssignSub(nm, index, op, rv)(ts.head.pos)), rm)
}
private def parseExpr(ts: Seq[Token]): Result = {
val parser = seq(parseAtom, rep(seq(parseBinOp, parseAtom)))
val (nodes, rm) = parser(ts)
val (combined, Seq()) = sortExpr(nodes.tail, nodes.head, 0)
(Seq(combined), rm)
}
private def parseBinOp(ts: Seq[Token]): Result = {
val parser = any("an operator", precedenceMap.keys.toSeq.map(key):_*)
parser(ts)
}
private def parseUnaOp(ts: Seq[Token]): Result = {
val parser = seq(any("a unary operator", key("-"), key("!")), parseAtom)
val (Seq(ast.Operator(op), value), rm) = parser(ts)
(Seq(ast.UnaOp(op, value)(ts.head.pos)), rm)
}
private def parseAtom(ts: Seq[Token]): Result = {
val parens = seq(key("("), parseExpr, key(")"))
val anymsg = "an atomic expression"
val integr = one("Integer")
val nameid = one("Identifier")
val boolean = one("Boolean")
val parser = any(anymsg,
parens,
integr,
boolean,
parseApp,
parseSubs,
parseCons,
nameid,
parseUnaOp)
val result = parser(ts)
result
}
private def plist(item: Parsec): Parsec =
(ts: Seq[Token]) => {
val parenL = key("(")
val parenR = key(")")
val others = repRaw(seq(key(","), item), ")")
val parser = seq(parenL, eat(item, others), parenR)
val (ns, rm) = parser(ts)
(Seq(ast.List(ns)(ts.head.pos)), rm)
}
private def dlist(item: Parsec): Parsec =
(ts: Seq[Token]) => {
val indent = one("Indent")
val dedent = one("Dedent")
val others = repPat(item, "Dedent")
val parser = seq(indent, others, dedent)
val (ns, rm) = parser(ts)
(Seq(ast.List(ns)(ts.head.pos)), rm)
}
private def repPat(p: Parsec, until: String): Parsec =
(ts: Seq[Token]) => {
val buffer = mutable.Buffer[Node]()
var remain = ts
def reached = remain.headOption match {
case Some(t) => (t.productPrefix == until)
case None => false
}
while (! reached) {
val (nodes, rm) = p(remain)
buffer ++= nodes
remain = rm
}
(buffer.toSeq, remain)
}
private def repRaw(p: Parsec, until: String): Parsec =
(ts: Seq[Token]) => {
val buffer = mutable.Buffer[Node]()
var remain = ts
def reached = remain.headOption match {
case Some(t) => (t.raw == until)
case None => false
}
while (! reached) {
val (nodes, rm) = p(remain)
buffer ++= nodes
remain = rm
}
(buffer.toSeq, remain)
}
private def rep(p: Parsec): Parsec =
(ts: Seq[Token]) => {
val buffer = mutable.Buffer[Node]()
var remain = ts
try {
while (true) {
val (nodes, rm) = p(remain)
buffer ++= nodes
remain = rm
}
}
catch {
case err: ParserError if remain.length == err.count =>
// Do nothing
}
(buffer.toSeq, remain)
}
private def any(msg: String, ps: Parsec*): Parsec =
(ts: Seq[Token]) => {
var currentParsed = Option[Result](null)
var furthest = ParserError(ts.length, "")
ps.takeWhile(_ => currentParsed == None).foreach {
parsec =>
try {
val parsed@(nodes, next) = parsec(ts)
val (_, prev) = currentParsed getOrElse (Seq(), ts)
if (next.length < prev.length)
{ currentParsed = Some(parsed) }
}
catch {
case err: ParserError if err.count < furthest.count =>
furthest = err
case err: ParserError =>
// Do nothing...
}
}
// Check if any of the parsers has succeeded.
// If it didn't, throw the appropriate message
// using the longest parsed error as the highest
// priority.
(currentParsed, furthest.count == ts.length) match {
case (Some(nodes), _) =>
nodes
case (None, false) =>
throw furthest
case (None, true) =>
throw ts.report(msg)
}
}
private def seq(ps: Parsec*): Parsec =
(ts: Seq[Token]) => {
ps.foldLeft((Seq[Node](), ts)) {
case ((nodes, remains), parsec) =>
val (n, newRemains) = parsec(remains)
(nodes ++ n, newRemains)
}
}
private def eat(ps: Parsec*): Parsec =
(ts: Seq[Token]) => {
val buffer = mutable.Buffer[Node]()
var remain = ts
for (p <- ps)
try {
val (nodes, rm) = p(remain)
buffer ++= nodes
remain = rm
}
catch {
case err: ParserError
if err.count == remain.length =>
// Do nothing.
case err: ParserError =>
throw err
}
(buffer.toSeq, remain)
}
private def one(pattern: String): Parsec =
(ts: Seq[Token]) => ts match {
case h +: rest if h.productPrefix == pattern =>
(pin(h), rest)
case others =>
throw others.report(pattern)
}
private def key(s: String): Parsec =
(ts: Seq[Token]) => ts match {
case tokens.Reserve(`s`) +: rest =>
if (s == "pass")
(Seq(ast.Nop()(ts.head.pos)), rest)
else if (s == "+=" || s == "=")
(Seq(ast.Operator(s)(ts.head.pos)), rest)
else if (precedenceMap contains s)
(Seq(ast.Operator(s)(ts.head.pos)), rest)
else
(Seq(), rest)
case others =>
throw others.report(s)
}
private def pin(t: Token): Seq[Node] = t match {
case tokens.Identifier(n) => Seq(ast.Identifier(n)(t.pos))
case tokens.Integer(n) => Seq(ast.Integer(n.toInt)(t.pos))
case tokens.Boolean(n) => Seq(ast.Boolean(n.toBoolean)(t.pos))
case others => Seq()
}
private val precedenceMap = Map(
("!" -> 0),
("and" -> 10),
("or" -> 10),
("==" -> 20),
("!=" -> 20),
("+" -> 30),
("-" -> 30),
(">" -> 40),
("<" -> 40),
("<=" -> 40),
(">=" -> 40),
("*" -> 50),
("%" -> 50),
("/" -> 50))
private def sortExpr(s: Seq[Node], n: Node, m: Int): (Node, Seq[Node]) =
s match {
case ast.Operator(op) +: rest1 =>
val p = precedenceMap.get(op) getOrElse -1
if (p < m) return (n, s)
val (rhs1 +: rest2) = rest1
val (rhs2, rest3) = rest2 match {
case ast.Operator(rhsOp) +: _ =>
val next = precedenceMap.get(rhsOp) getOrElse -1
if (p < next)
sortExpr(rest2, rhs1, p + 1)
else
(rhs1, rest2)
case _ =>
(rhs1, rest2)
}
sortExpr(rest3, ast.BinOp(n, op, rhs2)(n.pos), m)
case _ =>
(n, s)
}
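// Illustrative trace (not part of the original file): for "1 + 2 * 3" parseExpr first produces
// the flat sequence [1, +, 2, *, 3]; sortExpr then regroups it using precedenceMap
// ("+" -> 30, "*" -> 50) into BinOp(1, "+", BinOp(2, "*", 3)), i.e. 1 + (2 * 3).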
private implicit class NodeOps(val ts: Seq[Token]) {
def report(s: String): ParserError =
ts.headOption match {
case Some(t) =>
val e = if (t.raw == "\n") "\\n" else t.raw
val m = s"${t.pos}: Expected $s but got '$e'."
ParserError(ts.length, m)
case None =>
val m = s"Expected $s, but reached end of file."
ParserError(ts.length, m)
}
}
}
|
jankdc/atlas
|
src/main/scala/atlas/Parser.scala
|
Scala
|
mit
| 14,024 |
package io.getquill
import io.getquill.ast._
import io.getquill.context.CanOutputClause
import io.getquill.context.sql.idiom._
import io.getquill.context.sql.norm.AddDropToNestedOrderBy
import io.getquill.context.sql.{ FlattenSqlQuery, SqlQuery }
import io.getquill.idiom.StatementInterpolator._
import io.getquill.idiom.{ Statement, StringToken, Token }
import io.getquill.norm.EqualityBehavior
import io.getquill.norm.EqualityBehavior.NonAnsiEquality
import io.getquill.sql.idiom.BooleanLiteralSupport
import io.getquill.util.Messages.fail
trait SQLServerDialect
extends SqlIdiom
with QuestionMarkBindVariables
with ConcatSupport
with CanOutputClause
with BooleanLiteralSupport {
override def querifyAst(ast: Ast) = AddDropToNestedOrderBy(SqlQuery(ast))
override def emptySetContainsToken(field: Token) = StringToken("1 <> 1")
override def prepareForProbing(string: String) = string
// SQL-Server can potentially disable ANSI-null via `SET ANSI_NULLS OFF`. Force more strict checking here
// for the sake of consistency with the other contexts.
override def equalityBehavior: EqualityBehavior = NonAnsiEquality
override protected def limitOffsetToken(query: Statement)(implicit astTokenizer: Tokenizer[Ast], strategy: NamingStrategy) =
Tokenizer[(Option[Ast], Option[Ast])] {
case (Some(limit), None) => stmt"TOP ${limit.token} $query"
case (Some(limit), Some(offset)) => stmt"$query OFFSET ${offset.token} ROWS FETCH FIRST ${limit.token} ROWS ONLY"
case (None, Some(offset)) => stmt"$query OFFSET ${offset.token} ROWS"
case other => super.limitOffsetToken(query).token(other)
}
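// Illustrative rendering of the cases above (sketch, not part of the original file):
//   limit only         -> TOP <limit> <query>
//   limit with offset  -> <query> OFFSET <offset> ROWS FETCH FIRST <limit> ROWS ONLY
//   offset only        -> <query> OFFSET <offset> ROWS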
override implicit def sqlQueryTokenizer(implicit astTokenizer: Tokenizer[Ast], strategy: NamingStrategy): Tokenizer[SqlQuery] =
Tokenizer[SqlQuery] {
case flatten: FlattenSqlQuery if flatten.orderBy.isEmpty && flatten.offset.nonEmpty =>
fail(s"SQLServer does not support OFFSET without ORDER BY")
case other => super.sqlQueryTokenizer.token(other)
}
override implicit def operationTokenizer(implicit astTokenizer: Tokenizer[Ast], strategy: NamingStrategy): Tokenizer[Operation] =
Tokenizer[Operation] {
case BinaryOperation(a, StringOperator.`+`, b) => stmt"${scopedTokenizer(a)} + ${scopedTokenizer(b)}"
case other => super.operationTokenizer.token(other)
}
}
object SQLServerDialect extends SQLServerDialect
|
getquill/quill
|
quill-sql-portable/src/main/scala/io/getquill/SQLServerDialect.scala
|
Scala
|
apache-2.0
| 2,482 |
package com.sksamuel.scapegoat.inspections.collections
import com.sksamuel.scapegoat.PluginRunner
import org.scalatest.{ OneInstancePerTest, FreeSpec, Matchers }
/** @author Stephen Samuel */
class UnsafeContainsTest extends FreeSpec with Matchers with PluginRunner with OneInstancePerTest {
override val inspections = Seq(new UnsafeContains)
"unsafe contains" - {
"should report warning" in {
val code = """
|object Test {
| import scala.language.higherKinds
| def f1[A](xs: Seq[A], y: A) = xs contains y // good
| def f2[A <: AnyRef](xs: Seq[A], y: Int) = xs contains y // bad
| def f3[A <: AnyRef](xs: Vector[A], y: Int) = xs contains y // bad
| def f4[CC[X] <: Seq[X], A](xs: CC[A], y: A) = xs contains y // good
| def f5[CC[X] <: Seq[X], A <: AnyRef, B <: AnyVal](xs: CC[A], y: B) = xs contains y // bad
|
| List(1).contains("sam")
| val int = 1
| List("sam").contains(int)
| List(2).contains(int) // is good
| List(new RuntimeException, new Exception).contains(new RuntimeException) // good
| val name = "RuntimeException"
| List(new RuntimeException).contains(name) // bad
|}""".stripMargin.trim
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 6
}
"should not report warning" - {
"for type parameter A in method, collection, and value" in {
val code = """
package com.sam
|class C {
| def f[A](xs: Seq[A], y: A) = xs contains y
|}
| """.stripMargin.trim
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
}
}
}
|
pwwpche/scalac-scapegoat-plugin
|
src/test/scala/com/sksamuel/scapegoat/inspections/collections/UnsafeContainsTest.scala
|
Scala
|
apache-2.0
| 1,892 |
package models
import common.models.halo.TimedPoint
import play.api.libs.json.Json
package object sports {
case class SportsBookTimedOdds(bookname: String, time: Long, a: Double, b: Double)
case class SportsBookOdds(bookname: String, a: Double, b: Double)
object SportsBookOdds {
/**
* Normalizes the odds according to alphabetical order of the option names.
* @return a SportsBookOdds where a is the odds of the alphabetically first option and b the second
*/
def apply(bookname: String, line1: SportsEventOption, line2: SportsEventOption): SportsBookOdds = {
val (a, b) = if (line1.name < line2.name) (line1.odds, line2.odds) else (line2.odds, line1.odds)
SportsBookOdds(bookname, a, b)
}
}
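// Illustrative sketch (not part of the original file): normalization keeps the odds aligned with
// the alphabetical order of the option names, regardless of argument order.
//   SportsBookOdds("book", SportsEventOption("TeamB", 2.1), SportsEventOption("TeamA", 1.8))
//     == SportsBookOdds("book", 1.8, 2.1)   // a = TeamA's odds, b = TeamB's odds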
case class SportsEventPair(optionA: SportsEventOption, optionB: SportsEventOption)
case class SportsEventOption(name: String, odds: Double)
case class SportsEvent(name: String, time: String, options: Seq[SportsEventOption])
case class SportsData(sportname: String, events: Seq[SportsEvent])
case class SportsBookData(bookname: String, sport: String, events: Seq[SportsEvent])
// reads
implicit val lineRead = Json.reads[SportsEventOption]
implicit val eventRead = Json.reads[SportsEvent]
implicit val bookReads = Json.reads[SportsBookData]
// writes
//implicit val oddsWrite = Json.writes[SportsEventOdds]
implicit val lineWrite = Json.writes[SportsEventOption]
implicit val eventWrite = Json.writes[SportsEvent]
implicit val bookWrites = Json.writes[SportsBookData]
}
|
asciiu/halo
|
arbiter/app/models/sports/package.scala
|
Scala
|
mit
| 1,483 |
package drt.shared
import drt.shared.api.Arrival
object PcpUtils {
val defaultPax = 0
def bestPcpPaxEstimate(flight: Arrival): Int =
(flight.ApiPax, flight.ActPax, flight.TranPax, flight.MaxPax) match {
case (Some(apiPax), _, _, _) if !flight.FeedSources.contains(LiveFeedSource) => apiPax
case (_, Some(actPax), Some(tranPax), _) if (actPax - tranPax) >= 0 => actPax - tranPax
case (_, Some(actPax), None, _) => actPax
case (Some(apiPax), _, _, _) => apiPax
case _ => defaultPax
}
def walkTime(arrival: Arrival, timeToChox: Long, firstPaxOff: Long): Option[Long] =
arrival.PcpTime.map(pcpTime => pcpTime - (arrival.bestArrivalTime(timeToChox) + firstPaxOff))
}
|
UKHomeOffice/drt-scalajs-spa-exploration
|
shared/src/main/scala/drt/shared/PcpUtils.scala
|
Scala
|
apache-2.0
| 714 |
package Tutorial
import Chisel._
import Node._
import scala.collection.mutable.HashMap
import scala.io.Source
import java.io.FileOutputStream
import java.io.File
object Image {
val MagicNumber = 0x59a66a95
val NoColorMap = 0
val StandardType = 1
def dwordFromString(s: String, off: Int): Int =
(s(off+0) << 24)|(s(off+1) << 16)|(s(off+2) << 8)|(s(off+3) << 0)
def dwordToString(d: Int) = {
var res = new Array[Byte](4)
res(0) = ((d >> 24)&255).toByte
res(1) = ((d >> 16)&255).toByte
res(2) = ((d >> 8)&255).toByte
res(3) = ((d >> 0)&255).toByte
res
}
def apply(filename: String): Image = {
val file = Source.fromFile(filename)(scala.io.Codec.ISO8859)
var buf = new StringBuilder();
file.foreach(c => buf += c)
val rawData = buf.result()
file.close()
val magicNumber = dwordFromString(rawData, 0)
if (magicNumber != MagicNumber) println("BAD MAGIC NUMBER")
val w = dwordFromString(rawData, 4)
val h = dwordFromString(rawData, 8)
val d = dwordFromString(rawData, 12)
// println(w + "x" + h + "x" + d)
val len = dwordFromString(rawData, 16)
val itype = dwordFromString(rawData, 20)
val colorMapType = dwordFromString(rawData, 24)
val colorMapLength = dwordFromString(rawData, 28)
val data = new Array[Byte](rawData.length - 32)
for (i <- 32 until rawData.length) {
data(i-32) = rawData(i).toByte
}
new Image(w, h, d, data)
}
def apply(w: Int, h: Int, d: Int): Image = {
var dat = new Array[Byte](w*h*d/8)
println("CREATING IMAGE OF LEN " + dat.length)
new Image(w, h, d, dat)
}
}
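// Illustrative header layout consumed and produced by the helpers above (sketch, not part of the
// original file; offsets follow the dword reads in Image.apply):
//   bytes  0-3   magic number (0x59a66a95)
//   bytes  4-7   width,  8-11 height, 12-15 depth
//   bytes 16-19  data length, 20-23 type, 24-27 color-map type, 28-31 color-map length
//   bytes 32..   raster data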
import Image._
class Image(val w: Int, val h: Int, val d: Int, val data: Array[Byte]) {
def write(filename: String) = {
val writer = new FileOutputStream(new File(filename))
// println("WRITING " + filename + " DATA LEN " + data.length)
writer.write(dwordToString(MagicNumber))
writer.write(dwordToString(w))
writer.write(dwordToString(h))
writer.write(dwordToString(d))
writer.write(dwordToString(data.length))
writer.write(dwordToString(StandardType))
writer.write(dwordToString(NoColorMap))
writer.write(dwordToString(0))
writer.write(data)
writer.close()
}
}
|
seyedmaysamlavasani/GorillaPP
|
chisel/KmeansAndMesh/srcOld/Image.scala
|
Scala
|
bsd-3-clause
| 2,321 |
package org.lanyard.util
/** The incomplete beta function. */
case class IncBeta( val a: Double, val b: Double ) extends PartialFunction[Double,Double] {
require( a > 0, "Incomplete beta function parameter a needs to be strictly positive. Found value: " + a )
require( b > 0, "Incomplete beta function parameter b needs to be strictly positive. Found value: " + b )
/** precision of computation*/
private val EPS = 1E-9
/** tiny value to shift */
private val TINY = 1E-30
/** Checks if the incomplete beta is defined at a given value.
*
* @param value value to be checked
* @return true if value is in [0,1], false otherwise
*/
override def isDefinedAt( value: Double ): Boolean = 0.0 <= value && value <= 1.0
/** Computes the value of the incomplete beta function at a given value.
*
* @param value value to compute the incomplete beta for
* @return incomplete beta of value
*/
def apply( value: Double ): Double = {
if( value == 0 || value == 1 ) {
value
} else if( a > 3000 && b > 3000 ) {
approximate( value )
} else {
continuedFraction( value )
}
}
/** Computes a value of the incomplete beta function.
*
* It uses the method proposed in: ''W.J. Lentz, 1976. Generating Bessel functions in Mie scattering
* calculations using continued fractions, Applied Optics, vol 15, 668''
* to compute the continued fraction defined in:
* ''Milton Abramowitz, and Irene A. Stegun. 1964. Handbook of Mathematical Functions with Formulas, Graphs,
* and Mathematical Tables. 9th Edition. Dover, New York''.
*
* @param value value to compute the incomplete beta for
* @return incomplete beta of value
*/
private def continuedFraction( value: Double ): Double = {
import math._
/** Computes the even step in the Lentz method */
@inline def evenStep( iter: Int): Double = ( b - iter ) * value / ( (a - 1 + 2 * iter) * ( a + 2 * iter) )
/** Computes the odd step in the Lentz method */
@inline def oddStep( iter: Int): Double = - ( a + iter ) * (a + b + iter) * value / ( ( a + 2 * iter) * ( a + 1 + 2 * iter) )
/** Shifts a value if it is small enough */
@inline def tinyShift( value: Double): Double = if( abs( value ) < TINY ) TINY else value
var c = 1.0
var d = tinyShift( 1.0 - (a + b) * value / ( a + 1.0) )
d = 1.0 / d
var coef = 0.0
var result = d
var unsufficientPrecision = true
var i = 1
while( i < 10000 && unsufficientPrecision ) {
coef = evenStep( i ) // even step
d = tinyShift( 1.0 + coef * d )
c = tinyShift( 1.0 + coef / c )
d = 1.0 / d
result *= d * c
coef = oddStep( i ) // odd step
d = tinyShift( 1.0 + coef * d )
c = tinyShift( 1.0 + coef / c )
d = 1.0 / d
result *= d * c
if( abs( d * c - 1.0 ) <= EPS ) {
unsufficientPrecision = false
}
}
result
}
private def approximate( value: Double): Double = ???
}
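// Illustrative usage (sketch, not part of the original file):
object IncBetaExample {
  def main(args: Array[String]): Unit = {
    val ib = IncBeta(2.0, 3.0)
    // Evaluate the continued-fraction part at a point inside the support.
    if (ib.isDefinedAt(0.4)) println(ib(0.4))
  }
}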
|
perian/Lanyard
|
src/main/scala/org/lanyard/util/IncBeta.scala
|
Scala
|
gpl-2.0
| 3,023 |
package org.sstudio.bulldozer.dsl.compare
final class IntCompare(int: Int) extends TCompare[Int] {
v = int
}
|
avril23/bulldozer
|
bulldozer/src/main/scala/org/sstudio/bulldozer/dsl/compare/IntCompare.scala
|
Scala
|
bsd-3-clause
| 112 |
/*
* Copyright 2010-2011 Christos KK Loverdos
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ckkloverdos.restie
class RestPathManager {
type Length = Int
private[this] var _patternByName = Map[String, RestPathPattern]()
private[this] var _patternListByPathLength = Map[Length, List[RestPathPattern]]()
def add(pattern: RestPathPattern): RestPathManager = synchronized {
_patternByName += pattern.name -> pattern
val patternElementCount = pattern.elementCount
_patternListByPathLength.get(patternElementCount) match {
case Some(list) =>
_patternListByPathLength = _patternListByPathLength updated (patternElementCount, pattern :: list)
case None =>
_patternListByPathLength = _patternListByPathLength updated (patternElementCount, pattern :: Nil)
}
this
}
def findPatternByName(name: String) =
_patternByName.get(name)
def findMatcher(path: String): Option[RestPathMatcher] = {
val analyzer = new RestPathAnalyzer(path)
val pathPartsCount = analyzer.pathPartsCount
this._patternListByPathLength.get(pathPartsCount) match {
case Some(patternList) =>
// Find the first pattern
var valueMap = null: Map[String, String]
val patternO = patternList find { pattern =>
valueMap = Map[String, String]()
// ... whose elements match all the analyzed path elements
analyzer.pathParts.view.zipWithIndex.forall { case (element, index) =>
if(pattern.isVariablePosition(index + 1)) {
val variable = pattern.variableAtPosition(index + 1)
valueMap += variable -> element
true
} else {
element == pattern.elementAtPosition(index + 1)
}
}
}
patternO map { pattern => new RestPathMatcher(pattern, valueMap) }
case None =>
None
}
}
override def toString =
_patternByName.mkString("RestPathManager(", "", ")")
}
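// Illustrative usage (sketch, not part of the original file; how RestPathPattern instances are
// constructed is not shown here, so `usersPattern` is a hypothetical pattern for /users/{id}):
//   val manager = new RestPathManager().add(usersPattern)
//   manager.findMatcher("/users/42")   // Some(matcher) whose value map binds "id" -> "42"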
|
loverdos/restie
|
src/main/scala/com/ckkloverdos/restie/RestPathManager.scala
|
Scala
|
apache-2.0
| 2,510 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.standardpartition
import java.nio.file.{Files, LinkOption, Paths}
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.TestQueryExecutor
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
class StandardPartitionTableDropTestCase extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
dropTable
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "dd-MM-yyyy")
sql(
"""
| CREATE TABLE originTable (empno int, empname String, designation String, doj Timestamp,
| workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
| projectcode int, projectjoindate Timestamp, projectenddate Date,attendance int,
| utilization int,salary int)
| STORED BY 'org.apache.carbondata.format'
""".stripMargin)
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE originTable OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
}
test("show partitions on partition table") {
sql(
"""
| CREATE TABLE partitionshow (designation String, doj Timestamp,
| workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
| projectcode int, projectjoindate Timestamp, projectenddate Date,attendance int,
| utilization int,salary int)
| PARTITIONED BY (empno int, empname String)
| STORED BY 'org.apache.carbondata.format'
""".stripMargin)
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionshow OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
checkExistence(sql(s"""SHOW PARTITIONS partitionshow"""), true, "empno=11", "empno=12")
}
test("droping on partition table for int partition column") {
sql(
"""
| CREATE TABLE partitionone (empname String, designation String, doj Timestamp,
| workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
| projectcode int, projectjoindate Timestamp, projectenddate Date,attendance int,
| utilization int,salary int)
| PARTITIONED BY (empno int)
| STORED BY 'org.apache.carbondata.format'
""".stripMargin)
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionone OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
checkAnswer(
sql(s"""select count (*) from partitionone"""),
sql(s"""select count (*) from originTable"""))
checkAnswer(
sql(s"""select count (*) from partitionone where empno=11"""),
sql(s"""select count (*) from originTable where empno=11"""))
sql(s"""ALTER TABLE partitionone DROP PARTITION(empno='11')""")
checkExistence(sql(s"""SHOW PARTITIONS partitionone"""), false, "empno=11")
checkAnswer(
sql(s"""select count (*) from partitionone where empno=11"""),
Seq(Row(0)))
}
test("dropping partition on table for more partition columns") {
sql(
"""
| CREATE TABLE partitionmany (empno int, empname String, designation String,
| workgroupcategory int, workgroupcategoryname String, deptno int,
| projectjoindate Timestamp, projectenddate Date,attendance int,
| utilization int,salary int)
| PARTITIONED BY (deptname String,doj Timestamp,projectcode int)
| STORED BY 'org.apache.carbondata.format'
""".stripMargin)
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionmany OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionmany OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
sql(s"""ALTER TABLE partitionmany DROP PARTITION(deptname='Learning')""")
checkExistence(sql(s"""SHOW PARTITIONS partitionmany"""), false, "deptname=Learning", "projectcode=928479")
checkAnswer(
sql(s"""select count (*) from partitionmany where deptname='Learning'"""),
Seq(Row(0)))
}
test("dropping all partition on table") {
sql(
"""
| CREATE TABLE partitionall (empno int, empname String, designation String,
| workgroupcategory int, workgroupcategoryname String, deptno int,
| projectjoindate Timestamp, projectenddate Date,attendance int,
| utilization int,salary int)
| PARTITIONED BY (deptname String,doj Timestamp,projectcode int)
| STORED BY 'org.apache.carbondata.format'
""".stripMargin)
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionall OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionall OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
sql(s"""ALTER TABLE partitionall DROP PARTITION(deptname='Learning')""")
sql(s"""ALTER TABLE partitionall DROP PARTITION(deptname='configManagement')""")
sql(s"""ALTER TABLE partitionall DROP PARTITION(deptname='network')""")
sql(s"""ALTER TABLE partitionall DROP PARTITION(deptname='protocol')""")
sql(s"""ALTER TABLE partitionall DROP PARTITION(deptname='security')""")
assert(sql(s"""SHOW PARTITIONS partitionall""").collect().length == 0)
checkAnswer(
sql(s"""select count (*) from partitionall"""),
Seq(Row(0)))
}
test("dropping static partition on table") {
sql(
"""
| CREATE TABLE staticpartition (empno int, doj Timestamp,
| workgroupcategoryname String, deptno int,
| projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
| utilization int,salary int,workgroupcategory int, empname String, designation String)
| PARTITIONED BY (deptname String)
| STORED BY 'org.apache.carbondata.format'
""".stripMargin)
sql(s"""insert into staticpartition PARTITION(deptname='software') select empno,doj,workgroupcategoryname,deptno,projectcode,projectjoindate,projectenddate,attendance,utilization,salary,workgroupcategory,empname,designation from originTable""")
checkExistence(sql(s"""SHOW PARTITIONS staticpartition"""), true, "deptname=software")
assert(sql(s"""SHOW PARTITIONS staticpartition""").collect().length == 1)
sql(s"""ALTER TABLE staticpartition DROP PARTITION(deptname='software')""")
checkAnswer(
sql(s"""select count (*) from staticpartition"""),
Seq(Row(0)))
sql(s"""insert into staticpartition select empno,doj,workgroupcategoryname,deptno,projectcode,projectjoindate,projectenddate,attendance,utilization,salary,workgroupcategory,empname,designation,deptname from originTable""")
checkExistence(sql(s"""SHOW PARTITIONS staticpartition"""), true, "deptname=protocol")
checkAnswer(
sql(s"""select count (*) from staticpartition"""),
sql(s"""select count (*) from originTable"""))
}
test("dropping all partition on table and do compaction") {
sql(
"""
| CREATE TABLE partitionallcompaction (empno int, empname String, designation String,
| workgroupcategory int, workgroupcategoryname String, deptno int,
| projectjoindate Timestamp, projectenddate Date,attendance int,
| utilization int,salary int)
| PARTITIONED BY (deptname String,doj Timestamp,projectcode int)
| STORED BY 'org.apache.carbondata.format'
""".stripMargin)
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionallcompaction OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionallcompaction OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionallcompaction OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionallcompaction OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
sql(s"""ALTER TABLE partitionallcompaction DROP PARTITION(deptname='Learning')""")
sql(s"""ALTER TABLE partitionallcompaction DROP PARTITION(deptname='configManagement')""")
sql(s"""ALTER TABLE partitionallcompaction DROP PARTITION(deptname='network')""")
sql(s"""ALTER TABLE partitionallcompaction DROP PARTITION(deptname='protocol')""")
sql(s"""ALTER TABLE partitionallcompaction DROP PARTITION(deptname='security')""")
assert(sql(s"""SHOW PARTITIONS partitionallcompaction""").collect().length == 0)
sql("ALTER TABLE partitionallcompaction COMPACT 'MAJOR'").collect()
checkAnswer(
sql(s"""select count (*) from partitionallcompaction"""),
Seq(Row(0)))
}
test("test dropping on partition table for int partition column") {
sql(
"""
| CREATE TABLE partitionone1 (empname String, designation String, doj Timestamp,
| workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
| projectcode int, projectjoindate Timestamp, projectenddate Date,attendance int,
| utilization int,salary int)
| PARTITIONED BY (empno int)
| STORED BY 'org.apache.carbondata.format'
""".stripMargin)
sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE partitionone1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
checkAnswer(
sql(s"""select count (*) from partitionone1"""),
sql(s"""select count (*) from originTable"""))
checkAnswer(
sql(s"""select count (*) from partitionone1 where empno=11"""),
sql(s"""select count (*) from originTable where empno=11"""))
sql(s"""ALTER TABLE partitionone1 DROP PARTITION(empno='11')""")
sql(s"CLEAN FILES FOR TABLE partitionone1").show()
assert(Files.notExists(Paths.get(TestQueryExecutor.warehouse + "/partitionone1/" + "empno=11"), LinkOption.NOFOLLOW_LINKS))
sql("drop table if exists partitionone1")
}
override def afterAll = {
dropTable
}
def dropTable = {
sql("drop table if exists originTable")
sql("drop table if exists originMultiLoads")
sql("drop table if exists partitionone")
sql("drop table if exists partitionall")
sql("drop table if exists partitionmany")
sql("drop table if exists partitionshow")
sql("drop table if exists staticpartition")
sql("drop table if exists partitionallcompaction")
sql("drop table if exists partitionone1")
}
}
|
sgururajshetty/carbondata
|
integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/standardpartition/StandardPartitionTableDropTestCase.scala
|
Scala
|
apache-2.0
| 11,500 |
package mesosphere.marathon.state
import java.lang.{ Double => JDouble, Integer => JInt }
import com.fasterxml.jackson.annotation.JsonIgnore
import mesosphere.marathon.Protos
import mesosphere.marathon.Protos.Constraint
import mesosphere.marathon.Protos.HealthCheckDefinition.Protocol
import mesosphere.marathon.health.HealthCheck
import mesosphere.marathon.state.Container.Docker.PortMapping
import mesosphere.marathon.state.PathId._
import mesosphere.mesos.TaskBuilder
import mesosphere.mesos.protos.{ Resource, ScalarResource }
import org.apache.mesos.Protos.ContainerInfo.DockerInfo.Network
import org.apache.mesos.{ Protos => mesos }
import scala.collection.JavaConverters._
import scala.collection.immutable.Seq
import scala.concurrent.duration._
case class AppDefinition(
id: PathId = AppDefinition.DefaultId,
cmd: Option[String] = AppDefinition.DefaultCmd,
args: Option[Seq[String]] = AppDefinition.DefaultArgs,
user: Option[String] = AppDefinition.DefaultUser,
env: Map[String, String] = AppDefinition.DefaultEnv,
instances: JInt = AppDefinition.DefaultInstances,
cpus: JDouble = AppDefinition.DefaultCpus,
mem: JDouble = AppDefinition.DefaultMem,
disk: JDouble = AppDefinition.DefaultDisk,
executor: String = AppDefinition.DefaultExecutor,
constraints: Set[Constraint] = AppDefinition.DefaultConstraints,
uris: Seq[String] = AppDefinition.DefaultUris,
storeUrls: Seq[String] = AppDefinition.DefaultStoreUrls,
ports: Seq[JInt] = AppDefinition.DefaultPorts,
requirePorts: Boolean = AppDefinition.DefaultRequirePorts,
backoff: FiniteDuration = AppDefinition.DefaultBackoff,
backoffFactor: JDouble = AppDefinition.DefaultBackoffFactor,
maxLaunchDelay: FiniteDuration = AppDefinition.DefaultMaxLaunchDelay,
container: Option[Container] = AppDefinition.DefaultContainer,
healthChecks: Set[HealthCheck] = AppDefinition.DefaultHealthChecks,
dependencies: Set[PathId] = AppDefinition.DefaultDependencies,
upgradeStrategy: UpgradeStrategy = AppDefinition.DefaultUpgradeStrategy,
labels: Map[String, String] = AppDefinition.DefaultLabels,
acceptedResourceRoles: Option[Set[String]] = None,
version: Timestamp = Timestamp.now()) extends MarathonState[Protos.ServiceDefinition, AppDefinition]
with Timestamped {
import mesosphere.mesos.protos.Implicits._
assert(
portIndicesAreValid(),
"Health check port indices must address an element of the ports array or container port mappings."
)
/**
* Returns true if all health check port index values are in the range
   * of this app's ports array or, if defined, the array of container
* port mappings.
*/
def portIndicesAreValid(): Boolean = {
val validPortIndices = 0 until hostPorts.size
healthChecks.forall { hc =>
hc.protocol == Protocol.COMMAND || (validPortIndices contains hc.portIndex)
}
}
def toProto: Protos.ServiceDefinition = {
val commandInfo = TaskBuilder.commandInfo(
app = this,
taskId = None,
host = None,
ports = Seq.empty,
envPrefix = None
)
val cpusResource = ScalarResource(Resource.CPUS, cpus)
val memResource = ScalarResource(Resource.MEM, mem)
val diskResource = ScalarResource(Resource.DISK, disk)
val appLabels = labels.map {
case (key, value) =>
mesos.Parameter.newBuilder
.setKey(key)
.setValue(value)
.build
}
val builder = Protos.ServiceDefinition.newBuilder
.setId(id.toString)
.setCmd(commandInfo)
.setInstances(instances)
.addAllPorts(ports.asJava)
.setRequirePorts(requirePorts)
.setBackoff(backoff.toMillis)
.setBackoffFactor(backoffFactor)
.setMaxLaunchDelay(maxLaunchDelay.toMillis)
.setExecutor(executor)
.addAllConstraints(constraints.asJava)
.addResources(cpusResource)
.addResources(memResource)
.addResources(diskResource)
.addAllHealthChecks(healthChecks.map(_.toProto).asJava)
.setVersion(version.toString)
.setUpgradeStrategy(upgradeStrategy.toProto)
.addAllDependencies(dependencies.map(_.toString).asJava)
.addAllStoreUrls(storeUrls.asJava)
.addAllLabels(appLabels.asJava)
container.foreach { c => builder.setContainer(c.toProto()) }
acceptedResourceRoles.foreach { acceptedResourceRoles =>
val roles = Protos.ResourceRoles.newBuilder()
acceptedResourceRoles.seq.foreach(roles.addRole)
builder.setAcceptedResourceRoles(roles)
}
builder.build
}
//TODO: fix style issue and enable this scalastyle check
//scalastyle:off cyclomatic.complexity method.length
def mergeFromProto(proto: Protos.ServiceDefinition): AppDefinition = {
val envMap: Map[String, String] =
proto.getCmd.getEnvironment.getVariablesList.asScala.map {
v => v.getName -> v.getValue
}.toMap
val resourcesMap: Map[String, JDouble] =
proto.getResourcesList.asScala.map {
r => r.getName -> (r.getScalar.getValue: JDouble)
}.toMap
val argsOption =
if (proto.getCmd.getArgumentsCount > 0)
Some(proto.getCmd.getArgumentsList.asScala.to[Seq])
else None
//Precondition: either args or command is defined
val commandOption =
if (argsOption.isEmpty && proto.getCmd.hasValue && proto.getCmd.getValue.nonEmpty)
Some(proto.getCmd.getValue)
else None
val containerOption =
if (proto.hasContainer)
Some(Container(proto.getContainer))
else if (proto.getCmd.hasContainer)
Some(Container(proto.getCmd.getContainer))
else if (proto.hasOBSOLETEContainer)
Some(Container(proto.getOBSOLETEContainer))
else None
val acceptedResourceRoles: Option[Set[String]] =
if (proto.hasAcceptedResourceRoles)
Some(proto.getAcceptedResourceRoles.getRoleList.asScala.toSet)
else
None
AppDefinition(
id = proto.getId.toPath,
user = if (proto.getCmd.hasUser) Some(proto.getCmd.getUser) else None,
cmd = commandOption,
args = argsOption,
executor = proto.getExecutor,
instances = proto.getInstances,
ports = proto.getPortsList.asScala.to[Seq],
requirePorts = proto.getRequirePorts,
backoff = proto.getBackoff.milliseconds,
backoffFactor = proto.getBackoffFactor,
maxLaunchDelay = proto.getMaxLaunchDelay.milliseconds,
constraints = proto.getConstraintsList.asScala.toSet,
acceptedResourceRoles = acceptedResourceRoles,
cpus = resourcesMap.getOrElse(Resource.CPUS, this.cpus),
mem = resourcesMap.getOrElse(Resource.MEM, this.mem),
disk = resourcesMap.getOrElse(Resource.DISK, this.disk),
env = envMap,
uris = proto.getCmd.getUrisList.asScala.map(_.getValue).to[Seq],
storeUrls = proto.getStoreUrlsList.asScala.to[Seq],
container = containerOption,
healthChecks = proto.getHealthChecksList.asScala.map(new HealthCheck().mergeFromProto).toSet,
labels = proto.getLabelsList.asScala.map { p => p.getKey -> p.getValue }.toMap,
version = Timestamp(proto.getVersion),
upgradeStrategy =
if (proto.hasUpgradeStrategy) UpgradeStrategy.fromProto(proto.getUpgradeStrategy)
else UpgradeStrategy.empty,
dependencies = proto.getDependenciesList.asScala.map(PathId.apply).toSet
)
}
@JsonIgnore
def portMappings: Option[Seq[PortMapping]] =
for {
c <- container
d <- c.docker
n <- d.network if n == Network.BRIDGE
pms <- d.portMappings
} yield pms
@JsonIgnore
def containerHostPorts: Option[Seq[Int]] =
for (pms <- portMappings) yield pms.map(_.hostPort.toInt)
@JsonIgnore
def containerServicePorts: Option[Seq[Int]] =
for (pms <- portMappings) yield pms.map(_.servicePort.toInt)
@JsonIgnore
def hostPorts: Seq[Int] =
containerHostPorts.getOrElse(ports.map(_.toInt))
@JsonIgnore
def servicePorts: Seq[Int] =
containerServicePorts.getOrElse(ports.map(_.toInt))
@JsonIgnore
def hasDynamicPort: Boolean = servicePorts.contains(0)
def mergeFromProto(bytes: Array[Byte]): AppDefinition = {
val proto = Protos.ServiceDefinition.parseFrom(bytes)
mergeFromProto(proto)
}
def withNormalizedVersion: AppDefinition = copy(version = Timestamp(0))
def isOnlyScaleChange(to: AppDefinition): Boolean =
!isUpgrade(to) && (instances != to.instances)
def isUpgrade(to: AppDefinition): Boolean =
this != to.copy(instances = instances, version = version)
}
object AppDefinition {
val RandomPortValue: Int = 0
// App defaults
val DefaultId: PathId = PathId.empty
val DefaultCmd: Option[String] = None
val DefaultArgs: Option[Seq[String]] = None
val DefaultUser: Option[String] = None
val DefaultEnv: Map[String, String] = Map.empty
val DefaultInstances: Int = 1
val DefaultCpus: Double = 1.0
val DefaultMem: Double = 128.0
val DefaultDisk: Double = 0.0
val DefaultExecutor: String = ""
val DefaultConstraints: Set[Constraint] = Set.empty
val DefaultUris: Seq[String] = Seq.empty
val DefaultStoreUrls: Seq[String] = Seq.empty
val DefaultPorts: Seq[JInt] = Seq(RandomPortValue)
val DefaultRequirePorts: Boolean = false
val DefaultBackoff: FiniteDuration = 1.second
val DefaultBackoffFactor = 1.15
val DefaultMaxLaunchDelay: FiniteDuration = 1.hour
val DefaultContainer: Option[Container] = None
val DefaultHealthChecks: Set[HealthCheck] = Set.empty
val DefaultDependencies: Set[PathId] = Set.empty
val DefaultUpgradeStrategy: UpgradeStrategy = UpgradeStrategy.empty
val DefaultLabels: Map[String, String] = Map.empty
/**
* This default is only used in tests
*/
val DefaultAcceptedResourceRoles: Set[String] = Set.empty
def fromProto(proto: Protos.ServiceDefinition): AppDefinition =
AppDefinition().mergeFromProto(proto)
}
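// Hedged usage sketch, not part of the original file: it builds a minimal
// AppDefinition and round-trips it through the protobuf form via the
// toProto/fromProto methods defined above. The path and resource values are
// illustrative only; the file's existing imports (PathId._ etc.) are assumed.
object AppDefinitionExample extends App {
  val app = AppDefinition(
    id = "/product/service".toPath,
    cmd = Some("sleep 3600"),
    instances = 2,
    cpus = 0.5,
    mem = 256.0)
  // Serialize to Protos.ServiceDefinition and rebuild an AppDefinition from it.
  val restored = AppDefinition.fromProto(app.toProto)
  assert(restored.id == app.id && restored.cmd == app.cmd)
}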
|
sepiroth887/marathon
|
src/main/scala/mesosphere/marathon/state/AppDefinition.scala
|
Scala
|
apache-2.0
| 9,870 |
package com.sksamuel.elastic4s.searches.aggs.pipeline
import com.sksamuel.elastic4s.script.Script
import com.sksamuel.elastic4s.searches.aggs.AbstractAggregation
import com.sksamuel.elastic4s.searches.sort.Sort
trait PipelineAggregationApi {
def avgBucketAgg(name: String, bucketsPath: String): AvgBucketPipelineAgg = avgBucketAggregation(name, bucketsPath)
def avgBucketAggregation(name: String, bucketsPath: String): AvgBucketPipelineAgg =
AvgBucketPipelineAgg(name, bucketsPath)
def bucketSelectorAggregation(name: String,
script: Script,
bucketsPathMap: Map[String, String]): BucketSelectorPipelineAgg =
BucketSelectorPipelineAgg(name, script, bucketsPathMap)
def bucketSortAggregation(name: String, sort: Seq[Sort]): BucketSortPipelineAgg =
BucketSortPipelineAgg(name, sort)
def bucketScriptAggregation(name: String, script: Script, bucketsPath: Map[String, String]): BucketScriptPipelineAgg =
BucketScriptPipelineAgg(name, script, bucketsPath)
def cumulativeSumAggregation(name: String, bucketsPath: String): CumulativeSumPipelineAgg =
CumulativeSumPipelineAgg(name, bucketsPath)
def derivativeAggregation(name: String, bucketsPath: String): DerivativePipelineAgg =
DerivativePipelineAgg(name, bucketsPath)
def diffAggregation(name: String, bucketsPath: String): DiffPipelineAgg = DiffPipelineAgg(name, bucketsPath)
def extendedStatsBucketAggregation(name: String, bucketsPath: String): ExtendedStatsBucketPipelineAgg =
ExtendedStatsBucketPipelineAgg(name, bucketsPath)
def maxBucketAgg(name: String, bucketsPath: String): MaxBucket = maxBucketAggregation(name, bucketsPath)
def maxBucketAggregation(name: String, bucketsPath: String): MaxBucket =
MaxBucket(name, bucketsPath)
def minBucketAggregation(name: String, bucketsPath: String): MinBucketPipelineAgg =
MinBucketPipelineAgg(name, bucketsPath)
def movingAverageAggregation(name: String, bucketsPath: String): MovAvgPipelineAgg =
MovAvgPipelineAgg(name, bucketsPath)
def percentilesBucketAggregation(name: String, bucketsPath: String): PercentilesBucketPipelineAgg =
PercentilesBucketPipelineAgg(name, bucketsPath)
def statsBucketAggregation(name: String, bucketsPath: String): StatsBucketPipelineAgg =
StatsBucketPipelineAgg(name, bucketsPath)
def sumBucketAggregation(name: String, bucketsPath: String): SumBucketPipelineAgg =
SumBucketPipelineAgg(name, bucketsPath)
}
trait PipelineAgg extends AbstractAggregation
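// Hedged usage sketch, not part of the original file: the DSL helpers above are
// meant to be mixed into an API object so pipeline aggregations can be built
// directly from a name and a buckets path. Names and paths are illustrative only.
object PipelineAggregationApiExample extends PipelineAggregationApi {
  // Average of the "sales" metric across the buckets of a date histogram.
  val avgSales: AvgBucketPipelineAgg =
    avgBucketAggregation("avg_monthly_sales", "sales_per_month>sales")
  // First derivative of the same metric from bucket to bucket.
  val salesTrend: DerivativePipelineAgg =
    derivativeAggregation("sales_trend", "sales_per_month>sales")
}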
|
Tecsisa/elastic4s
|
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/aggs/pipeline/PipelineAggDsl.scala
|
Scala
|
apache-2.0
| 2,544 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import scala.language.existentials
import scala.reflect.ClassTag
import scala.util.{Failure, Success, Try}
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.util.StringKeyHashMap
/** A catalog for looking up user defined functions, used by an [[Analyzer]]. */
trait FunctionRegistry {
final def registerFunction(name: String, builder: FunctionBuilder): Unit = {
registerFunction(name, new ExpressionInfo(builder.getClass.getCanonicalName, name), builder)
}
def registerFunction(name: String, info: ExpressionInfo, builder: FunctionBuilder): Unit
@throws[AnalysisException]("If function does not exist")
def lookupFunction(name: String, children: Seq[Expression]): Expression
/* List all of the registered function names. */
def listFunction(): Seq[String]
/* Get the class of the registered function by specified name. */
def lookupFunction(name: String): Option[ExpressionInfo]
}
class SimpleFunctionRegistry extends FunctionRegistry {
private val functionBuilders =
StringKeyHashMap[(ExpressionInfo, FunctionBuilder)](caseSensitive = false)
override def registerFunction(name: String, info: ExpressionInfo, builder: FunctionBuilder)
: Unit = {
functionBuilders.put(name, (info, builder))
}
override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
val func = functionBuilders.get(name).map(_._2).getOrElse {
throw new AnalysisException(s"undefined function $name")
}
func(children)
}
override def listFunction(): Seq[String] = functionBuilders.iterator.map(_._1).toList.sorted
override def lookupFunction(name: String): Option[ExpressionInfo] = {
functionBuilders.get(name).map(_._1)
}
}
/**
* A trivial catalog that returns an error when a function is requested. Used for testing when all
* functions are already filled in and the analyzer needs only to resolve attribute references.
*/
object EmptyFunctionRegistry extends FunctionRegistry {
override def registerFunction(name: String, info: ExpressionInfo, builder: FunctionBuilder)
: Unit = {
throw new UnsupportedOperationException
}
override def lookupFunction(name: String, children: Seq[Expression]): Expression = {
throw new UnsupportedOperationException
}
override def listFunction(): Seq[String] = {
throw new UnsupportedOperationException
}
override def lookupFunction(name: String): Option[ExpressionInfo] = {
throw new UnsupportedOperationException
}
}
object FunctionRegistry {
type FunctionBuilder = Seq[Expression] => Expression
val expressions: Map[String, (ExpressionInfo, FunctionBuilder)] = Map(
// misc non-aggregate functions
expression[Abs]("abs"),
expression[CreateArray]("array"),
expression[Coalesce]("coalesce"),
expression[Explode]("explode"),
expression[Greatest]("greatest"),
expression[If]("if"),
expression[IsNaN]("isnan"),
expression[IsNull]("isnull"),
expression[IsNotNull]("isnotnull"),
expression[Least]("least"),
expression[Coalesce]("nvl"),
expression[Rand]("rand"),
expression[Randn]("randn"),
expression[CreateStruct]("struct"),
expression[CreateNamedStruct]("named_struct"),
expression[Sqrt]("sqrt"),
expression[NaNvl]("nanvl"),
// math functions
expression[Acos]("acos"),
expression[Asin]("asin"),
expression[Atan]("atan"),
expression[Atan2]("atan2"),
expression[Bin]("bin"),
expression[Cbrt]("cbrt"),
expression[Ceil]("ceil"),
expression[Ceil]("ceiling"),
expression[Cos]("cos"),
expression[Cosh]("cosh"),
expression[Conv]("conv"),
expression[EulerNumber]("e"),
expression[Exp]("exp"),
expression[Expm1]("expm1"),
expression[Floor]("floor"),
expression[Factorial]("factorial"),
expression[Hypot]("hypot"),
expression[Hex]("hex"),
expression[Logarithm]("log"),
expression[Log]("ln"),
expression[Log10]("log10"),
expression[Log1p]("log1p"),
expression[Log2]("log2"),
expression[UnaryMinus]("negative"),
expression[Pi]("pi"),
expression[Pow]("pow"),
expression[Pow]("power"),
expression[Pmod]("pmod"),
expression[UnaryPositive]("positive"),
expression[Rint]("rint"),
expression[Round]("round"),
expression[ShiftLeft]("shiftleft"),
expression[ShiftRight]("shiftright"),
expression[ShiftRightUnsigned]("shiftrightunsigned"),
expression[Signum]("sign"),
expression[Signum]("signum"),
expression[Sin]("sin"),
expression[Sinh]("sinh"),
expression[Tan]("tan"),
expression[Tanh]("tanh"),
expression[ToDegrees]("degrees"),
expression[ToRadians]("radians"),
// aggregate functions
expression[Average]("avg"),
expression[Count]("count"),
expression[First]("first"),
expression[Last]("last"),
expression[Max]("max"),
expression[Min]("min"),
expression[Sum]("sum"),
// string functions
expression[Ascii]("ascii"),
expression[Base64]("base64"),
expression[Concat]("concat"),
expression[ConcatWs]("concat_ws"),
expression[Encode]("encode"),
expression[Decode]("decode"),
expression[FindInSet]("find_in_set"),
expression[FormatNumber]("format_number"),
expression[GetJsonObject]("get_json_object"),
expression[InitCap]("initcap"),
expression[Lower]("lcase"),
expression[Lower]("lower"),
expression[Length]("length"),
expression[Levenshtein]("levenshtein"),
expression[RegExpExtract]("regexp_extract"),
expression[RegExpReplace]("regexp_replace"),
expression[StringInstr]("instr"),
expression[StringLocate]("locate"),
expression[StringLPad]("lpad"),
expression[StringTrimLeft]("ltrim"),
expression[FormatString]("format_string"),
expression[FormatString]("printf"),
expression[StringRPad]("rpad"),
expression[StringRepeat]("repeat"),
expression[StringReverse]("reverse"),
expression[StringTrimRight]("rtrim"),
expression[SoundEx]("soundex"),
expression[StringSpace]("space"),
expression[StringSplit]("split"),
expression[Substring]("substr"),
expression[Substring]("substring"),
expression[SubstringIndex]("substring_index"),
expression[StringTranslate]("translate"),
expression[StringTrim]("trim"),
expression[UnBase64]("unbase64"),
expression[Upper]("ucase"),
expression[Unhex]("unhex"),
expression[Upper]("upper"),
// datetime functions
expression[AddMonths]("add_months"),
expression[CurrentDate]("current_date"),
expression[CurrentTimestamp]("current_timestamp"),
expression[DateDiff]("datediff"),
expression[DateAdd]("date_add"),
expression[DateFormatClass]("date_format"),
expression[DateSub]("date_sub"),
expression[DayOfMonth]("day"),
expression[DayOfYear]("dayofyear"),
expression[DayOfMonth]("dayofmonth"),
expression[FromUnixTime]("from_unixtime"),
expression[FromUTCTimestamp]("from_utc_timestamp"),
expression[Hour]("hour"),
expression[LastDay]("last_day"),
expression[Minute]("minute"),
expression[Month]("month"),
expression[MonthsBetween]("months_between"),
expression[NextDay]("next_day"),
expression[Quarter]("quarter"),
expression[Second]("second"),
expression[ToDate]("to_date"),
expression[ToUTCTimestamp]("to_utc_timestamp"),
expression[TruncDate]("trunc"),
expression[UnixTimestamp]("unix_timestamp"),
expression[WeekOfYear]("weekofyear"),
expression[Year]("year"),
// collection functions
expression[Size]("size"),
expression[SortArray]("sort_array"),
expression[ArrayContains]("array_contains"),
// misc functions
expression[Crc32]("crc32"),
expression[Md5]("md5"),
expression[Sha1]("sha"),
expression[Sha1]("sha1"),
expression[Sha2]("sha2"),
expression[SparkPartitionID]("spark_partition_id"),
expression[InputFileName]("input_file_name")
)
val builtin: FunctionRegistry = {
val fr = new SimpleFunctionRegistry
expressions.foreach { case (name, (info, builder)) => fr.registerFunction(name, info, builder) }
fr
}
/** See usage above. */
def expression[T <: Expression](name: String)
(implicit tag: ClassTag[T]): (String, (ExpressionInfo, FunctionBuilder)) = {
// See if we can find a constructor that accepts Seq[Expression]
val varargCtor = Try(tag.runtimeClass.getDeclaredConstructor(classOf[Seq[_]])).toOption
val builder = (expressions: Seq[Expression]) => {
if (varargCtor.isDefined) {
        // If there is a constructor that accepts Seq[Expression], use it.
Try(varargCtor.get.newInstance(expressions).asInstanceOf[Expression]) match {
case Success(e) => e
case Failure(e) => throw new AnalysisException(e.getMessage)
}
} else {
        // Otherwise, find a constructor that matches the number of arguments, and use that.
val params = Seq.fill(expressions.size)(classOf[Expression])
val f = Try(tag.runtimeClass.getDeclaredConstructor(params : _*)) match {
case Success(e) =>
e
case Failure(e) =>
throw new AnalysisException(s"Invalid number of arguments for function $name")
}
Try(f.newInstance(expressions : _*).asInstanceOf[Expression]) match {
case Success(e) => e
case Failure(e) => throw new AnalysisException(e.getMessage)
}
}
}
val clazz = tag.runtimeClass
val df = clazz.getAnnotation(classOf[ExpressionDescription])
if (df != null) {
(name,
(new ExpressionInfo(clazz.getCanonicalName, name, df.usage(), df.extended()),
builder))
} else {
(name, (new ExpressionInfo(clazz.getCanonicalName, name), builder))
}
}
}
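// Hedged usage sketch, not part of the original file: it registers one of the
// expressions above in a fresh SimpleFunctionRegistry and builds it from child
// expressions, mirroring how `builtin` is populated. Upper and Literal come from
// the expressions._ wildcard import at the top of this file.
object FunctionRegistryExample {
  def demo(): Unit = {
    val registry = new SimpleFunctionRegistry
    val (name, (info, builder)) = FunctionRegistry.expression[Upper]("upper")
    registry.registerFunction(name, info, builder)
    // Resolves the builder by name and constructs Upper(Literal("hello")).
    val expr = registry.lookupFunction("upper", Seq(Literal("hello")))
    println(expr)
  }
}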
|
practice-vishnoi/dev-spark-1
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
|
Scala
|
apache-2.0
| 10,766 |
package org.scalaide.refactoring.internal
package source
import scala.tools.refactoring.implementations
import org.eclipse.ltk.ui.refactoring.RefactoringWizardPage
import org.scalaide.core.internal.jdt.model.ScalaSourceFile
import org.scalaide.core.internal.statistics.Features.IntroduceProductNTrait
import org.scalaide.refactoring.internal.RefactoringExecutorWithWizard
import ui.IntroduceProductNTraitConfigurationPageGenerator
/**
* This refactoring implements the ProductN trait for a class.
* Given N selected class parameters this refactoring generates
* the methods needed to implement the ProductN trait. This includes
* implementations for hashCode and equals.
* @see GenerateHashcodeAndEquals
*/
class IntroduceProductNTrait extends RefactoringExecutorWithWizard {
def createRefactoring(selectionStart: Int, selectionEnd: Int, file: ScalaSourceFile) = new IntroduceProductNTraitRefactoring(selectionStart, selectionEnd, file)
class IntroduceProductNTraitRefactoring(start: Int, end: Int, file: ScalaSourceFile)
extends ClassParameterDrivenIdeRefactoring(IntroduceProductNTrait, "Introduce ProductN trait", start, end, file) with IntroduceProductNTraitConfigurationPageGenerator {
val refactoring = withCompiler { c =>
new implementations.IntroduceProductNTrait {
val global = c
}
}
override private[source] def configPage(prepResult: refactoring.PreparationResult): RefactoringWizardPage =
new IntroduceProductNTraitConfigurationPage(
prepResult,
selectedClassParamNames_=,
callSuper_=,
keepExistingEqualityMethods_=)
}
}
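// Hedged illustration, not part of the original file: the refactoring above makes
// a class implement ProductN for its selected parameters; a hand-written equivalent
// of the expected result looks roughly like this (the exact generated code depends
// on the scala-refactoring implementation and the options chosen in the wizard).
object IntroduceProductNTraitExample {
  class Vec(val x: Int, val y: Int) extends Product2[Int, Int] {
    def _1: Int = x
    def _2: Int = y
    def canEqual(other: Any): Boolean = other.isInstanceOf[Vec]
    override def equals(other: Any): Boolean = other match {
      case that: Vec => that.canEqual(this) && _1 == that._1 && _2 == that._2
      case _ => false
    }
    override def hashCode: Int = (_1, _2).hashCode
  }
}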
|
dragos/scala-ide
|
org.scala-ide.sdt.core/src/org/scalaide/refactoring/internal/source/IntroduceProductNTrait.scala
|
Scala
|
bsd-3-clause
| 1,628 |
package org.geoscript.feature
/**
* Utilities for working with [[org.geoscript.feature.Feature]] in a typesafe way.
*
* [[org.geoscript.feature.Feature]] is defined in terms of java.lang.Object and
* requires casting to use. The classes in this package provide some
* convenience around doing the casting - in particular, we define a trait
* [[Fields]] which can be used to retrieve and update fields from and to
* features.
*
* A ``Fields`` may be constructed from a name and a type. The Fields then provides
* an ``unapply`` method for extracting values from features, and an update
* method for updating a feature (in place.) This enables pattern-matching with
* fields instances, and use of scala's syntactic sugar for updating
* collections. (By convention, fields instances should have names with an
* initial capital for use with pattern matching.)
*
* {{{
* val feature: Feature
* val Title: Fields[String] = "title".of[String]
* Title.unapply(feature): Option[String]
* val Title(t) = feature
* Title.update(feature, "Grand Poobah")
* Title(feature) = "Grand Poobah"
* }}}
*
* Fields instances may be combined by use of the ``~`` operator. In this case,
* the primitive values used with the Field must also be combined or
* deconstructed using ``~``.
* {{{
* val Record: Fields[String ~ Int ~ String] =
* "title".of[String] ~ "releasedate".of[Int] ~ "artist".of[String]
* val Record(title ~ releaseDate ~ artist) = feature
* Record(feature) = ("The White Album" ~ 1968 ~ "The Beatles")
* }}}
*
* A ``Fields`` also provides the ``mkSchema`` method for creating a
* [[org.geoscript.feature.Schema]]. Since a ``Schema`` requires a name and any
* geometry fields must specify a [[org.geoscript.projection.Projection]], these
* must be passed in to ``mkSchema``.
* {{{
* val Place = "name".of[String] ~ "loc".of[Geometry]
* val schema = Place.mkSchema("places", LatLon)
* }}}
*
* It is possible to create Features instead of modifying them. However, a
* Schema is required. The ``factoryForSchema`` method tests a schema for
* compatibility with a Fields and produces a feature factory function if the
* schema is compatible.
*
* {{{
* val placeSchema: Schema
* Place.factoryForSchema(placeSchema) match {
* case Some(mkPlace) => mkPlace("Library" ~ Point(1,2))
* case None => sys.error("The datastore is not compatible with place features")
* }
* }}}
*
* Finally, the ``schemaAndFactory`` method can be used to create a compatible
* schema and return it along with the feature factory. It takes the same
* inputs as the ``mkSchema`` method.
*
* {{{
* val (schema, mkPlace) = Place.schemaAndFactory("name", LatLon)
* }}}
*/
package object builder {
/**
* Provides syntactic sugar for combining values into instances of the ``~``
* class.
*
* @see [[org.geoscript.feature.builder]]
*/
implicit class Appendable[A](a: A) {
def ~ [B](b: B): (A ~ B) = new ~ (a, b)
}
/**
* Provides syntactic sugar for creating Fields instances.
*
* @see [[org.geoscript.feature.builder]]
*/
implicit class FieldSetBuilder(val name: String) extends AnyVal {
def of[T : Manifest]: Fields[T] = {
val clazz = implicitly[Manifest[T]].runtimeClass.asInstanceOf[Class[T]]
new NamedField(name, clazz)
}
}
}
package builder {
/**
* A Fields represents one or more fields that features may have, and provides
* facilities for retrieving and updating those fields in features.
*
* @see [[org.geoscript.feature.builder]]
*/
sealed trait Fields[T] {
def conformsTo(schema: Schema): Boolean
def fields: Seq[Field]
def values(t: T): Seq[AnyRef]
def unapply(feature: Feature): Option[T]
def update(feature: Feature, value: T): Unit
final
def schemaAndFactory
(name: String,
proj: org.geoscript.projection.Projection,
schemaFactory: org.opengis.feature.`type`.FeatureTypeFactory = schemaFactory,
featureFactory: org.opengis.feature.FeatureFactory = featureFactory)
: (Schema, T => Feature) = {
val schema = mkSchema(name, proj, schemaFactory)
(schema, factoryForSchema(schema, featureFactory).get)
}
final
def ~[U](that: Fields[U]): Fields[T ~ U] =
new ChainedFields[T, U](this, that)
final
def factoryForSchema
(schema: Schema,
featureFactory: org.opengis.feature.FeatureFactory = featureFactory)
: Option[T => Feature] =
if (conformsTo(schema))
Some(unsafeFactory(schema, featureFactory))
else
None
final
def mkSchema
(name: String,
proj: org.geoscript.projection.Projection,
schemaFactory: org.opengis.feature.`type`.FeatureTypeFactory = schemaFactory)
: Schema = {
val builder = new SchemaBuilder(schemaFactory)
import builder._
import org.geoscript.geometry.Geometry
Schema(
name,
fields = this.fields.map {
case Field(name, binding) if classOf[Geometry].isAssignableFrom(binding) =>
GeoField(name, binding, proj)
case f => f
})
}
private[builder]
def unsafeFactory
(schema: Schema,
featureFactory: org.opengis.feature.FeatureFactory)
: T => Feature = {
t =>
val feature = featureFactory.createSimpleFeature(values(t).toArray, schema, "")
update(feature, t)
feature
}
}
private[builder]
class ChainedFields[T, U](
tFields: Fields[T],
uFields: Fields[U]
) extends Fields[T ~ U] {
def conformsTo(schema: Schema): Boolean =
(tFields conformsTo schema) && (uFields conformsTo schema)
def fields = tFields.fields ++ uFields.fields
def values(x: T ~ U): Seq[AnyRef] = {
val (t ~ u) = x
tFields.values(t) ++ uFields.values(u)
}
def update(feature: Feature, value: T ~ U) {
val (t ~ u) = value
tFields(feature) = t
uFields(feature) = u
}
def unapply(feature: Feature): Option[T ~ U] =
for {
t <- tFields.unapply(feature)
u <- uFields.unapply(feature)
} yield t ~ u
}
private[builder]
class NamedField[T](name: String, clazz: Class[T]) extends Fields[T] {
def conformsTo(schema: Schema): Boolean = schema.fields.exists(field =>
field.name == name && field.binding.isAssignableFrom(clazz))
def fields = Seq(schemaBuilder.Field(name, clazz))
def values(t: T): Seq[AnyRef] = Seq(t.asInstanceOf[AnyRef])
def update(feature: Feature, value: T) {
feature.setAttribute(name, value)
}
def unapply(feature: Feature): Option[T] = {
val att = feature.getAttribute(name)
if (att == null || clazz.isInstance(att))
Some(clazz.cast(att))
else
None
}
}
/**
* A simple container for pairs of values, with nice syntax for destructuring
* nested pairs.
*/
case class ~[A,B](a: A, b: B)
}
|
dwins/geoscript.scala
|
geoscript/src/main/scala/feature/builder/Builder.scala
|
Scala
|
mit
| 6,930 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.ahc
import java.util.concurrent.atomic.AtomicBoolean
import scala.util.control.NonFatal
import io.gatling.commons.util.Throwables._
import io.gatling.http.action.sync.HttpTx
import io.gatling.http.response.Response
import org.asynchttpclient.{ AsyncHandler => AhcAsyncHandler, _ }
import org.asynchttpclient.AsyncHandler.State
import org.asynchttpclient.AsyncHandler.State._
import org.asynchttpclient.netty.request.NettyRequest
import com.typesafe.scalalogging._
import io.netty.handler.codec.http.HttpHeaders
object AsyncHandler extends StrictLogging {
private val DebugEnabled = logger.underlying.isDebugEnabled
private val InfoEnabled = logger.underlying.isInfoEnabled
}
/**
* This class is the AsyncHandler that AsyncHttpClient needs to process a request's response
*
* It is part of the HttpRequestAction
*
* @constructor constructs a Gatling AsyncHandler
* @param tx the data about the request to be sent and processed
* @param responseProcessor the responseProcessor
*/
class AsyncHandler(tx: HttpTx, responseProcessor: ResponseProcessor) extends AhcAsyncHandler[Unit] with LazyLogging {
private val responseBuilder = tx.responseBuilderFactory(tx.request.ahcRequest)
private val init = new AtomicBoolean
private val done = new AtomicBoolean
// [fl]
//
//
//
//
// [fl]
private[http] def start(): Unit =
if (init.compareAndSet(false, true)) {
responseBuilder.updateStartTimestamp()
// [fl]
//
// [fl]
}
// [fl]
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
// [fl]
override def onRequestSend(request: NettyRequest): Unit = {
responseBuilder.doReset()
if (AsyncHandler.DebugEnabled) {
      responseBuilder.setNettyRequest(request)
}
}
override def onRetry(): Unit =
if (!done.get) responseBuilder.markReset()
override def onStatusReceived(status: HttpResponseStatus): State = {
if (!done.get) responseBuilder.accumulate(status)
CONTINUE
}
override def onHeadersReceived(headers: HttpHeaders): State = {
if (!done.get) responseBuilder.accumulate(headers)
CONTINUE
}
override def onTrailingHeadersReceived(headers: HttpHeaders): State = {
if (!done.get) responseBuilder.accumulate(headers)
CONTINUE
}
override def onBodyPartReceived(bodyPart: HttpResponseBodyPart): State = {
if (!done.get) responseBuilder.accumulate(bodyPart)
CONTINUE
}
private def withResponse(f: Response => Unit): Unit =
if (done.compareAndSet(false, true)) {
try {
val response = responseBuilder.build
f(response)
} catch {
case NonFatal(t) => sendOnThrowable(responseBuilder.buildSafeResponse, t)
}
}
override def onCompleted: Unit =
withResponse { response =>
try {
responseProcessor.onCompleted(tx, response)
} catch {
case NonFatal(t) => sendOnThrowable(response, t)
}
}
override def onThrowable(throwable: Throwable): Unit =
withResponse { response =>
responseBuilder.updateEndTimestamp()
sendOnThrowable(response, throwable)
}
private def sendOnThrowable(response: Response, throwable: Throwable): Unit = {
val errorMessage = throwable.detailedMessage
if (AsyncHandler.DebugEnabled)
logger.debug(s"Request '${tx.request.requestName}' failed for user ${tx.session.userId}", throwable)
else if (AsyncHandler.InfoEnabled)
logger.info(s"Request '${tx.request.requestName}' failed for user ${tx.session.userId}: $errorMessage")
responseProcessor.onThrowable(tx, response, errorMessage)
}
}
|
wiacekm/gatling
|
gatling-http/src/main/scala/io/gatling/http/ahc/AsyncHandler.scala
|
Scala
|
apache-2.0
| 4,321 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.expression.sources
import scala.reflect.internal.util.SourceFile
import org.scalaide.core.compiler.IScalaPresentationCompiler
import org.scalaide.logging.HasLogger
object Imports extends HasLogger with SPCIntegration {
type ScopedImports = Seq[List[String]] // List for easier AST integration
private val ImportsSearchTimeoutInMillis = 100
private def reportProblemWithObtainingImports(reason: String): ScopedImports = {
logger.warn(s"No imports due to: $reason")
Nil
}
private case class ImportExtractor(pc: IScalaPresentationCompiler,
toLineNumber: Int) {
class ImportTraverser
extends pc.Traverser
with (pc.Tree => ScopedImports) {
// store trees that create new scope
private var path: Seq[pc.Tree] = Nil
// imports per given scope
private var importsMap: Map[Option[pc.Tree], List[pc.Import]] = Map().withDefault(_ => Nil)
// set to true to stop traversing (for whole tree not just current subtree)
private var done: Boolean = false
override def traverse(tree: pc.Tree): Unit = {
// Skip if already found in different subtree
if (!done) {
tree.pos match {
case pc.NoPosition =>
case pos if pos.line == toLineNumber =>
            // we reached the desired line - stop searching
done = true
case _ =>
}
// Check it again - required line might be found in previous block
if (!done) {
tree match {
case _: pc.Function | _: pc.Block | _: pc.CaseDef =>
//add new scope
path = tree +: path
case importTree: pc.Import =>
//add new import to scope
val currentScopeImports = importsMap(path.headOption) :+ importTree
importsMap = importsMap + (path.headOption -> currentScopeImports)
case _ =>
}
super.traverse(tree)
}
}
}
/** Stringify select using names instead of toString */
private def stringifySelect(tree: pc.Tree): String = tree match {
case pc.Ident(name) => name.toString
case pc.Select(on, name) => stringifySelect(on) + "." + name
case other => throw new RuntimeException(s"We don't support manual to string conversion for: $other")
}
/** Manually convert import to string */
    def manuallyStringifyImport(importTree: pc.Import): String = {
val originalRoot = stringifySelect(importTree.expr)
// Selectors are hard to stringify.
// Fake import is created just to obtain original stringified selectors
val fakeRoot = pc.Ident(pc.TermName("fakeRoot"))
val fakeImport = pc.Import(fakeRoot, importTree.selectors)
val stringifiedSelectors = fakeImport.toString().drop("import fakeRoot.".size)
s"import $originalRoot.$stringifiedSelectors"
}
/** Create string from tree. Works also with erroneous trees */
def stringifyImport(importTree: pc.Import): String =
if (importTree.expr.isErroneous) manuallyStringifyImport(importTree)
else importTree.toString()
override def apply(tree: pc.Tree): ScopedImports = {
traverse(tree)
val scopes = None +: path.map(Option.apply)
scopes.flatMap(importsMap.get).map(_.map(stringifyImport))
}
}
def importForFile(tree: pc.Tree): ScopedImports =
new ImportTraverser().apply(tree)
}
  private def getImports(pc: IScalaPresentationCompiler, sourceFile: SourceFile, line: Int): ScopedImports = {
pc.askParsedEntered(sourceFile, true).get(ImportsSearchTimeoutInMillis) match {
case None => reportProblemWithObtainingImports("timeout")
case Some(Right(error)) => reportProblemWithObtainingImports(error.getMessage)
case Some(Left(tree)) =>
pc.asyncExec {
val extractor = new ImportExtractor(pc, line)
extractor.importForFile(tree.asInstanceOf[extractor.pc.Tree])
}.get(ImportsSearchTimeoutInMillis) match {
case None => reportProblemWithObtainingImports("timeout")
case Some(Right(error)) => reportProblemWithObtainingImports(error.getMessage)
case Some(Left(imports)) => imports
}
}
}
def importsFromCurrentStackFrame: ScopedImports = forCurrentStackFrame(getImports, reportProblemWithObtainingImports)
}
|
stephenh/scala-ide
|
org.scala-ide.sdt.debug.expression/src/org/scalaide/debug/internal/expression/sources/Imports.scala
|
Scala
|
bsd-3-clause
| 4,504 |
package org.skycastle.core.design
import com.jme3.scene.Spatial
import org.skycastle.util.grid.{GridSize, GridPos}
import org.skycastle.util.octree.GridStorage
trait Change {
def canDo: ChangeResult
def doChange
def undoChange
}
case class ChangeImpl(_canDo: () => ChangeResult,
_doChange: () => Unit,
_undoChange: () => Unit) extends Change {
def canDo = _canDo()
def doChange = _doChange()
def undoChange = _undoChange()
}
trait HistoryDocument[T <: Change] {
private var undoQueue: List[T] = Nil
private var redoQueue: List[T] = Nil
def canDoChange(change: T): Boolean = change.canDo.isSuccess
def doChange(change: T): ChangeResult = {
val result = change.canDo
result.onSuccess{
change.doChange
undoQueue = change :: undoQueue
}
result
}
def undo(): Boolean = {
if (undoQueue.isEmpty) false
else {
val change = undoQueue.head
undoQueue = undoQueue.tail
change.undoChange
redoQueue = change :: redoQueue
true
}
}
def redo(): Boolean = {
if (redoQueue.isEmpty) false
else {
val change = redoQueue.head
      redoQueue = redoQueue.tail
change.doChange
undoQueue ::= change
true
}
}
}
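// Hedged usage sketch, not part of the original file: ChangeImpl combined with
// HistoryDocument gives undo/redo over arbitrary mutations. ChangeSuccess is the
// success value already used elsewhere in this package (see checkCanAdd below).
object HistoryDocumentExample {
  def demo(): Unit = {
    var counter = 0
    object Doc extends HistoryDocument[Change]
    val increment = ChangeImpl(
      _canDo = () => ChangeSuccess,     // always allowed
      _doChange = () => counter += 1,   // apply the change
      _undoChange = () => counter -= 1) // revert the change
    Doc.doChange(increment) // counter == 1
    Doc.undo()              // counter == 0
    Doc.redo()              // counter == 1
  }
}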
trait DesignChange extends Change
/**
* Contains a design for some structure.
* The design is made up of Parts.
* Provides a full set of editing operations, including undo.
*/
class Design extends HistoryDocument[DesignChange] {
private val gridStorage = new GridStorage[Part](GridSize(0))
private var parts: List[Part] = Nil
private var selectedParts: Set[Part] = Set()
case class AddChange(part: Part) extends DesignChange {
def canDo = checkCanAdd(part)
def doChange = doAddPart(part)
def undoChange = doRemovePart(part)
}
case class RemoveChange(part: Part) extends DesignChange {
def canDo = checkCanRemove(part)
def doChange = doRemovePart(part)
def undoChange = doAddPart(part)
}
def addPart(part: Part): ChangeResult = {
doChange(AddChange(part))
}
private def checkCanAdd(part: Part): ChangeResult = {
if (part == null) return ChangeNotPossible("Added part should not be null")
if (parts.contains(part)) return ChangeNotPossible("Can't add the same part " + part + " twice.")
// Check occupation
val occupants: List[Part] = part.occupiedCells.toList flatMap {(c: GridPos) => gridStorage.getDataAt(c)}
if (!occupants.isEmpty) return AlreadyOccupied(occupants)
// Check connections
// TODO
// Check support?
// TODO
ChangeSuccess
}
private def checkCanRemove(part: Part): ChangeResult = {
if (part == null) ChangeNotPossible("Removed part should not be null")
else if (!parts.contains(part)) ChangeNotPossible("Part " + part + " not found.")
else ChangeSuccess
}
private def doAddPart(part: Part) {
parts = part :: parts
part.occupiedCells.toList.foreach(c => gridStorage.put(c, part))
}
private def doRemovePart(part: Part) {
parts = parts.filterNot(_ == part)
part.occupiedCells.toList.foreach(c => gridStorage.remove(c))
}
def selectPart(part: Part) {
if (part == null) throw new IllegalArgumentException("Part should not be null")
if (!parts.contains(part)) throw new IllegalArgumentException("Can't find the part " + part + ".")
selectedParts += part
}
def unSelectPart(part: Part) {}
def togglePartSelection(part: Part) {}
def selectAll() {}
def selectNone() {}
def groupSelectedParts() {}
def unGroupSelectedGroups() {}
// TODO: Select all in volume
def deletePart(part: Part) {}
def getView(): Spatial = {null}
}
case class AlreadyOccupied(occupants: List[Part]) extends ChangeFailure
case class MismatchingConnections() extends ChangeFailure
case object MissingSupport extends ChangeFailure
|
zzorn/skycastle
|
src/main/scala/org/skycastle/core/design/Design.scala
|
Scala
|
gpl-2.0
| 3,879 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.catalog
import java.util.{HashMap => JHashMap, Map => JMap}
import java.lang.{Long => JLong}
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.table.api.TableSchema
import org.apache.flink.table.plan.stats.TableStats
/**
* Defines a table in an [[ExternalCatalog]].
*
* @param identifier Identifier of the table (database name and table name)
 * @param tableType Table type, e.g. csv, hbase, kafka
* @param schema Schema of the table (column names and types)
* @param properties Properties of the table
* @param stats Statistics of the table
* @param comment Comment of the table
* @param createTime Create timestamp of the table
* @param lastAccessTime Timestamp of last access of the table
*/
case class ExternalCatalogTable(
identifier: TableIdentifier,
tableType: String,
schema: TableSchema,
properties: JMap[String, String] = new JHashMap(),
stats: TableStats = null,
comment: String = null,
createTime: JLong = System.currentTimeMillis,
lastAccessTime: JLong = -1L)
/**
* Identifier for a catalog table.
*
* @param database Database name
* @param table Table name
*/
case class TableIdentifier(
database: String,
table: String) {
override def toString: String = s"$database.$table"
}
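// Hedged usage sketch, not part of the original file: it defines a catalog table
// for a CSV source. The property keys and the TableSchema constructor arguments
// are illustrative assumptions, not something this file prescribes.
object ExternalCatalogTableExample {
  import org.apache.flink.api.common.typeinfo.BasicTypeInfo._
  def csvTable: ExternalCatalogTable = {
    val schema = new TableSchema(
      Array("id", "name"),
      Array[TypeInformation[_]](LONG_TYPE_INFO, STRING_TYPE_INFO))
    val props = new JHashMap[String, String]()
    props.put("path", "/data/users.csv") // illustrative property key
    props.put("fieldDelim", ",")         // illustrative property key
    ExternalCatalogTable(
      identifier = TableIdentifier("default_db", "users"),
      tableType = "csv",
      schema = schema,
      properties = props)
  }
}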
|
DieBauer/flink
|
flink-libraries/flink-table/src/main/scala/org/apache/flink/table/catalog/ExternalCatalogTable.scala
|
Scala
|
apache-2.0
| 2,233 |
package geek.lawsof.physics.lib.block
import java.util
import cpw.mods.fml.common.registry.GameRegistry
import geek.lawsof.physics.Reference
import geek.lawsof.physics.lib.item.ItemDescriptor
import geek.lawsof.physics.lib.item.traits.whiteColor
import net.minecraft.block.Block
import net.minecraft.client.renderer.texture.IIconRegister
import net.minecraft.entity.Entity
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.item.{EnumRarity, ItemBlock, ItemStack}
import net.minecraft.util.IIcon
import net.minecraft.world.World
/**
* Created by anshuman on 15-07-2014.
*/
class ItemBlockBase(b: Block) extends ItemBlock(b) {
setMaxDamage(0)
val block = b.asInstanceOf[BlockBase]
def items = block.blocks.map(o => (o._1, o._2.item))
override def getMetadata(dmg: Int): Int = dmg
def getInternalName(dmg: Int = 0) = block.getInternalName(newItemStack(dmg = dmg))
override def getUnlocalizedName(stack: ItemStack): String = "block." + getInternalName(stack.getItemDamage)
def newItemStack(size: Int = 1, dmg: Int = 0) = new ItemStack(this, size, dmg)
def newStack(item: ItemDescriptor, size: Int = 1) = newItemStack(size, getMeta(item))
def getMeta(item: ItemDescriptor) = items.map(_.swap).get(item).get
override def registerIcons(reg: IIconRegister): Unit = {
items.foreach(_._2.registerIcon(reg))
errorIcon = reg.registerIcon(s"${Reference.MOD_ID}:ErrorItem")
}
var errorIcon: IIcon = null
override def getIconFromDamage(dmg: Int): IIcon = items.get(dmg) match {
case Some(item) => item.icon
case None => errorIcon
}
override def getRarity(stack: ItemStack): EnumRarity = items.get(stack.getItemDamage) match {
case Some(item) => item.txtColor.color
case None => whiteColor().color
}
override def addInformation(stack: ItemStack, p_77624_2_ : EntityPlayer, list: util.List[_], p_77624_4_ : Boolean): Unit = items.get(stack.getItemDamage) match {
case Some(item) => item.tooltipInfo(list)
case None => list.asInstanceOf[util.List[String]].add("This Item Is Invalid, This Is Probably Due To Some Corruption Or a Recent Update, Throw This Away, Sorry")
}
override def hasEffect(stack: ItemStack, pass: Int): Boolean = items.get(stack.getItemDamage) match {
case Some(item) => item.shiny
case None => false
}
override def doesContainerItemLeaveCraftingGrid(stack: ItemStack): Boolean = items(stack.getItemDamage).containerStack._2
override def hasContainerItem(stack: ItemStack): Boolean = items(stack.getItemDamage).hasContainer
override def getContainerItem(stack: ItemStack): ItemStack = items(stack.getItemDamage).containerStack._1
override def onCreated(stack: ItemStack, w: World, p: EntityPlayer): Unit = items(stack.getItemDamage).initNBT(stack, w, p)
}
|
GeckoTheGeek42/TheLawsOfPhysics
|
src/main/scala/geek/lawsof/physics/lib/block/ItemBlockBase.scala
|
Scala
|
mit
| 2,788 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.preferences
import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer
import org.eclipse.jface.preference.BooleanFieldEditor
import org.eclipse.jface.preference.FieldEditorPreferencePage
import org.eclipse.swt.SWT
import org.eclipse.swt.widgets.Composite
import org.eclipse.swt.widgets.Group
import org.eclipse.ui.IWorkbench
import org.eclipse.ui.IWorkbenchPreferencePage
import org.scalaide.debug.internal.ScalaDebugPlugin
import org.scalaide.debug.internal.model.MethodClassifier
class DebuggerPreferencePage extends FieldEditorPreferencePage(FieldEditorPreferencePage.GRID) with IWorkbenchPreferencePage {
import DebuggerPreferencePage._
setPreferenceStore(ScalaDebugPlugin.plugin.getPreferenceStore)
private val groups = scala.collection.mutable.MutableList[Group]()
override def createFieldEditors(): Unit = {
createFiltersSection()
createHotCodeReplaceSection()
}
override def init(workbench: IWorkbench): Unit = {}
override def dispose: Unit = {
groups.foreach(_.dispose())
super.dispose()
}
private def createFiltersSection(): Unit = {
val filtersSection = createGroupComposite("Configured step filters", getFieldEditorParent())
addBooleanField(FILTER_SYNTHETIC, "Filter SYNTHETIC methods", filtersSection)
addBooleanField(FILTER_GETTER, "Filter Scala getters", filtersSection)
addBooleanField(FILTER_SETTER, "Filter Scala setters", filtersSection)
addBooleanField(FILTER_DEFAULT_GETTER, "Filter getters for default parameters", filtersSection)
addBooleanField(FILTER_FORWARDER, "Filter forwarder to trait methods", filtersSection)
}
private def createHotCodeReplaceSection(): Unit = {
val hotCodeReplaceSection = createGroupComposite("Hot Code Replace", getFieldEditorParent())
addBooleanField(HotCodeReplaceEnabled,
"Debug applications with experimental HCR support turned on [the change won't be applied to already running ones]", hotCodeReplaceSection)
addBooleanField(NotifyAboutFailedHcr, "Show a message when hot code replace fails", hotCodeReplaceSection)
addBooleanField(NotifyAboutUnsupportedHcr, "Show a message when hot code replace is not supported by VM", hotCodeReplaceSection)
addBooleanField(PerformHcrForFilesContainingErrors, "Replace class files containing compilation errors", hotCodeReplaceSection)
addBooleanField(DropObsoleteFramesAutomatically,
"When a thread is suspended, try to drop automatically frames recognised by VM as obsolete", hotCodeReplaceSection)
addBooleanField(AllowToDropObsoleteFramesManually, "Ignore obsolete state when checking if drop to frame can be performed", hotCodeReplaceSection)
}
private def addBooleanField(name: String, label: String, group: Group): Unit =
addField(new BooleanFieldEditor(name, label, group))
private def createGroupComposite(text: String, parent: Composite, style: Int = SWT.NONE): Group = {
val g = new Group(parent, style)
g.setText(text)
groups += g
g
}
}
object DebuggerPreferencePage {
import MethodClassifier._
val BASE = ScalaDebugPlugin.id + "."
val BASE_FILTER = BASE + "filter."
val FILTER_SYNTHETIC = BASE_FILTER + Synthetic
val FILTER_GETTER = BASE_FILTER + Getter
val FILTER_SETTER = BASE_FILTER + Setter
val FILTER_DEFAULT_GETTER = BASE_FILTER + DefaultGetter
val FILTER_FORWARDER = BASE_FILTER + Forwarder
val HotCodeReplaceEnabled = "org.scala-ide.sdt.debug.hcr.enabled"
val NotifyAboutFailedHcr = "org.scala-ide.sdt.debug.hcr.notifyFailed"
val NotifyAboutUnsupportedHcr = "org.scala-ide.sdt.debug.hcr.notifyUnsupported"
val PerformHcrForFilesContainingErrors = "org.scala-ide.sdt.debug.hcr.performForFilesContainingErrors"
val DropObsoleteFramesAutomatically = "org.scala-ide.sdt.debug.hcr.dropObsoleteFramesAutomatically"
val AllowToDropObsoleteFramesManually = "org.scala-ide.sdt.debug.hcr.allowToDropObsoleteFramesManually"
}
class DebugerPreferencesInitializer extends AbstractPreferenceInitializer {
import DebuggerPreferencePage._
override def initializeDefaultPreferences(): Unit = {
val store = ScalaDebugPlugin.plugin.getPreferenceStore
store.setDefault(FILTER_SYNTHETIC, true)
store.setDefault(FILTER_GETTER, true)
store.setDefault(FILTER_SETTER, true)
store.setDefault(FILTER_DEFAULT_GETTER, true)
store.setDefault(FILTER_FORWARDER, true)
store.setDefault(HotCodeReplaceEnabled, false)
store.setDefault(NotifyAboutFailedHcr, true)
store.setDefault(NotifyAboutUnsupportedHcr, true)
store.setDefault(PerformHcrForFilesContainingErrors, false)
store.setDefault(DropObsoleteFramesAutomatically, true)
store.setDefault(AllowToDropObsoleteFramesManually, true)
}
}
|
Kwestor/scala-ide
|
org.scala-ide.sdt.debug/src/org/scalaide/debug/internal/preferences/DebuggerPreferencePage.scala
|
Scala
|
bsd-3-clause
| 4,815 |
package katas.scala.orderbook.v5
import scala.xml.XML
import java.io.File
import org.xml.sax.helpers.DefaultHandler
import org.xml.sax.Attributes
import akka.actor.{Actor, ActorRef, ActorSystem, Props}
import scala.collection._
import immutable.TreeMap
import katas.scala.orderbook.v5.XmlCommandReader.ReadFrom
import java.util.concurrent.{SynchronousQueue, TimeUnit}
import scala.concurrent.Await
import scala.concurrent.duration.Duration
/**
* User: dima
* Date: 08/04/2012
*/
object Main {
def main(args: Array[String]) {
val system = ActorSystem("orderBook")
val report = new SynchronousQueue[CharSequence]()
val reportBuilder = system.actorOf(Props(new ReportBuilder(report)))
val orderRouter = system.actorOf(Props(new OrderRouter(reportBuilder)))
val orderRegistry = system.actorOf(Props(new OrderRegistry(orderRouter)))
val commandReader = system.actorOf(Props(new XmlCommandReader(orderRegistry)))
commandReader ! XmlCommandReader.ReadFrom("/Users/dima/IdeaProjects/katas/src/main/scala/ru/orderbook/orders2.xml")
println(report.poll(10, TimeUnit.SECONDS))
Await.ready(system.terminate(), Duration.Inf)
}
}
// common messages
case object StartOfStream
case object EndOfStream
// sent from XmlCommandReader to OrderRegistry
case class Add(id: Int, symbol: String, isBuy: Boolean, price: Int, size: Int)
case class Edit(id: Int, price: Int, size: Int)
case class Remove(id: Int)
// sent from OrderRegistry to OrderRouter and eventually OrderBooks
case class AddOrder(order: Order)
case class RemoveOrder(order: Order)
case class UpdateOrder(oldOrder: Order, newOrder: Order)
// messages for generating report
case object ReportRequest
case class ExpectedReportSize(size: Int)
case class OrderBookReport(symbol: String, bidSide: immutable.Map[Int, PriceLevel], askSide: immutable.Map[Int, PriceLevel])
case class Order(id: Int, symbol: String, isBuy: Boolean, price: Int, size: Int)
case class PriceLevel(price: Int, size: Int, count: Int)
class ReportBuilder(reportOutput: SynchronousQueue[CharSequence]) extends Actor {
private var expectedReportsSize: Int = 0
private var reports: TreeMap[String, OrderBookReport] = TreeMap()
def receive = {
case ExpectedReportSize(size) => expectedReportsSize = size
case report@OrderBookReport(symbol, _, _) =>
reports = reports.updated(symbol, report)
if (reports.size == expectedReportsSize) {
reportOutput.add(format(reports))
}
}
private def format(reports: TreeMap[String, OrderBookReport]): CharSequence = {
reports.values.foldLeft("") { (acc, report) =>
      acc + "\n" + report.symbol + "\n" +
        "bidSide\n" + format(report.bidSide) +
        "askSide\n" + format(report.askSide)
}
}
private def format(bookSide: immutable.Map[Int, PriceLevel]): CharSequence = {
bookSide.values.foldLeft("") { (acc, level) =>
      acc + "\tprice = " + level.price + ", size = " + level.size + ", count = " + level.count + "\n"
}
}
}
class OrderBook(symbol: String) extends Actor {
private var bidSide: immutable.Map[Int, PriceLevel] = new TreeMap()(Ordering.Int.reverse).withDefault{ PriceLevel(_, 0, 0) }
private var askSide: immutable.Map[Int, PriceLevel] = new TreeMap()(Ordering.Int).withDefault{ PriceLevel(_, 0, 0) }
def receive = {
case AddOrder(order) => add(order)
case RemoveOrder(order) => remove(order)
case UpdateOrder(oldOrder, newOrder) =>
remove(oldOrder)
add(newOrder)
case ReportRequest => sender ! OrderBookReport(symbol, bidSide, askSide)
case msg@_ => println("OrderBook doesn't understand: " + msg)
}
private def add(order: Order) {
updateBookSideFor(order, { (bookSide, level) =>
bookSide.updated(level.price, PriceLevel(level.price, level.size + order.size, level.count + 1))
})
}
private def remove(order: Order) {
updateBookSideFor(order, { (bookSide, level) =>
if (level.count <= 1) bookSide - level.price
else bookSide.updated(level.price, PriceLevel(level.price, level.size - order.size, level.count - 1))
})
}
private def updateBookSideFor(order: Order, f: (immutable.Map[Int, PriceLevel], PriceLevel) => immutable.Map[Int, PriceLevel]) {
if (order.isBuy)
bidSide = f(bidSide, bidSide(order.price))
else
askSide = f(askSide, askSide(order.price))
}
}
class OrderRouter(reportBuilder: ActorRef) extends Actor {
private val orderBooks: mutable.Map[String, ActorRef] = mutable.Map()
def receive = {
case msg@AddOrder(order) => orderBookFor(order) ! msg
case msg@UpdateOrder(oldOrder, _) => orderBookFor(oldOrder) ! msg
case msg@RemoveOrder(order) => orderBookFor(order) ! msg
case EndOfStream =>
reportBuilder ! ExpectedReportSize(orderBooks.size)
orderBooks.values.foreach{_ ! ReportRequest}
case msg : OrderBookReport => reportBuilder ! msg
case msg@_ => println("OrderRouter doesn't understand :" + msg)
}
def orderBookFor(order: Order): ActorRef = {
orderBooks.getOrElseUpdate(order.symbol, { context.actorOf(Props(new OrderBook(order.symbol))) })
}
}
class OrderRegistry(orderRouter: ActorRef) extends Actor {
private var orders: mutable.Map[Int, Order] = mutable.Map()
def receive = {
case Add(id, symbol, isBuy, price, size) =>
val order = Order(id, symbol, isBuy, price, size)
orders = orders.updated(id, order)
orderRouter ! AddOrder(order)
case Edit(id, price, size) =>
val order = orders(id)
val newOrder = Order(id, order.symbol, order.isBuy, price, size)
orders(id) = newOrder
orderRouter ! UpdateOrder(order, newOrder)
case Remove(id) =>
orderRouter ! RemoveOrder(orders.remove(id).get)
case msg@_ => orderRouter ! msg
}
}
object XmlCommandReader {
case class ReadFrom(filename: String)
}
class XmlCommandReader(orderRegistry: ActorRef) extends Actor {
def receive = {
case ReadFrom(filename) =>
      // use a separate thread so as not to tie up the actor system's thread pool
new Thread(new Runnable() {
def run() {
parse(filename)
}
}, "XML reading thread").start()
}
private def parse(filename: String) {
XML.parser.parse(new File(filename), new DefaultHandler {
override def startElement(uri: String, localName: String, qName: String, attributes: Attributes) {
def valueOf(name: String) = attributes.getValue(name)
val command = qName match {
case "add" => Add(
valueOf("order-id").toInt,
valueOf("symbol"),
valueOf("type") == "buy",
valueOf("price").toInt,
valueOf("quantity").toInt
)
case "edit" => Edit(
valueOf("order-id").toInt,
valueOf("price").toInt,
valueOf("quantity").toInt
)
case "remove" => Remove(valueOf("order-id").toInt)
case _ => () // ignore
}
if (command != ()) {
orderRegistry ! command
}
}
override def startDocument() {
orderRegistry ! StartOfStream
}
override def endDocument() {
orderRegistry ! EndOfStream
}
})
}
}
|
dkandalov/katas
|
scala/src/katas/scala/orderbook/v5/Main.scala
|
Scala
|
unlicense
| 7,212 |
package io.buoyant.interpreter
import com.twitter.finagle.util.LoadService
import com.twitter.finagle.{Path, Stack}
import io.buoyant.config.Parser
import io.buoyant.namer.{InterpreterConfig, InterpreterInitializer}
import org.scalatest.FunSuite
class MeshInterpreterInitializerTest extends FunSuite {
test("sanity") {
// ensure it doesn't totally blowup
val _ = MeshInterpreterConfig(Some(Path.read("/whats/in/a")), Some(Path.read("/default")), None, None, None, None)
.newInterpreter(Stack.Params.empty)
}
test("interpreter registration") {
assert(LoadService[InterpreterInitializer]().exists(_.isInstanceOf[MeshInterpreterInitializer]))
}
test("parse config") {
val yaml = s"""|kind: io.l5d.mesh
|dst: /$$/inet/127.1/4321
|root: /default
|tls:
| disableValidation: false
| commonName: "{service}"
| trustCerts:
| - /foo/caCert.pem
| clientAuth:
| certPath: /namerd-cert.pem
| keyPath: /namerd-key.pk8
|""".stripMargin
val mapper = Parser.objectMapper(yaml, Iterable(Seq(MeshInterpreterInitializer)))
val namerd = mapper.readValue[InterpreterConfig](yaml).asInstanceOf[MeshInterpreterConfig]
mapper.writeValueAsString(namerd) // ensure serialization doesn't blow up
assert(namerd.dst == Some(Path.read("/$/inet/127.1/4321")))
assert(namerd.root == Some(Path.read("/default")))
assert(!namerd.disabled)
val tls = namerd.tls.get
assert(tls.disableValidation == Some(false))
assert(tls.commonName == Some("{service}"))
assert(tls.trustCerts == Some(List("/foo/caCert.pem")))
assert(tls.clientAuth.get.certPath == "/namerd-cert.pem")
assert(tls.clientAuth.get.keyPath == "/namerd-key.pk8")
}
}
|
linkerd/linkerd
|
interpreter/mesh/src/test/scala/io/buoyant/interpreter/MeshInterpreterInitializerTest.scala
|
Scala
|
apache-2.0
| 1,900 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.accounts.frs102.BoxesFixture
import uk.gov.hmrc.ct.box.CtValidation
class ACQ5021Spec extends WordSpec with Matchers with BoxesFixture {
"ACQ5021" should {
"for Full Accounts pass validation" when {
"ac42, acq5021 have value" in {
ac42withValue
ac43noValue
acq5022noValue
ACQ5021(Some(false)).validate(boxRetriever) shouldBe empty
}
"ac42, acq5022 have value" in {
ac42withValue
ac43noValue
acq5022withValue
ACQ5021(None).validate(boxRetriever) shouldBe empty
}
"ac43, acq5021 have value" in {
ac42noValue
ac43withValue
acq5022noValue
ACQ5021(Some(false)).validate(boxRetriever) shouldBe empty
}
"ac43, acq5022 have value" in {
ac42noValue
ac43withValue
acq5022withValue
ACQ5021(None).validate(boxRetriever) shouldBe empty
}
"ac42, acq5022, acq5021 have value" in {
ac42withValue
ac43noValue
acq5022withValue
ACQ5021(Some(true)).validate(boxRetriever) shouldBe empty
}
"ac43, acq5022, acq5021 have value" in {
ac42noValue
ac43withValue
acq5022withValue
ACQ5021(Some(true)).validate(boxRetriever) shouldBe empty
}
"ac42, ac43, acq5022, acq5021 have value" in {
ac42withValue
ac43withValue
acq5022withValue
ACQ5021(Some(true)).validate(boxRetriever) shouldBe empty
}
"all no value" in {
ac42noValue
ac43noValue
acq5022noValue
ACQ5021(None).validate(boxRetriever) shouldBe empty
}
}
"for Full Accounts fail validation" when {
val errorAtLeastOne = Set(CtValidation(None,"error.balance.sheet.intangible.assets.one.box.required",None))
val cannotExist = Set(CtValidation(Some("ACQ5021"),"error.ACQ5021.cannot.exist",None))
"ac42 has value and acq5021,acq5022 have no value" in {
ac42withValue
ac43noValue
acq5022noValue
ACQ5021(None).validate(boxRetriever) shouldBe errorAtLeastOne
}
"ac43 has value and acq5021,acq5022 have no value" in {
ac42noValue
ac43withValue
acq5022noValue
ACQ5021(None).validate(boxRetriever) shouldBe errorAtLeastOne
}
"ac42,ac43 have value and acq5021,acq5022 have no value" in {
ac42withValue
ac43withValue
acq5022noValue
ACQ5021(None).validate(boxRetriever) shouldBe errorAtLeastOne
}
"ac42, ac43 has no value and acq5021 has value" in {
ac42noValue
ac43noValue
acq5022noValue
ACQ5021(Some(false)).validate(boxRetriever) shouldBe cannotExist
}
}
}
}
|
hmrc/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/ACQ5021Spec.scala
|
Scala
|
apache-2.0
| 3,480 |
package edu.jhu.hlt.probe.classifier
import de.bwaldvogel.liblinear._
import edu.jhu.hlt.probe._
import java.nio.file._
import scala.collection.JavaConversions._
import scala.collection._
/**
* A simple log-linear model based on the library `liblinear`.
* This is used for fast prototyping.
* @author Tongfei Chen ([email protected]).
* @since 0.4.2
*/
class LogLinearModel[A] private(featureAlphabet: Alphabet, model: Model)
extends (FeatureVector[A] => Int)
{
/** Classifies a feature vector into positive (1) or negative (0). */
def apply(fx: FeatureVector[A]): Int = {
if (score(fx) <= 0.5) 0 else 1
}
/** Returns P(1 | fx). */
def score(fx: FeatureVector[A]): Double = {
val afv = AlphabetizedFeatureVector(featureAlphabet)(fx.groups.toSeq: _*)
val x = afv.pairs.map { t =>
new FeatureNode(t._1, t._2).asInstanceOf[de.bwaldvogel.liblinear.Feature]
}.toArray.sortBy(_.getIndex)
val scores = Array.ofDim[Double](2)
val _ = Linear.predictProbability(model, x, scores)
scores(model.getLabels.indexOf(1))
}
/** Returns the list of non-zero parameters (feature weights) in descending order. */
def parameters = {
val w = model.getFeatureWeights
(1 until featureAlphabet.size)
.filter { i => w(i - 1) != 0.0 } // !!! Liblinear stores the weight for feature i in w(i - 1) !!!
.map { i => featureAlphabet.get(i) → w(i - 1) }
.sortBy(-_._2)
}
def saveToFile(fn: String) = {
val pw = new java.io.PrintWriter(fn)
for ((k, w) <- parameters)
      pw.println(s"$k\t$w")
pw.close()
}
}
object LogLinearModel {
/**
* Fits a log-linear model using L,,1,, regularization.
* @param c regularization coefficient
* @param tol Tolerance as stopping criteria
* @param data A sequence of training data. Each sample should be of type `(FeatureVector, Int)`.
* @return A log-linear model
*/
def fitWithL1Regularization[A](c: Double, tol: Double = 0.001)(data: Iterable[(FeatureVector[A], Int)]): LogLinearModel[A] = {
val featureAlphabet = new Alphabet
val fvs = data map { _._1 }
val fs = fvs map { fv => AlphabetizedFeatureVector(featureAlphabet)(fv.groups.toSeq: _*) }
featureAlphabet.freeze()
val problem = new Problem
problem.l = fvs.size
problem.n = featureAlphabet.size
problem.x = fs.map { (f: AlphabetizedFeatureVector) =>
f.pairs.map { t =>
new FeatureNode(t._1, t._2).asInstanceOf[de.bwaldvogel.liblinear.Feature]
}.toArray.sortBy(_.getIndex)
}.toArray
problem.y = data.map(_._2.toDouble).toArray
val solver = SolverType.L1R_LR
val parameter = new Parameter(solver, c, tol)
val model = Linear.train(problem, parameter)
new LogLinearModel(featureAlphabet, model)
}
def main(args: Array[String]) = {
val inputFeatureFile = args(0)
val outputModelFile = args(1)
val regCoeff = args(2)
val data = Files.readAllLines(Paths.get(inputFeatureFile)).map { line =>
val spPos = line.indexOf(' ')
val l = line.substring(0, spPos)
val fv = line.substring(spPos + 1)
FeatureVector.parse(fv) -> l.toInt
}
val model = fitWithL1Regularization(regCoeff.toDouble)(data)
model.saveToFile(outputModelFile)
}
}
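// Training sketch (illustrative only): `trainingData` is assumed to be an existing
// Iterable[(FeatureVector[String], Int)] with labels in {0, 1}; the names below are not part of this file.
//
//   val model = LogLinearModel.fitWithL1Regularization[String](c = 1.0)(trainingData)
//   val p = model.score(someFeatureVector)   // P(label = 1 | features)
//   model.saveToFile("weights.txt")          // writes one "feature<TAB>weight" line per non-zero parameter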
|
ctongfei/feature
|
core/src/main/scala/edu/jhu/hlt/probe/classifier/LogLinearModel.scala
|
Scala
|
mit
| 3,266 |
package example.akkawschat
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.stream.ActorFlowMaterializer
import scala.util.{ Success, Failure }
object Boot extends App {
implicit val system = ActorSystem()
import system.dispatcher
implicit val materializer = ActorFlowMaterializer()
val config = system.settings.config
val interface = config.getString("app.interface")
val port = config.getInt("app.port")
val service = new Webservice
val binding = Http().bindAndHandle(service.route, interface, port)
binding.onComplete {
case Success(binding) ⇒
val localAddress = binding.localAddress
println(s"Server is listening on ${localAddress.getHostName}:${localAddress.getPort}")
case Failure(e) ⇒
println(s"Binding failed with ${e.getMessage}")
system.shutdown()
}
}
|
tabruhn/akka-chat-playground
|
backend/src/main/scala/example/akkawschat/Boot.scala
|
Scala
|
mit
| 848 |
package com.microsoft.netalyzer.loader
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.types._
object Utils {
def initializeDb(path: String, sc: SQLContext): Unit = {
sc.sql(
s"""
CREATE DATABASE IF NOT EXISTS netalyzer
LOCATION "$path"
""".stripMargin
)
sc.sql(
"""
CREATE TABLE IF NOT EXISTS netalyzer.samples (
datetime TIMESTAMP,
hostname VARCHAR(255),
portname VARCHAR(255),
portspeed DECIMAL(38,0),
totalrxbytes DECIMAL(38,0),
totaltxbytes DECIMAL(38,0),
id VARCHAR(127)
)
CLUSTERED BY(id) INTO 16 BUCKETS
STORED AS ORC
TBLPROPERTIES("transactional"="true")
""".stripMargin
)
}
// https://docs.oracle.com/javase/7/docs/api/java/text/SimpleDateFormat.html
def importCsvData(path: String, sc: SQLContext): Unit = {
val customSchema = StructType(
Array(
StructField("datetime", TimestampType, nullable = false),
StructField("hostname", StringType, nullable = false),
StructField("portname", StringType, nullable = false),
StructField("portspeed", DecimalType(38, 0), nullable = false),
StructField("totalrxbytes", DecimalType(38, 0), nullable = false),
StructField("totaltxbytes", DecimalType(38, 0), nullable = false)
)
)
val fileSystem = FileSystem.get(sc.sparkContext.hadoopConfiguration)
val tmpPath = path + "_LOADING"
if (fileSystem.exists(new Path(tmpPath))) {
val rawDf = sc.read
.format("com.databricks.spark.csv")
.option("mode", "FAILFAST")
.option("header", "true")
.option("dateFormat", "yyyy-MM-dd'T'HH:mm:ss.SSSXXX")
.schema(customSchema)
.load(tmpPath)
.repartition(16)
rawDf.registerTempTable("rawDf")
val newDf = sc.sql(
"""
SELECT datetime,
hostname,
portname,
portspeed,
totalrxbytes,
totaltxbytes,
sha2(concat(datetime, hostname, portname), 256) AS id
FROM rawDf
""".stripMargin
)
newDf.printSchema()
newDf.show(100)
newDf.write.mode("append").saveAsTable("netalyzer.samples")
fileSystem.delete(new Path(tmpPath), true)
}
else if (fileSystem.exists(new Path(path))) {
//fixme
fileSystem.rename(new Path(path), new Path(tmpPath))
val rawDf = sc.read
.format("com.databricks.spark.csv")
.option("mode", "FAILFAST")
.option("header", "true")
.option("dateFormat", "yyyy-MM-dd'T'HH:mm:ss.SSSXXX")
.schema(customSchema)
.load(tmpPath)
.repartition(16)
rawDf.registerTempTable("rawDf")
val newDf = sc.sql(
"""
SELECT datetime,
hostname,
portname,
portspeed,
totalrxbytes,
totaltxbytes,
sha2(concat(datetime, hostname, portname), 256) AS id
FROM rawDf
""".stripMargin
)
newDf.printSchema()
newDf.show(100)
newDf.write.mode("append").saveAsTable("netalyzer.samples")
fileSystem.delete(new Path(tmpPath), true)
}
}
}
|
bitvector2/netalyzer-loader
|
src/main/scala/com/microsoft/netalyzer/loader/Utils.scala
|
Scala
|
apache-2.0
| 3,294 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.rdd
import scala.collection.JavaConverters._
import org.apache.hadoop.mapreduce.RecordReader
import org.apache.spark.{Partition, TaskContext}
import org.apache.spark.sql.profiler.{Profiler, QueryTaskEnd}
import org.apache.spark.util.TaskCompletionListener
import org.apache.carbondata.common.logging.LogServiceFactory
import org.apache.carbondata.core.memory.UnsafeMemoryManager
import org.apache.carbondata.core.stats.{QueryStatistic, QueryStatisticsConstants, QueryStatisticsRecorder}
import org.apache.carbondata.core.util.{TaskMetricsMap, ThreadLocalTaskInfo}
import org.apache.carbondata.spark.InitInputMetrics
class QueryTaskCompletionListener(freeMemory: Boolean,
var reader: RecordReader[Void, Object],
inputMetricsStats: InitInputMetrics, executionId: String, taskId: Int, queryStartTime: Long,
queryStatisticsRecorder: QueryStatisticsRecorder, split: Partition, queryId: String)
extends TaskCompletionListener {
override def onTaskCompletion(context: TaskContext): Unit = {
if (reader != null) {
try {
reader.close()
} catch {
case e: Exception =>
LogServiceFactory.getLogService(this.getClass.getCanonicalName).error(e)
}
reader = null
}
TaskMetricsMap.getInstance().updateReadBytes(Thread.currentThread().getId)
inputMetricsStats.updateAndClose()
logStatistics(executionId, taskId, queryStartTime, queryStatisticsRecorder, split)
if (freeMemory) {
UnsafeMemoryManager.INSTANCE
.freeMemoryAll(ThreadLocalTaskInfo.getCarbonTaskInfo.getTaskId)
}
}
def logStatistics(
executionId: String,
taskId: Long,
queryStartTime: Long,
recorder: QueryStatisticsRecorder,
split: Partition
): Unit = {
if (null != recorder) {
val queryStatistic = new QueryStatistic()
queryStatistic.addFixedTimeStatistic(QueryStatisticsConstants.EXECUTOR_PART,
System.currentTimeMillis - queryStartTime)
recorder.recordStatistics(queryStatistic)
// print executor query statistics for each task_id
val statistics = recorder.statisticsForTask(taskId, queryStartTime)
if (statistics != null && executionId != null) {
Profiler.invokeIfEnable {
val inputSplit = split.asInstanceOf[CarbonSparkPartition].split.value
inputSplit.calculateLength()
val size = inputSplit.getLength
val files = inputSplit.getAllSplits.asScala.map { s =>
s.getSegmentId + "/" + s.getPath.getName
}.toArray[String]
Profiler.send(
QueryTaskEnd(
executionId.toLong,
queryId,
statistics.getValues,
size,
files
)
)
}
}
recorder.logStatisticsForTask(statistics)
}
}
}
|
sgururajshetty/carbondata
|
integration/spark-common/src/main/scala/org/apache/carbondata/spark/rdd/QueryTaskCompletionListener.scala
|
Scala
|
apache-2.0
| 3,659 |
/*
* Copyright 2012 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.money.otel.handlers.logging
import com.comcast.money.otel.handlers.OtelSpanHandler
import com.typesafe.config.Config
import io.opentelemetry.exporter.logging.LoggingSpanExporter
import io.opentelemetry.sdk.trace.`export`.SpanExporter
/**
* A Money [[com.comcast.money.api.SpanHandler]] that can export spans to [[java.util.logging.Logger]].
*
* Sample configuration:
*
* {{{
* handling = {
* async = true
* handlers = [
* {
* class = "com.comcast.money.otel.handlers.logging.LoggingSpanHandler"
* batch = true
* exporter-timeout-ms = 30000
* max-batch-size = 512
* max-queue-size = 2048
* schedule-delay-ms = 5000
* }
* ]
* }
* }}}
*
*/
class LoggingSpanHandler(config: Config) extends OtelSpanHandler(config) {
override protected def createSpanExporter(config: Config): SpanExporter = new LoggingSpanExporter()
}
|
Comcast/money
|
money-otel-logging-exporter/src/main/scala/com/comcast/money/otel/handlers/logging/LoggingSpanHandler.scala
|
Scala
|
apache-2.0
| 1,564 |
package com.szadowsz.spark.ml.feature
import com.szadowsz.common.lang.WordTokeniser
import org.apache.spark.ml.UnaryTransformer
import org.apache.spark.sql._
import org.apache.spark.sql.types.{ArrayType, DataType, StringType}
import org.slf4j.LoggerFactory
/**
* Created on 28/11/2016.
*/
class TokeniserTransformer (override val uid : String) extends UnaryTransformer[String, Seq[String], TokeniserTransformer] {
protected val logger = LoggerFactory.getLogger("com.szadowsz.ulster.spark")
override protected def createTransformFunc: (String) => Seq[String] = (s : String) => Option(s) match {
case Some(_) => WordTokeniser.tokenise(s)
case None => null
}
override protected def outputDataType: DataType = ArrayType(StringType)
override def transform(dataset: Dataset[_]): DataFrame = {
logger.info("Executing stage {}",uid)
logger.debug("Processing dataset {}",dataset.schema.fieldNames.mkString("[",",","]"))
super.transform(dataset)
}
}
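// Usage sketch (illustrative; assumes a DataFrame `df` with a string column "name"):
//
//   val tokeniser = new TokeniserTransformer("tokeniser")
//     .setInputCol("name")
//     .setOutputCol("tokens")
//   val tokenised = tokeniser.transform(df)   // adds an array<string> column "tokens"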
|
zakski/project-cadisainmduit
|
module/spark/src/main/scala/com/szadowsz/spark/ml/feature/TokeniserTransformer.scala
|
Scala
|
apache-2.0
| 980 |
package com.geishatokyo.sqlgen.loader
import java.io.{File, InputStream}
import com.geishatokyo.sqlgen.SQLGenException
import com.geishatokyo.sqlgen.core.Workbook
import scala.util.matching.Regex
/**
* Created by takezoux2 on 2017/07/05.
*/
class AutoFileDetectionLoader(patterns: Seq[Pattern], defaultLoader: Option[Loader]) extends Loader{
override def load(file: File): Workbook = {
patterns.find(p => {
p.fileRegex.findFirstIn(file.getAbsolutePath).isDefined
}) match{
case Some(p) => {
p.loader.load(file)
}
case None => {
defaultLoader.map(_.load(file)).getOrElse {
throw SQLGenException(s"Can't detect loader for ${file}")
}
}
}
}
override def load(name: String, input: InputStream): Workbook = {
patterns.find(p => {
p.fileRegex.findFirstIn(name).isDefined
}) match{
case Some(p) => {
p.loader.load(name, input)
}
case None => {
throw SQLGenException(s"Can't detect loader for ${name}")
}
}
}
def isSupported(file: File) = {
patterns.exists(p => {
p.fileRegex.findFirstIn(file.getAbsolutePath).isDefined
})
}
}
object AutoFileDetectionLoader {
val default: AutoFileDetectionLoader = new AutoFileDetectionLoader(
List(
Pattern.withExtension("csv", new CSVLoader())
),
Some(new XLSLoader())
)
}
case class Pattern(fileRegex: Regex, loader: Loader)
object Pattern {
def withExtension(ext: String, loader: Loader) = {
    Pattern(s"""\.${ext}$$""".r, loader)
}
}
|
geishatokyo/sql-generator
|
src/main/scala/com/geishatokyo/sqlgen/loader/AutoFileDetectionLoader.scala
|
Scala
|
mit
| 1,575 |
package idv.brianhsu.maidroid.plurk.fragment
import idv.brianhsu.maidroid.plurk._
import idv.brianhsu.maidroid.plurk.activity._
import idv.brianhsu.maidroid.plurk.TypedResource._
import idv.brianhsu.maidroid.plurk.adapter._
import idv.brianhsu.maidroid.plurk.dialog._
import idv.brianhsu.maidroid.plurk.util._
import idv.brianhsu.maidroid.plurk.view._
import idv.brianhsu.maidroid.ui.util.AsyncUI._
import idv.brianhsu.maidroid.ui.util.CallbackConversions._
import android.app.Activity
import android.app.AlertDialog
import android.app.ProgressDialog
import android.widget.ArrayAdapter
import android.content.DialogInterface
import android.os.Bundle
import android.net.Uri
import android.support.v4.app.Fragment
import android.view.LayoutInflater
import android.view.ViewGroup
import android.view.View
import android.view.Menu
import android.view.MenuItem
import android.view.MenuInflater
import android.widget.AdapterView
import android.widget.Toast
import android.webkit.WebViewClient
import android.webkit.WebView
import android.support.v4.app.FragmentActivity
import android.support.v7.widget.SearchView
import android.support.v4.view.MenuItemCompat
import org.bone.soplurk.api._
import org.bone.soplurk.api.PlurkAPI._
import org.bone.soplurk.model._
import scala.concurrent._
import scala.util.Try
class BlockListFragment extends Fragment {
private implicit def activity = getActivity.asInstanceOf[FragmentActivity]
private def listViewHolder = Option(getView).map(_.findView(TR.userListListView))
private def loadingIndicatorHolder = Option(getView).map(_.findView(TR.userListLoadingIndicator))
private def errorNoticeHolder = Option(getView).map(_.findView(TR.userListErrorNotice))
private def emptyNoticeHolder = Option(getView).map(_.findView(TR.userListEmptyNotice))
private def retryButtonHolder = Option(getView).map(_.findView(TR.moduleErrorNoticeRetryButton))
private def plurkAPI = PlurkAPIHelper.getPlurkAPI(activity)
private var blockList: Option[Vector[User]] = None
private lazy val searchView = new SearchView(activity)
private def getBlockList: Vector[User] = {
blockList match {
case Some(list) => list
case None =>
var batch = plurkAPI.Blocks.get().get._2
var allBlock: Vector[User] = batch.toVector
while (batch != Nil) {
batch = plurkAPI.Blocks.get(offset = allBlock.size).get._2
allBlock = allBlock ++ batch.toVector
}
val distinctUser = allBlock.distinct
blockList = Some(distinctUser)
distinctUser
}
}
private def showErrorNotice(message: String) {
loadingIndicatorHolder.foreach(_.hide())
errorNoticeHolder.foreach(_.setVisibility(View.VISIBLE))
errorNoticeHolder.foreach { errorNotice =>
errorNotice.setMessageWithRetry(message) { retryButton =>
retryButton.setEnabled(false)
errorNoticeHolder.foreach(_.setVisibility(View.GONE))
loadingIndicatorHolder.foreach(_.show())
updateList()
}
}
}
private def removeBlock(adapter: UserListAdapter, user: User) {
val dialogBuilder = new AlertDialog.Builder(activity)
val displayName = (
user.displayName.filterNot(_.trim.isEmpty) orElse
Option(user.fullName).filterNot(_.trim.isEmpty) orElse
Option(user.nickname).filterNot(_.trim.isEmpty)
).getOrElse(user.id)
val confirmDialog =
dialogBuilder.setTitle(R.string.fragmentBlockListUnblockTitle)
.setMessage(activity.getString(R.string.fragmentBlockListUnblockMessage).format(displayName))
.setPositiveButton(R.string.ok, null)
.setNegativeButton(R.string.cancel, null)
.create()
confirmDialog.setOnShowListener(new DialogInterface.OnShowListener() {
override def onShow(dialog: DialogInterface) {
val okButton = confirmDialog.getButton(DialogInterface.BUTTON_POSITIVE)
okButton.setOnClickListener { view: View =>
val progressDialog = ProgressDialog.show(
activity,
activity.getString(R.string.pleaseWait),
activity.getString(R.string.fragmentBlockListUnblocking),
true, false
)
val future = Future { plurkAPI.Blocks.unblock(user.id).get }
future.onSuccessInUI { status =>
if (activity != null) {
adapter.removeUser(user.id)
progressDialog.dismiss()
confirmDialog.dismiss()
}
}
future.onFailureInUI { case e: Exception =>
if (activity != null) {
Toast.makeText(activity, R.string.fragmentBlockListUnblockFailed, Toast.LENGTH_LONG).show()
progressDialog.dismiss()
confirmDialog.dismiss()
}
}
}
}
})
confirmDialog.show()
}
def updateList() {
val future = Future { getBlockList }
future.onSuccessInUI { allFollowings =>
if (activity != null) {
val adapter = new UserListAdapter(activity, allFollowings)
listViewHolder.foreach { listView =>
listView.setAdapter(adapter)
searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
override def onQueryTextChange(newText: String) = {
adapter.getFilter.filter(newText)
false
}
override def onQueryTextSubmit(text: String) = {
adapter.getFilter.filter(text)
true
}
})
emptyNoticeHolder.foreach(view => listView.setEmptyView(view))
listView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
override def onItemClick(parent: AdapterView[_], view: View, position: Int, id: Long) {
val user = adapter.getItem(position).asInstanceOf[User]
UserTimelineActivity.startActivity(activity, user)
}
})
listView.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
override def onItemLongClick(parent: AdapterView[_], view: View, position: Int, id: Long): Boolean = {
val dialog = new AlertDialog.Builder(activity)
val itemList = Array(
activity.getString(R.string.fragmentBlockListViewTimeline),
activity.getString(R.string.fragmentBlockListUnblock)
)
val itemAdapter = new ArrayAdapter(activity, android.R.layout.select_dialog_item, itemList)
val onClickListener = new DialogInterface.OnClickListener {
override def onClick(dialog: DialogInterface, which: Int) {
val user = adapter.getItem(position)
which match {
case 0 => UserTimelineActivity.startActivity(activity, user)
case 1 => removeBlock(adapter, user)
}
}
}
dialog.setTitle(R.string.fragmentBlockListAction)
.setAdapter(itemAdapter, onClickListener)
.show()
true
}
})
}
loadingIndicatorHolder.foreach(_.setVisibility(View.GONE))
}
}
future.onFailureInUI { case e: Exception =>
if (activity != null) {
showErrorNotice(activity.getString(R.string.fragmentBlockFetchFailure))
}
}
}
override def onCreateOptionsMenu(menu: Menu, inflater: MenuInflater) {
inflater.inflate(R.menu.fragment_user_list, menu)
val searchItem = menu.findItem(R.id.userListSearch)
if (searchView.getParent != null) {
searchView.getParent.asInstanceOf[ViewGroup].removeView(searchView)
}
MenuItemCompat.setActionView(searchItem, searchView)
searchView.setIconified(true)
super.onCreateOptionsMenu(menu, inflater)
}
override def onOptionsItemSelected(item: MenuItem) = item.getItemId match {
case _ => super.onOptionsItemSelected(item)
}
override def onCreateView(inflater: LayoutInflater, container: ViewGroup,
savedInstanceState: Bundle): View = {
val view = inflater.inflate(R.layout.fragment_user_list, container, false)
updateList()
setHasOptionsMenu(true)
view
}
}
|
brianhsu/MaidroidPlurk
|
src/main/scala/fragment/BlockListFragment.scala
|
Scala
|
gpl-3.0
| 8,302 |
package org.json4s
package jackson
import com.fasterxml.jackson.databind._
import com.fasterxml.jackson.databind.DeserializationFeature.{USE_BIG_DECIMAL_FOR_FLOATS, USE_BIG_INTEGER_FOR_INTS}
import scala.util.control.Exception.allCatch
trait JsonMethods extends org.json4s.JsonMethods[JValue] {
private[this] lazy val _defaultMapper = {
val m = new ObjectMapper()
m.registerModule(new Json4sScalaModule)
// for backwards compatibility
m.configure(USE_BIG_INTEGER_FOR_INTS, true)
m
}
def mapper = _defaultMapper
def parse(in: JsonInput, useBigDecimalForDouble: Boolean = false, useBigIntForLong: Boolean = true): JValue = {
var reader = mapper.reader[ObjectReader](classOf[JValue])
if (useBigDecimalForDouble) reader = reader `with` USE_BIG_DECIMAL_FOR_FLOATS
if (useBigIntForLong) reader = reader `with` USE_BIG_INTEGER_FOR_INTS
in match {
case StringInput(s) => reader.readValue(s)
case ReaderInput(rdr) => reader.readValue(rdr)
case StreamInput(stream) => reader.readValue(stream)
case FileInput(file) => reader.readValue(file)
}
}
def parseOpt(in: JsonInput, useBigDecimalForDouble: Boolean = false, useBigIntForLong: Boolean = true): Option[JValue] = allCatch opt {
parse(in, useBigDecimalForDouble, useBigIntForLong)
}
def render(value: JValue)(implicit formats: Formats = DefaultFormats): JValue =
formats.emptyValueStrategy.replaceEmpty(value)
def compact(d: JValue): String = mapper.writeValueAsString(d)
def pretty(d: JValue): String = {
val writer = mapper.writerWithDefaultPrettyPrinter[ObjectWriter]()
writer.writeValueAsString(d)
}
def asJValue[T](obj: T)(implicit writer: Writer[T]): JValue = writer.write(obj)
def fromJValue[T](json: JValue)(implicit reader: Reader[T]): T = reader.read(json)
def asJsonNode(jv: JValue): JsonNode = mapper.valueToTree[JsonNode](jv)
def fromJsonNode(jn: JsonNode): JValue = mapper.treeToValue[JValue](jn, classOf[JValue])
}
object JsonMethods extends JsonMethods
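// Round-trip sketch (illustrative): parse a JSON string, then pretty-print it again.
//
//   import org.json4s.jackson.JsonMethods._
//   val jv: JValue = parse("""{ "name": "json4s", "stars": 1 }""")
//   val text: String = pretty(render(jv))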
|
geggo98/json4s
|
jackson/src/main/scala/org/json4s/jackson/JsonMethods.scala
|
Scala
|
apache-2.0
| 2,028 |
/*
* File PacketRecipe.scala is part of JsonRecipes.
* JsonRecipes is opensource Minecraft mod(released under LGPLv3), created by anti344.
* Full licence information can be found in LICENCE and LICENCE.LESSER files in jar-file of the mod.
* Copyright © 2014, anti344
*/
package net.anti344.jsonrecipes.network.packet
import net.anti344.jsonrecipes.json.Parser
import cpw.mods.fml.relauncher.Side
import io.netty.buffer.ByteBuf
class PacketRecipe(private var json: String)
extends Packet{
def this() =
this("")
def read(buf: ByteBuf) =
json = buf
def write(buf: ByteBuf) =
buf <<< json
def execute(side: Side) =
Parser.loadSingleRecipe(json)
}
|
mc-anti344/JsonRecipes
|
src/main/scala/net/anti344/jsonrecipes/network/packet/PacketRecipe.scala
|
Scala
|
gpl-3.0
| 680 |
package org.jetbrains.plugins.scala
package compiler
import com.intellij.openapi.components.ApplicationComponent
import com.intellij.openapi.projectRoots.{JavaSdk}
import collection.JavaConverters._
import com.intellij.util.PathUtil
import java.io.{IOException, File}
import com.intellij.openapi.application.ApplicationManager
import extensions._
import com.intellij.notification.{NotificationListener, Notifications, NotificationType, Notification}
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.openapi.project.Project
import scala.util.control.Exception._
import javax.swing.event.HyperlinkEvent
import com.intellij.openapi.options.ShowSettingsUtil
/**
* @author Pavel Fatin
*/
class CompileServerLauncher extends ApplicationComponent {
private var instance: Option[ServerInstance] = None
def initComponent() {}
def disposeComponent() {
if (running) stop()
}
def tryToStart(project: Project): Boolean = running || start(project)
private def start(project: Project): Boolean = {
/* val applicationSettings = ScalaApplicationSettings.getInstance
if (applicationSettings.COMPILE_SERVER_SDK == null) {
// Try to find a suitable JDK
val choice = Option(ProjectRootManager.getInstance(project).getProjectSdk).orElse {
val all = ProjectJdkTable.getInstance.getSdksOfType(JavaSdk.getInstance()).asScala
all.headOption
}
choice.foreach(sdk => applicationSettings.COMPILE_SERVER_SDK = sdk.getName)
// val message = "JVM SDK is automatically selected: " + name +
// "\\n(can be changed in Application Settings / Scala)"
// Notifications.Bus.notify(new Notification("scala", "Scala compile server",
// message, NotificationType.INFORMATION))
}
findJdkByName(applicationSettings.COMPILE_SERVER_SDK)
.left.map(_ + "\\nPlease either disable Scala compile server or configure a valid JVM SDK for it.")
.right.flatMap(start) match {
case Left(error) =>
val title = "Cannot start Scala compile server"
val content = s"<html><body>${error.replace("\\n", "<br>")} <a href=''>Configure</a></body></html>"
Notifications.Bus.notify(new Notification("scala", title, content, NotificationType.ERROR, ConfigureLinkListener))
false
case Right(_) =>
ApplicationManager.getApplication invokeLater new Runnable {
override def run() {
CompileServerManager.instance(project).configureWidget()
}
}
true
} */
false
}
private def start(jdk: JDK): Either[String, Process] = {
import CompileServerLauncher.{compilerJars, jvmParameters}
val settings = ScalaApplicationSettings.getInstance
compilerJars.partition(_.exists) match {
case (presentFiles, Seq()) =>
val classpath = (jdk.tools +: presentFiles).map(_.canonicalPath).mkString(File.pathSeparator)
val commands = jdk.executable.canonicalPath +: "-cp" +: classpath +: jvmParameters :+
"org.jetbrains.plugins.scala.nailgun.NailgunRunner" :+ settings.COMPILE_SERVER_PORT
val builder = new ProcessBuilder(commands.asJava)
catching(classOf[IOException]).either(builder.start())
.left.map(_.getMessage)
.right.map { process =>
val watcher = new ProcessWatcher(process)
instance = Some(ServerInstance(watcher, settings.COMPILE_SERVER_PORT.toInt))
watcher.startNotify()
process
}
case (_, absentFiles) =>
val paths = absentFiles.map(_.getPath).mkString(", ")
Left("Required file(s) not found: " + paths)
}
}
// TODO stop server more gracefully
def stop() {
instance.foreach { it =>
it.destroyProcess()
}
}
def stop(project: Project) {
stop()
ApplicationManager.getApplication invokeLater new Runnable {
override def run() {
CompileServerManager.instance(project).configureWidget()
}
}
}
def running: Boolean = instance.exists(_.running)
def errors(): Seq[String] = instance.map(_.errors()).getOrElse(Seq.empty)
def port: Option[Int] = instance.map(_.port)
def getComponentName = getClass.getSimpleName
}
object CompileServerLauncher {
def instance = ApplicationManager.getApplication.getComponent(classOf[CompileServerLauncher])
def compilerJars = {
val ideaRoot = new File(PathUtil.getJarPathForClass(classOf[ApplicationManager])).getParent
val pluginRoot = new File(PathUtil.getJarPathForClass(getClass)).getParent
val jpsRoot = new File(pluginRoot, "jps")
Seq(
new File(ideaRoot, "jps-server.jar"),
new File(ideaRoot, "trove4j.jar"),
new File(ideaRoot, "util.jar"),
new File(pluginRoot, "scala-library.jar"),
new File(pluginRoot, "scala-nailgun-runner.jar"),
new File(pluginRoot, "compiler-settings.jar"),
new File(jpsRoot, "nailgun.jar"),
new File(jpsRoot, "sbt-interface.jar"),
new File(jpsRoot, "incremental-compiler.jar"),
new File(jpsRoot, "jline.jar"),
new File(jpsRoot, "scala-jps-plugin.jar"))
}
def jvmParameters = {
val settings = ScalaApplicationSettings.getInstance
val xmx = settings.COMPILE_SERVER_MAXIMUM_HEAP_SIZE |> { size =>
if (size.isEmpty) Nil else List("-Xmx%sm".format(size))
}
xmx ++ settings.COMPILE_SERVER_JVM_PARAMETERS.split(" ").toSeq
}
}
private case class ServerInstance(watcher: ProcessWatcher, port: Int) {
def running: Boolean = watcher.running
def errors(): Seq[String] = watcher.errors()
def destroyProcess() {
watcher.destroyProcess()
}
}
private object ConfigureLinkListener extends NotificationListener.Adapter {
def hyperlinkActivated(notification: Notification, event: HyperlinkEvent) {
ShowSettingsUtil.getInstance().showSettingsDialog(null, "Scala")
notification.expire()
}
}
|
consulo/consulo-scala
|
src/org/jetbrains/plugins/scala/compiler/CompileServerLauncher.scala
|
Scala
|
apache-2.0
| 5,996 |
package com.socrata.tileserver
package services
import java.nio.charset.StandardCharsets.UTF_8
import javax.servlet.http.HttpServletResponse
import com.rojoma.json.v3.interpolation._
import com.rojoma.json.v3.io.JsonReader
import org.slf4j.{Logger, LoggerFactory, MDC}
import org.velvia.InvalidMsgPackDataException
import com.socrata.http.server.implicits._
import com.socrata.http.server.responses._
import com.socrata.http.server.routing.{SimpleResource, TypedPathComponent}
import com.socrata.http.server.{HttpRequest, HttpResponse, HttpService}
import TileService._
import exceptions._
import handlers._
import util._
// scalastyle:off multiple.string.literals
/** Service that provides the actual tiles.
*
* @constructor This only needs to be called once, by the main application.
* @param renderer talks to the underlying carto-renderer service.
* @param geo talks to the upstream geo-json service.
*/
case class TileService(renderer: RenderProvider, geo: GeoProvider) {
// The `Handler`s that this service is backed by.
private[this] val typedHandlers: Seq[Handler with FileType] = Seq(PbfHandler,
BpbfHandler,
PngHandler(renderer),
UnfashionablePngHandler,
JsonHandler,
TxtHandler)
private[this] val handler: Handler = typedHandlers.
map(h => h: Handler).
reduce(_.orElse(_))
/** The types (file extensions) supported by this endpoint. */
val types: Set[String] = typedHandlers.map(_.extension).toSet
/** Process a request to this service.
*
* @param info the incoming request + metadata.
*/
def handleRequest(info: RequestInfo) : HttpResponse = {
try {
val resp = geo.doQuery(info)
val result = resp.resultCode match {
case OK.statusCode =>
val base = OK ~> HeaderFilter.extract(resp)
handler(info)(base, resp)
case NotModified.statusCode => NotModified
case _ => echoResponse(resp)
}
Header("Access-Control-Allow-Origin", "*") ~>
Header("Access-Control-Allow-Headers", "X-Socrata-Host, X-Socrata-RequestId") ~>
result
} catch {
case packEx @ (_: InvalidSoqlPackException | _: InvalidMsgPackDataException) =>
fatal("Invalid or corrupt data returned from underlying service", packEx)
case unknown: Exception =>
fatal("Unknown error", unknown)
}
}
def handleOptions() : HttpResponse = {
Header("Access-Control-Allow-Origin", "*") ~>
Header("Access-Control-Allow-Headers", "X-Socrata-Host, X-Socrata-RequestId") ~>
OK
}
/** Handle the request.
*
   * @param identifier unique identifier for this dataset
* @param geoColumn the column in the dataset that contains the
* location information.
* @param zoom the zoom level, 1 is zoomed all the way out.
* @param x the x coordinate of the tile.
* @param typedY the y coordinate of the tile, and the type (extension).
*/
def service(identifier: String,
geoColumn: String,
zoom: Int,
x: Int,
typedY: TypedPathComponent[Int]): SimpleResource =
new SimpleResource {
val TypedPathComponent(y, ext) = typedY
override def get: HttpService = {
MDC.put("X-Socrata-Resource", identifier)
{ req =>
val info =
RequestInfo(req, identifier, geoColumn, QuadTile(x, y, zoom), ext)
handleRequest(info)
}
}
override def options: HttpService = {
{ req =>
handleOptions()
}
}
}
}
object TileService {
private val logger: Logger = LoggerFactory.getLogger(getClass)
private val allowed = Set(HttpServletResponse.SC_BAD_REQUEST,
HttpServletResponse.SC_FORBIDDEN,
HttpServletResponse.SC_NOT_FOUND,
HttpServletResponse.SC_REQUEST_TIMEOUT,
HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
HttpServletResponse.SC_NOT_IMPLEMENTED,
HttpServletResponse.SC_SERVICE_UNAVAILABLE)
// Basic hostname validation.
  private val validHost = """([A-Za-z0-9\-][.]?)+""".r
/** Http response representing the underlying response.
*
* @param resp the underlying response.
*/
def echoResponse(resp: GeoResponse): HttpResponse = {
val body = try {
JsonReader.fromString(new String(resp.payload, UTF_8))
} catch {
case e: Exception =>
json"""{ message: "Failed to parse underlying JSON", cause: ${e.getMessage}}"""
}
val code = resp.resultCode
val base = if (allowed(code)) Status(code) else InternalServerError
base ~>
Json(json"""{underlying: {resultCode:${resp.resultCode}, body: $body}}""")
}
/** Http response representing a fatal error.
*
* @param message the message for the payload.
* @param cause the cause of the error.
*/
def fatal(message: String, cause: Throwable): HttpResponse = {
logger.warn(message)
logger.warn(cause.getMessage, cause)
@annotation.tailrec
def rootCause(t: Throwable): Throwable =
if (t.getCause != null) rootCause(t.getCause) else t // scalastyle:ignore
val root = rootCause(cause)
val payload = if (cause.getMessage != null) { // scalastyle:ignore
json"""{message: $message, cause: ${cause.getMessage}}"""
} else if (root.getMessage != null) { // scalastyle:ignore
logger.warn(root.getMessage, root)
json"""{message: $message, cause: ${root.getMessage}}"""
} else {
json"""{message: $message}"""
}
InternalServerError ~>
Header("Access-Control-Allow-Origin", "*") ~>
Json(payload)
}
}
|
socrata-platform/tileserver
|
src/main/scala/com.socrata.tileserver/services/TileService.scala
|
Scala
|
apache-2.0
| 6,048 |
package com.peterpotts.sample
import scala.util.Random
class SampleShuffle[T](samples: Sample[T]*) extends Sample[T] {
def next(): T = samples(Random.nextInt(samples.size)).next()
}
object SampleShuffle {
def apply[T](samples: Sample[T]*) = new SampleShuffle(samples: _*)
}
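// Usage sketch (illustrative; assumes two existing Sample[Int] instances `a` and `b`):
//
//   val shuffled: Sample[Int] = SampleShuffle(a, b)
//   val value: Int = shuffled.next()   // picks `a` or `b` uniformly at random, then samples from it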
|
peterpotts/sample
|
src/main/scala/com/peterpotts/sample/SampleShuffle.scala
|
Scala
|
mit
| 281 |
package week5
import akka.actor.Actor
import akka.actor.ActorRef
import akka.event.LoggingReceive
object WireTransfer {
case class Transfer(from: ActorRef, to: ActorRef, amount: BigInt) {
require(amount > 0)
}
case object Done
case object Failed
}
class WireTransfer extends Actor {
import WireTransfer._
def receive = LoggingReceive {
case Transfer(from, to, amount) => {
from ! BankAccount.Withdraw(amount)
context.become(awaitFrom(to, amount, sender))
}
}
def awaitFrom(to: ActorRef, amount: BigInt, customer: ActorRef): Receive = LoggingReceive {
case BankAccount.Done => {
to ! BankAccount.Deposit(amount)
context.become(awaitTo(customer))
}
case BankAccount.Failed => {
customer ! Failed
context.stop(self)
}
}
def awaitTo(customer: ActorRef): Receive = LoggingReceive {
case BankAccount.Done => {
customer ! Done
context.stop(self)
}
}
}
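// Minimal usage sketch (illustrative; assumes an ActorSystem `system` and two BankAccount
// actors `accountA` and `accountB` created elsewhere in this project):
//
//   val wire = system.actorOf(akka.actor.Props[WireTransfer], "wire-transfer")
//   wire ! WireTransfer.Transfer(accountA, accountB, amount = 100)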
|
M4573R/playground-notes
|
principles-of-reactive-programming/week-5/src/main/scala/WireTransfer.scala
|
Scala
|
mit
| 959 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import kafka.server.{AbstractFetcherManager, AbstractFetcherThread, BrokerAndInitialOffset}
import kafka.cluster.{BrokerEndPoint, Cluster}
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.utils.Time
import scala.collection.immutable
import collection.mutable.HashMap
import scala.collection.mutable
import java.util.concurrent.locks.ReentrantLock
import kafka.utils.CoreUtils.inLock
import kafka.utils.ZkUtils
import kafka.utils.ShutdownableThread
import kafka.client.ClientUtils
import java.util.concurrent.atomic.AtomicInteger
/**
* Usage:
* Once ConsumerFetcherManager is created, startConnections() and stopAllConnections() can be called repeatedly
* until shutdown() is called.
*/
@deprecated("This class has been deprecated and will be removed in a future release.", "0.11.0.0")
class ConsumerFetcherManager(private val consumerIdString: String,
private val config: ConsumerConfig,
private val zkUtils : ZkUtils)
extends AbstractFetcherManager("ConsumerFetcherManager-%d".format(Time.SYSTEM.milliseconds),
config.clientId, config.numConsumerFetchers) {
private var partitionMap: immutable.Map[TopicPartition, PartitionTopicInfo] = null
private val noLeaderPartitionSet = new mutable.HashSet[TopicPartition]
private val lock = new ReentrantLock
private val cond = lock.newCondition()
private var leaderFinderThread: ShutdownableThread = null
private val correlationId = new AtomicInteger(0)
private class LeaderFinderThread(name: String) extends ShutdownableThread(name) {
// thread responsible for adding the fetcher to the right broker when leader is available
override def doWork() {
val leaderForPartitionsMap = new HashMap[TopicPartition, BrokerEndPoint]
lock.lock()
try {
while (noLeaderPartitionSet.isEmpty) {
trace("No partition for leader election.")
cond.await()
}
trace("Partitions without leader %s".format(noLeaderPartitionSet))
val brokers = ClientUtils.getPlaintextBrokerEndPoints(zkUtils)
val topicsMetadata = ClientUtils.fetchTopicMetadata(noLeaderPartitionSet.map(m => m.topic).toSet,
brokers,
config.clientId,
config.socketTimeoutMs,
correlationId.getAndIncrement).topicsMetadata
if(logger.isDebugEnabled) topicsMetadata.foreach(topicMetadata => debug(topicMetadata.toString()))
topicsMetadata.foreach { tmd =>
val topic = tmd.topic
tmd.partitionsMetadata.foreach { pmd =>
val topicAndPartition = new TopicPartition(topic, pmd.partitionId)
if(pmd.leader.isDefined && noLeaderPartitionSet.contains(topicAndPartition)) {
val leaderBroker = pmd.leader.get
leaderForPartitionsMap.put(topicAndPartition, leaderBroker)
noLeaderPartitionSet -= topicAndPartition
}
}
}
} catch {
case t: Throwable => {
if (!isRunning.get())
throw t /* If this thread is stopped, propagate this exception to kill the thread. */
else
warn("Failed to find leader for %s".format(noLeaderPartitionSet), t)
}
} finally {
lock.unlock()
}
try {
addFetcherForPartitions(leaderForPartitionsMap.map { case (topicPartition, broker) =>
topicPartition -> BrokerAndInitialOffset(broker, partitionMap(topicPartition).getFetchOffset())}
)
} catch {
case t: Throwable =>
if (!isRunning.get())
throw t /* If this thread is stopped, propagate this exception to kill the thread. */
else {
warn("Failed to add leader for partitions %s; will retry".format(leaderForPartitionsMap.keySet.mkString(",")), t)
lock.lock()
noLeaderPartitionSet ++= leaderForPartitionsMap.keySet
lock.unlock()
}
}
shutdownIdleFetcherThreads()
Thread.sleep(config.refreshLeaderBackoffMs)
}
}
override def createFetcherThread(fetcherId: Int, sourceBroker: BrokerEndPoint): AbstractFetcherThread = {
new ConsumerFetcherThread(
"ConsumerFetcherThread-%s-%d-%d".format(consumerIdString, fetcherId, sourceBroker.id),
config, sourceBroker, partitionMap, this)
}
def startConnections(topicInfos: Iterable[PartitionTopicInfo], cluster: Cluster) {
leaderFinderThread = new LeaderFinderThread(consumerIdString + "-leader-finder-thread")
leaderFinderThread.start()
inLock(lock) {
partitionMap = topicInfos.map(tpi => (new TopicPartition(tpi.topic, tpi.partitionId), tpi)).toMap
noLeaderPartitionSet ++= topicInfos.map(tpi => new TopicPartition(tpi.topic, tpi.partitionId))
cond.signalAll()
}
}
def stopConnections() {
/*
     * Stop the leader finder thread before stopping the fetchers. Otherwise, if there are still partitions without
     * a leader, the leader finder thread will process them (before shutting down) and add fetchers for those
     * partitions.
*/
info("Stopping leader finder thread")
if (leaderFinderThread != null) {
leaderFinderThread.shutdown()
leaderFinderThread = null
}
info("Stopping all fetchers")
closeAllFetchers()
// no need to hold the lock for the following since leaderFindThread and all fetchers have been stopped
partitionMap = null
noLeaderPartitionSet.clear()
info("All connections stopped")
}
def addPartitionsWithError(partitionList: Iterable[TopicPartition]) {
debug("adding partitions with error %s".format(partitionList))
inLock(lock) {
if (partitionMap != null) {
noLeaderPartitionSet ++= partitionList
cond.signalAll()
}
}
}
}
|
zzwlstarby/mykafka
|
core/src/main/scala/kafka/consumer/ConsumerFetcherManager.scala
|
Scala
|
apache-2.0
| 6,906 |
package chat.tox.antox.utils
import java.util
import android.content.{BroadcastReceiver, Context, Intent}
import android.net.ConnectivityManager
import chat.tox.antox.tox.ToxSingleton
import scala.collection.JavaConversions._
trait ConnectionTypeChangeListener {
//only called when network is connected
def connectionTypeChange(connectionType: Int): Unit
}
object ConnectionManager {
private val listenerList = new util.ArrayList[ConnectionTypeChangeListener]()
private var lastConnectionType: Option[Int] = None
def addConnectionTypeChangeListener(listener: ConnectionTypeChangeListener): Unit = {
listenerList.add(listener)
}
def getConnectionType(context: Context): Int = {
val connectivityManager = context.getSystemService(Context.CONNECTIVITY_SERVICE)
.asInstanceOf[ConnectivityManager]
connectivityManager.getActiveNetworkInfo.getType
}
def isNetworkAvailable(context: Context): Boolean = {
val connMgr = context.getSystemService(Context.CONNECTIVITY_SERVICE).asInstanceOf[ConnectivityManager]
val networkInfo = connMgr.getActiveNetworkInfo
networkInfo != null && networkInfo.isConnected
}
}
class ConnectionManager extends BroadcastReceiver {
override def onReceive(context: Context, intent: Intent) {
if (ConnectionManager.isNetworkAvailable(context)) {
val connectionType = ConnectionManager.getConnectionType(context)
if (ConnectionManager.lastConnectionType.isEmpty || connectionType != ConnectionManager.lastConnectionType.get) {
for (listener <- ConnectionManager.listenerList) {
listener.connectionTypeChange(connectionType)
}
ConnectionManager.lastConnectionType = Some(connectionType)
}
}
}
}
|
wiiam/Antox
|
app/src/main/scala/chat/tox/antox/utils/ConnectionManager.scala
|
Scala
|
gpl-3.0
| 1,736 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import java.io.File
import java.nio.file.{Files, NoSuchFileException}
import java.util.concurrent.locks.ReentrantLock
import LazyIndex._
import kafka.utils.CoreUtils.inLock
import kafka.utils.threadsafe
import org.apache.kafka.common.utils.Utils
/**
* A wrapper over an `AbstractIndex` instance that provides a mechanism to defer loading
* (i.e. memory mapping) the underlying index until it is accessed for the first time via the
* `get` method.
*
* In addition, this class exposes a number of methods (e.g. updateParentDir, renameTo, close,
* etc.) that provide the desired behavior without causing the index to be loaded. If the index
* had previously been loaded, the methods in this class simply delegate to the relevant method in
* the index.
*
 * This is an important optimization for broker start-up and shutdown time when the broker
 * has a large number of segments.
*
* Methods of this class are thread safe. Make sure to check `AbstractIndex` subclasses
* documentation to establish their thread safety.
*
* @param loadIndex A function that takes a `File` pointing to an index and returns a loaded
* `AbstractIndex` instance.
*/
@threadsafe
class LazyIndex[T <: AbstractIndex] private (@volatile private var indexWrapper: IndexWrapper, loadIndex: File => T) {
private val lock = new ReentrantLock()
def file: File = indexWrapper.file
def get: T = {
indexWrapper match {
case indexValue: IndexValue[_] => indexValue.index.asInstanceOf[T]
case _: IndexFile =>
inLock(lock) {
indexWrapper match {
case indexValue: IndexValue[_] => indexValue.index.asInstanceOf[T]
case indexFile: IndexFile =>
val indexValue = new IndexValue(loadIndex(indexFile.file))
indexWrapper = indexValue
indexValue.index
}
}
}
}
def updateParentDir(parentDir: File): Unit = {
inLock(lock) {
indexWrapper.updateParentDir(parentDir)
}
}
def renameTo(f: File): Unit = {
inLock(lock) {
indexWrapper.renameTo(f)
}
}
def deleteIfExists(): Boolean = {
inLock(lock) {
indexWrapper.deleteIfExists()
}
}
def close(): Unit = {
inLock(lock) {
indexWrapper.close()
}
}
def closeHandler(): Unit = {
inLock(lock) {
indexWrapper.closeHandler()
}
}
}
object LazyIndex {
def forOffset(file: File, baseOffset: Long, maxIndexSize: Int = -1, writable: Boolean = true): LazyIndex[OffsetIndex] =
new LazyIndex(new IndexFile(file), file => new OffsetIndex(file, baseOffset, maxIndexSize, writable))
def forTime(file: File, baseOffset: Long, maxIndexSize: Int = -1, writable: Boolean = true): LazyIndex[TimeIndex] =
new LazyIndex(new IndexFile(file), file => new TimeIndex(file, baseOffset, maxIndexSize, writable))
private sealed trait IndexWrapper {
def file: File
def updateParentDir(f: File): Unit
def renameTo(f: File): Unit
def deleteIfExists(): Boolean
def close(): Unit
def closeHandler(): Unit
}
private class IndexFile(@volatile private var _file: File) extends IndexWrapper {
def file: File = _file
def updateParentDir(parentDir: File): Unit = _file = new File(parentDir, file.getName)
def renameTo(f: File): Unit = {
try Utils.atomicMoveWithFallback(file.toPath, f.toPath, false)
catch {
case _: NoSuchFileException if !file.exists => ()
}
finally _file = f
}
def deleteIfExists(): Boolean = Files.deleteIfExists(file.toPath)
def close(): Unit = ()
def closeHandler(): Unit = ()
}
private class IndexValue[T <: AbstractIndex](val index: T) extends IndexWrapper {
def file: File = index.file
def updateParentDir(parentDir: File): Unit = index.updateParentDir(parentDir)
def renameTo(f: File): Unit = index.renameTo(f)
def deleteIfExists(): Boolean = index.deleteIfExists()
def close(): Unit = index.close()
def closeHandler(): Unit = index.closeHandler()
}
}
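// Usage sketch (file name and offset are illustrative): the index file is only memory-mapped
// when `get` is first called, not when the LazyIndex is constructed.
//
//   val lazyOffsetIndex = LazyIndex.forOffset(new File("/tmp/00000000000000000000.index"), baseOffset = 0L)
//   val offsetIndex: OffsetIndex = lazyOffsetIndex.get   // loads (mmaps) the index on first access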
|
TiVo/kafka
|
core/src/main/scala/kafka/log/LazyIndex.scala
|
Scala
|
apache-2.0
| 4,895 |
package com.twitter.finagle.exp.swift
import com.facebook.swift.codec.ThriftCodec
import com.facebook.swift.codec.internal.{TProtocolReader, TProtocolWriter}
import com.twitter.finagle.Service
import com.twitter.finagle.thrift.ThriftClientRequest
import com.twitter.util.Future
import java.lang.reflect.{InvocationHandler, Method, Proxy}
import java.util.Arrays
import org.apache.thrift.TApplicationException
import org.apache.thrift.protocol.{TBinaryProtocol, TMessageType, TMessage}
import org.apache.thrift.transport.{TMemoryBuffer, TMemoryInputTransport}
import scala.reflect.ClassTag
object SwiftProxy {
/**
* Given a service, create a `T`-typed client that dispatches
* thrift-encoded messages on it. `T` must be Swift-annotated.
*/
def newClient[T: ClassTag](service: Service[ThriftClientRequest, Array[Byte]]): T = {
val k = implicitly[ClassTag[T]].runtimeClass
val sym = ServiceSym(k)
Proxy.newProxyInstance(
k.getClassLoader(),
Array(k),
new ProxyHandler(sym, service)
).asInstanceOf[T]
}
}
private class ProxyHandler(
sym: ServiceSym,
service: Service[ThriftClientRequest, Array[Byte]])
extends InvocationHandler {
private[this] val codecs = Map() ++ (
for (s@MethodSym(_, m, _, _, _) <- sym.methods)
yield m -> new MethodCodec(s)
)
override def invoke(p: Object, m: Method, args: Array[Object]): Object = {
if (m.getDeclaringClass() == classOf[Object]) {
return m.getName() match {
case "toString" => "Service("+sym+")"
case "equals" =>
val eq = equals(Proxy.getInvocationHandler(args(0)))
new java.lang.Boolean(eq)
case "hashCode" => new java.lang.Integer(hashCode())
case _ => throw new UnsupportedOperationException
}
}
codecs.get(m) match {
case Some(codec) =>
val encoded = codec.encode(args)
service(new ThriftClientRequest(encoded, false)) map codec.decode
case None =>
val exc = new TApplicationException(
TApplicationException.UNKNOWN_METHOD,
"Unknown method "+m)
Future.exception(exc)
}
}
}
/**
 * Given a method symbol, a `MethodCodec` serializes request argument structures and
 * deserializes the corresponding response structures.
*/
class MethodCodec(sym: MethodSym) {
private[this] val returnCodec =
ThriftCodecManager.getCodec(sym.returnType)
private[this] val exceptions = Map() ++ (
sym.exceptions map { case (id, thriftType) =>
id -> ThriftCodecManager.getCodec(thriftType).asInstanceOf[ThriftCodec[Object]]
}
)
def encode(args: Array[Object]): Array[Byte] = {
val buf = new TMemoryBuffer(32)
val out = new TBinaryProtocol(buf)
out.writeMessageBegin(new TMessage(sym.name, TMessageType.CALL, 0))
val writer = new TProtocolWriter(out)
writer.writeStructBegin(sym.name+"_args")
for (i <- Range(0, args.size)) {
val ArgSym(name, id, thriftType) = sym.args(i)
val codec = ThriftCodecManager
.getCodec(thriftType).asInstanceOf[ThriftCodec[Object]]
writer.writeField(name, id, codec, args(i))
}
writer.writeStructEnd()
out.writeMessageEnd()
Arrays.copyOfRange(buf.getArray(), 0, buf.length())
}
def decode(bytes: Array[Byte]): Object = {
val buf = new TMemoryInputTransport(bytes)
val in = new TBinaryProtocol(buf)
val msg = in.readMessageBegin()
if (msg.`type` == TMessageType.EXCEPTION)
throw TApplicationException.read(in)
require(msg.`type` == TMessageType.REPLY)
// todo: check method name, seqid, etc.
val reader = new TProtocolReader(in)
reader.readStructBegin()
while (reader.nextField()) {
reader.getFieldId() match {
case 0 =>
return reader.readField(returnCodec)
case id if exceptions contains id =>
throw reader.readField(exceptions(id)).asInstanceOf[Exception]
case _ =>
reader.skipFieldData()
}
}
throw new TApplicationException(
TApplicationException.MISSING_RESULT,
sym.name + " failed: unknown result")
}
}
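// Client-construction sketch (illustrative; assumes a Swift-annotated trait `Calculator`
// and an existing Service[ThriftClientRequest, Array[Byte]] named `transport`):
//
//   val client: Calculator = SwiftProxy.newClient[Calculator](transport)
//   client.add(1, 2)   // each call is thrift-encoded and dispatched through `transport`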
|
latur19318/finagle
|
finagle-swift/src/main/scala/com/twitter/finagle/swift/proxy.scala
|
Scala
|
apache-2.0
| 4,057 |
package kafka.common
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Convenience case class since (clientId, brokerInfo) pairs are used to create
* SyncProducer Request Stats and SimpleConsumer Request and Response Stats.
*/
trait ClientIdBroker {
}
case class ClientIdAndBroker(clientId: String, brokerHost: String, brokerPort: Int) extends ClientIdBroker {
override def toString = "%s-%s-%d".format(clientId, brokerHost, brokerPort)
}
case class ClientIdAllBrokers(clientId: String) extends ClientIdBroker {
override def toString = "%s-%s".format(clientId, "AllBrokers")
}
|
flange/drift-dev
|
kafka/00-kafka_2.11-0.10.1.0/libs/tmp/kafka/common/ClientIdAndBroker.scala
|
Scala
|
apache-2.0
| 1,344 |
package com.highperformancespark.examples.goldilocks
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import org.apache.spark.{HashPartitioner, Partitioner}
import org.apache.spark.rdd.RDD
object SecondarySort {
//tag::sortByTwoKeys[]
def sortByTwoKeys[K : Ordering : ClassTag , S, V : ClassTag](pairRDD : RDD[((K, S), V)], partitions : Int ) = {
val colValuePartitioner = new PrimaryKeyPartitioner[K, S](partitions)
implicit val ordering: Ordering[(K, S)] = Ordering.by(_._1)
val sortedWithinParts = pairRDD.repartitionAndSortWithinPartitions(
colValuePartitioner)
sortedWithinParts
}
//end::sortByTwoKeys[]
//tag::sortAndGroup[]
def groupByKeyAndSortBySecondaryKey[K : Ordering : ClassTag, S, V : ClassTag](pairRDD : RDD[((K, S), V)], partitions : Int ) = {
    val colValuePartitioner = new PrimaryKeyPartitioner[K, S](partitions)
implicit val ordering: Ordering[(K, S)] = Ordering.by(_._1)
val sortedWithinParts = pairRDD.repartitionAndSortWithinPartitions(
colValuePartitioner)
sortedWithinParts.mapPartitions( iter => groupSorted[K, S, V](iter) )
}
def groupSorted[K,S,V](
it: Iterator[((K, S), V)]): Iterator[(K, List[(S, V)])] = {
val res = List[(K, ArrayBuffer[(S, V)])]()
it.foldLeft(res)((list, next) => list match {
case Nil =>
val ((firstKey, secondKey), value) = next
List((firstKey, ArrayBuffer((secondKey, value))))
case head :: rest =>
val (curKey, valueBuf) = head
val ((firstKey, secondKey), value) = next
if (!firstKey.equals(curKey) ) {
(firstKey, ArrayBuffer((secondKey, value))) :: list
} else {
valueBuf.append((secondKey, value))
list
}
}).map { case (key, buf) => (key, buf.toList) }.iterator
}
//end::sortAndGroup[]
}
//tag::primaryKeyPartitioner[]
class PrimaryKeyPartitioner[K, S](partitions: Int) extends Partitioner {
/**
* We create a hash partitioner and use it with the first set of keys.
*/
val delegatePartitioner = new HashPartitioner(partitions)
override def numPartitions = delegatePartitioner.numPartitions
/**
* Partition according to the hash value of the first key
*/
override def getPartition(key: Any): Int = {
val k = key.asInstanceOf[(K, S)]
delegatePartitioner.getPartition(k._1)
}
}
//end::primaryKeyPartitioner[]
object CoPartitioningLessons {
def coLocated(a : RDD[(Int, String)], b : RDD[(Int, String)],
partitionerX : Partitioner, partitionerY :Partitioner): Unit = {
    //tag::coLocated[]
val rddA = a.partitionBy(partitionerX)
rddA.cache()
val rddB = b.partitionBy(partitionerY)
rddB.cache()
val rddC = a.cogroup(b)
rddC.count()
//end::coLocated[]
}
def notCoLocated(a : RDD[(Int, String)], b : RDD[(Int, String )],
partitionerX : Partitioner, partitionerY :Partitioner): Unit = {
    //tag::notCoLocated[]
val rddA = a.partitionBy(partitionerX)
rddA.cache()
val rddB = b.partitionBy(partitionerY)
rddB.cache()
val rddC = a.cogroup(b)
rddA.count()
rddB.count()
rddC.count()
//end::notCoLocated[]
}
}
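// Hedged usage sketch (added for illustration, not part of the original file): driving the
// helpers above from a local SparkContext. The sample data, app name and partition count are
// illustrative only.
object SecondarySortExample {
  import org.apache.spark.{SparkConf, SparkContext}
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setMaster("local[2]").setAppName("secondary-sort-example"))
    try {
      // ((primaryKey, secondaryKey), value) records
      val records = sc.parallelize(Seq(
        (("a", 2), "x"), (("a", 1), "y"), (("b", 3), "z")))
      // Records sharing a primary key land in the same partition and are sorted within it.
      val sorted = SecondarySort.sortByTwoKeys(records, 2)
      sorted.collect().foreach(println)
      // Grouped view: primary key -> List((secondaryKey, value)).
      val grouped = SecondarySort.groupByKeyAndSortBySecondaryKey(records, 2)
      grouped.collect().foreach(println)
    } finally {
      sc.stop()
    }
  }
}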
|
mahmoudhanafy/high-performance-spark-examples
|
src/main/scala/com/high-performance-spark-examples/GoldiLocks/SecondarySort.scala
|
Scala
|
apache-2.0
| 3,194 |
package doc.jockey.model
import org.scalatest.WordSpec
import example.project.fixture.ComputerIs
import doc.jockey.rendering.HtmlAssertions
class TestNodeBehaviourSpec extends WordSpec with HtmlAssertions {
"Executing a Before gives you an After" in {
val aCmd = ComputerIs(true)
assert(Before(aCmd).execute === After(aCmd, List(Pass("on")), Nil))
}
"Afters have Summaries" in {
assert(After(ComputerIs(true), List(Fail("off", "on")), Nil).summary === Summary(0, 1))
}
}
|
agmenc/doc-jockey
|
src/test/scala/doc/jockey/model/TestNodeBehaviourSpec.scala
|
Scala
|
mit
| 493 |
package api
import spray.routing.Directives
import scala.concurrent.ExecutionContext
import akka.actor.ActorRef
import core.{User, RegistrationActor}
import akka.util.Timeout
import RegistrationActor._
import spray.http._
class RegistrationService(registration: ActorRef)(implicit executionContext: ExecutionContext)
extends Directives with DefaultJsonFormats {
case class ImageUploaded(size: Long)
import akka.pattern.ask
import scala.concurrent.duration._
implicit val timeout = Timeout(2.seconds)
implicit val userFormat = jsonFormat4(User)
implicit val registerFormat = jsonFormat1(Register)
implicit val registeredFormat = jsonObjectFormat[Registered.type]
implicit val notRegisteredFormat = jsonObjectFormat[NotRegistered.type]
implicit val imageUploadedFormat = jsonFormat1(ImageUploaded)
implicit object EitherErrorSelector extends ErrorSelector[NotRegistered.type] {
def apply(v: NotRegistered.type): StatusCode = StatusCodes.BadRequest
}
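  // Route overview (comment added for clarity; not part of the original file):
  //   POST /register        - forwards the Register payload to the registration actor and
  //                           completes with Registered or NotRegistered (mapped to 400)
  //   POST /register/image  - accepts multipart form data under "files[]" and replies with
  //                           the uploaded size as ImageUploaded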
val route =
path("register") {
post {
handleWith { ru: Register => (registration ? ru).mapTo[Either[NotRegistered.type, Registered.type]] }
}
} ~
path("register" / "image") {
post {
handleWith { data: MultipartFormData =>
data.get("files[]") match {
case Some(imageEntity) =>
val size = imageEntity.entity.data.length
println(s"Uploaded $size")
ImageUploaded(size)
case None =>
println("No files")
ImageUploaded(0)
}
}
}
}
}
|
eigengo/activator-akka-spray
|
src/main/scala/api/RegistrationService.scala
|
Scala
|
apache-2.0
| 1,663 |
package toolkit.exceptions
/**
* Created by #GrowinScala
*/
class GraphRootHasInvalidTypes extends Exception("The graph root has invalid types")
|
exocute/Toolkit
|
src/main/scala/toolkit/exceptions/GraphRootHasInvalidTypes.scala
|
Scala
|
bsd-2-clause
| 149 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.security.auth
import kafka.common.{BaseEnum, KafkaException}
import org.apache.kafka.common.acl.AclPermissionType
@deprecated("Use org.apache.kafka.common.acl.AclPermissionType", "Since 2.5")
sealed trait PermissionType extends BaseEnum {
val toJava: AclPermissionType
}
@deprecated("Use org.apache.kafka.common.acl.AclPermissionType", "Since 2.5")
case object Allow extends PermissionType {
val name = "Allow"
val toJava = AclPermissionType.ALLOW
}
@deprecated("Use org.apache.kafka.common.acl.AclPermissionType", "Since 2.5")
case object Deny extends PermissionType {
val name = "Deny"
val toJava = AclPermissionType.DENY
}
@deprecated("Use org.apache.kafka.common.acl.AclPermissionType", "Since 2.5")
object PermissionType {
def fromString(permissionType: String): PermissionType = {
val pType = values.find(pType => pType.name.equalsIgnoreCase(permissionType))
pType.getOrElse(throw new KafkaException(permissionType + " not a valid permissionType name. The valid names are " + values.mkString(",")))
}
def fromJava(permissionType: AclPermissionType): PermissionType = fromString(permissionType.toString)
def values: Seq[PermissionType] = List(Allow, Deny)
}
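// Hedged usage sketch (added for illustration, not part of the original file): resolving a
// PermissionType by name and converting it to the Java enum.
object PermissionTypeExample {
  val allow: PermissionType = PermissionType.fromString("allow") // matching is case-insensitive
  val asJava: AclPermissionType = allow.toJava                   // AclPermissionType.ALLOW
}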
|
sslavic/kafka
|
core/src/main/scala/kafka/security/auth/PermissionType.scala
|
Scala
|
apache-2.0
| 2,020 |
/*
* Copyright 2001-2016 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalactic.anyvals
import reflect.macros.Context
import java.util.regex.Pattern
// SKIP-SCALATESTJS,NATIVE-START
import java.util.regex.PatternSyntaxException
// SKIP-SCALATESTJS,NATIVE-END
import CompileTimeAssertions._
private[scalactic] object RegexStringMacro {
def isValid(s: String): Boolean =
checkIsValid(s)._1
private def checkIsValid(s: String): (Boolean, String) =
try {
Pattern.compile(s)
(true, "")
}
catch {
// SKIP-SCALATESTJS,NATIVE-START
case e: PatternSyntaxException =>
// SKIP-SCALATESTJS,NATIVE-END
//SCALATESTJS,NATIVE-ONLY case e: Exception => // TODO: Figure out exactly what exception JS throws in this case
(false, "\\n" + e.getMessage)
}
def apply(c: Context)(value: c.Expr[String]): c.Expr[RegexString] = {
val notValidExceptionMsg: String = {
import c.universe._
value.tree match {
case Literal(stringConst) =>
checkIsValid(stringConst.value.toString)._2
case _ =>
""
}
}
val notValidMsg =
"RegexString.apply can only be invoked on String literals that " +
"represent valid regular expressions." + notValidExceptionMsg
val notLiteralMsg =
"RegexString.apply can only be invoked on String literals that " +
"represent valid regular expressions. Please use RegexString.from " +
"instead."
ensureValidStringLiteral(c)(value, notValidMsg, notLiteralMsg)(isValid)
c.universe.reify { RegexString.ensuringValid(value.splice) }
}
}
|
dotty-staging/scalatest
|
scalactic-macro/src/main/scala/org/scalactic/anyvals/RegexStringMacro.scala
|
Scala
|
apache-2.0
| 2,127 |
/*
* @author Philip Stutz
* @author Daniel Strebel
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.examples
import java.io._
import scala.collection.mutable.ArrayBuffer
import com.signalcollect._
import com.signalcollect.factory.messagebus.BulkAkkaMessageBusFactory
import com.signalcollect.configuration.ExecutionMode._
/**
* Use GraphSplitter to download the graph and generate the splits.
*
* Run with JVM parameters:
* -Xmx2000m -Xms2000m
*
* Computation ran in as little as 677 milliseconds (best run) on a notebook
* with a 2.3GHz Core i7 (1 processor, 4 cores, 8 splits for 8 hyper-threads).
*/
object EfficientSsspLoader extends App {
val g = new GraphBuilder[Int, Int].withMessageBusFactory(new BulkAkkaMessageBusFactory(96, false)).build
val numberOfSplits = Runtime.getRuntime.availableProcessors
val splits = {
val s = new Array[DataInputStream](numberOfSplits)
for (i <- 0 until numberOfSplits) {
s(i) = new DataInputStream(new FileInputStream(s"web-split-$i"))
}
s
}
for (i <- 0 until numberOfSplits) {
g.modifyGraph(loadSplit(i) _, Some(i))
}
print("Loading graph ...")
g.awaitIdle
println("done.")
print("Running computation ...")
val stats = g.execute(ExecutionConfiguration.withExecutionMode(PureAsynchronous))
println("done.")
println(stats)
implicit val ord = Ordering[Int].reverse
val min1000 = g.aggregate(new TopKFinder[Int](1000))
min1000 foreach (println(_))
g.shutdown
def loadSplit(splitIndex: Int)(ge: GraphEditor[Int, Int]) {
val in = splits(splitIndex)
var vertexId = CompactIntSet.readUnsignedVarInt(in)
while (vertexId >= 0) {
val numberOfEdges = CompactIntSet.readUnsignedVarInt(in)
var edges = new ArrayBuffer[Int]
while (edges.length < numberOfEdges) {
val nextEdge = CompactIntSet.readUnsignedVarInt(in)
edges += nextEdge
}
val vertex = {
if (vertexId == 254913) { // Source vertex
new EfficientSsspVertex(vertexId, 0)
} else {
new EfficientSsspVertex(vertexId)
}
}
vertex.setTargetIds(edges.length, CompactIntSet.create(edges.toArray))
ge.addVertex(vertex)
vertexId = CompactIntSet.readUnsignedVarInt(in)
}
}
}
/**
* A version of Sssp that performs faster and uses less memory than the standard version.
* This version collects upon signal delivery.
*/
class EfficientSsspVertex(val id: Int, var state: Int = Int.MaxValue) extends Vertex[Int, Int] {
var lastSignalState = Int.MaxValue
var outEdges = 0
def setState(s: Int) {
state = s
}
protected var targetIdArray: Array[Byte] = null
def setTargetIds(numberOfEdges: Int, compactIntSet: Array[Byte]) = {
outEdges = numberOfEdges
targetIdArray = compactIntSet
}
def deliverSignal(signal: Any, sourceId: Option[Any], graphEditor: GraphEditor[Any, Any]): Boolean = {
val s = signal.asInstanceOf[Int]
state = math.min(s, state)
true
}
override def executeSignalOperation(graphEditor: GraphEditor[Any, Any]) {
if (outEdges != 0) {
CompactIntSet.foreach(targetIdArray, graphEditor.sendSignal(state + 1, _, None))
}
lastSignalState = state
}
override def scoreSignal: Double = lastSignalState - state
def scoreCollect = 0 // Because signals are collected upon delivery.
def edgeCount = outEdges
override def toString = s"${this.getClass.getName}(state=$state)"
def executeCollectOperation(graphEditor: GraphEditor[Any, Any]) {}
def afterInitialization(graphEditor: GraphEditor[Any, Any]) = {}
def beforeRemoval(graphEditor: GraphEditor[Any, Any]) = {}
override def addEdge(e: Edge[_], graphEditor: GraphEditor[Any, Any]): Boolean = throw new UnsupportedOperationException("Use setTargetIds(...)")
override def removeEdge(targetId: Any, graphEditor: GraphEditor[Any, Any]): Boolean = throw new UnsupportedOperationException
override def removeAllEdges(graphEditor: GraphEditor[Any, Any]): Int = throw new UnsupportedOperationException
}
|
gmazlami/dcop-maxsum
|
src/main/scala/com/signalcollect/examples/EfficientSssp.scala
|
Scala
|
apache-2.0
| 4,611 |
package net.wrap_trap.goju
import scala.language.implicitConversions
import scala.language.reflectiveCalls
/**
* goju: HanoiDB(LSM-trees (Log-Structured Merge Trees) Indexed Storage) clone
*
* Copyright (c) 2016 Masayuki Takahashi
*
* This software is released under the MIT License.
* http://opensource.org/licenses/mit-license.php
*/
object Helper {
def using[A, R <: { def close() }](r: R)(f: R => A): A = {
try {
f(r)
} finally {
try { r.close() } catch { case _: Exception => }
}
}
def using[A, R1 <: { def close() }, R2 <: { def close() }](r1: R1, r2: R2)(
f: (R1, R2) => A): A = {
try {
f(r1, r2)
} finally {
try { r1.close() } catch { case _: Exception => }
try { r2.close() } catch { case _: Exception => }
}
}
def using[A, R1 <: { def close() }, R2 <: { def close() }, R3 <: { def close() }](
r1: R1,
r2: R2,
r3: R3)(f: (R1, R2, R3) => A): A = {
try {
f(r1, r2, r3)
} finally {
try { r1.close() } catch { case _: Exception => }
try { r2.close() } catch { case _: Exception => }
try { r3.close() } catch { case _: Exception => }
}
}
implicit def toBytes(key: Key): Array[Byte] = {
key.bytes
}
}
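// Hedged usage sketch (added for illustration, not part of the original file): the loan
// pattern above applied to a plain java.io reader. The file path is illustrative.
object HelperExample {
  import java.io.{BufferedReader, FileReader}
  def firstLine(path: String): Option[String] =
    Helper.using(new BufferedReader(new FileReader(path))) { reader =>
      Option(reader.readLine())
    }
}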
|
masayuki038/goju
|
src/main/scala/net/wrap_trap/goju/Helper.scala
|
Scala
|
mit
| 1,251 |
package com.twitter.finagle.stats
import com.twitter.common.base.Supplier
import com.twitter.common.stats.{Percentile, Stats}
import com.twitter.util.registry.GlobalRegistry
class CommonsStatsReceiver extends StatsReceiverWithCumulativeGauges {
GlobalRegistry.get.put(
Seq("stats", "commons_stats", "counters_latched"),
"false")
val repr = Stats.STATS_PROVIDER
@volatile private[this] var stats = Map.empty[Seq[String], Stat]
@volatile private[this] var counters = Map.empty[Seq[String], Counter]
private[this] def variableName(name: Seq[String]) = name mkString "_"
protected[this] def registerGauge(name: Seq[String], f: => Float): Unit = {
Stats.STATS_PROVIDER.makeGauge(variableName(name), new Supplier[java.lang.Float] {
def get = new java.lang.Float(f)
})
}
protected[this] def deregisterGauge(name: Seq[String]): Unit = {
// not implemented in commons
}
def counter(name: String*): Counter = {
if (!counters.contains(name)) synchronized {
if (!counters.contains(name)) {
val counter = new Counter {
private[this] val underlying = Stats.exportLong(variableName(name))
def incr(delta: Int) { underlying.addAndGet(delta) }
}
counters += (name -> counter)
}
}
counters(name)
}
def stat(name: String*): Stat = {
if (!stats.contains(name)) synchronized {
if (!stats.contains(name)) {
val stat = new Stat {
val percentile = new Percentile[java.lang.Float](variableName(name), 100.0f , 50, 95, 99)
def add(value: Float): Unit = percentile.record(value)
}
stats += (name -> stat)
}
}
stats(name)
}
}
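// Hedged usage sketch (added for illustration, not part of the original file): metric names
// are joined with underscores and exported through the shared commons Stats provider.
object CommonsStatsReceiverExample {
  val receiver = new CommonsStatsReceiver
  receiver.counter("http", "requests").incr(1)    // exported as the long variable "http_requests"
  receiver.stat("http", "latency_ms").add(12.3f)  // recorded in the "http_latency_ms" percentile
}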
|
folone/finagle
|
finagle-commons-stats/src/main/scala/com/twitter/finagle/stats/CommonsStatsReceiver.scala
|
Scala
|
apache-2.0
| 1,699 |
package monocle.function
import monocle.{Iso, Prism}
import scala.annotation.implicitNotFound
/**
* Typeclass that defines a [[Prism]] from an `S` and its empty value
* @tparam S source of [[Prism]]
*/
@implicitNotFound("Could not find an instance of Empty[${S}], please check Monocle instance location policy to " +
"find out which import is necessary")
abstract class Empty[S] extends Serializable {
def empty: Prism[S, Unit]
}
trait EmptyFunctions {
def empty[S](implicit ev: Empty[S]): Prism[S, Unit] =
ev.empty
def _isEmpty[S](s: S)(implicit ev: Empty[S]): Boolean =
ev.empty.getOption(s).isDefined
def _empty[S](implicit ev: Empty[S]): S =
ev.empty.reverseGet(())
}
object Empty extends EmptyFunctions {
/** lift an instance of [[Empty]] using an [[Iso]] */
def fromIso[S, A](iso: Iso[S, A])(implicit ev: Empty[A]): Empty[S] = new Empty[S] {
val empty: Prism[S, Unit] =
iso composePrism ev.empty
}
/************************************************************************************************/
/** Std instances */
/************************************************************************************************/
implicit def listEmpty[A]: Empty[List[A]] = new Empty[List[A]] {
val empty = Prism[List[A], Unit](l => if(l.isEmpty) Some(()) else None)(_ => List.empty)
}
implicit def mapEmpty[K, V]: Empty[Map[K, V]] = new Empty[Map[K, V]] {
val empty = Prism[Map[K, V], Unit](m => if(m.isEmpty) Some(()) else None)(_ => Map.empty)
}
implicit def optionEmpty[A]: Empty[Option[A]] = new Empty[Option[A]] {
val empty = monocle.std.option.none[A]
}
implicit def emptySet[A]: Empty[Set[A]] = new Empty[Set[A]] {
val empty = Prism[Set[A], Unit](s => if(s.isEmpty) Some(()) else None)(_ => Set.empty[A])
}
implicit def streamEmpty[A]: Empty[Stream[A]] = new Empty[Stream[A]] {
val empty = Prism[Stream[A], Unit](s => if(s.isEmpty) Some(()) else None)(_ => Stream.empty)
}
implicit val stringEmpty: Empty[String] = new Empty[String] {
val empty = Prism[String, Unit](s => if(s.isEmpty) Some(()) else None)(_ => "")
}
implicit def vectorEmpty[A]: Empty[Vector[A]] = new Empty[Vector[A]] {
val empty = Prism[Vector[A], Unit](v => if(v.isEmpty) Some(()) else None)(_ => Vector.empty)
}
/************************************************************************************************/
/** Scalaz instances */
/************************************************************************************************/
import monocle.std.maybe.nothing
import scalaz.{==>>, IList, ISet, Maybe}
implicit def iListEmpty[A]: Empty[IList[A]] = new Empty[IList[A]] {
def empty = Prism[IList[A], Unit](l => if(l.isEmpty) Some(()) else None)(_ => IList.empty)
}
implicit def iMapEmpty[K, V]: Empty[K ==>> V] = new Empty[K ==>> V] {
def empty = Prism[K ==>> V, Unit](m => if(m.isEmpty) Some(()) else None)(_ => ==>>.empty)
}
implicit def emptyISet[A]: Empty[ISet[A]] = new Empty[ISet[A]] {
def empty = Prism[ISet[A], Unit](s => if(s.isEmpty) Some(()) else None)(_ => ISet.empty[A])
}
implicit def maybeEmpty[A]: Empty[Maybe[A]] = new Empty[Maybe[A]]{
def empty = nothing
}
}
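// Hedged usage sketch (added for illustration, not part of the original file): the helper
// functions mixed into the companion check for, and produce, empty values via the Prism.
object EmptyExample {
  val freshList: List[Int] = Empty._empty[List[Int]]      // List()
  val isEmpty: Boolean = Empty._isEmpty(List.empty[Int])  // true
  val nonEmpty: Boolean = Empty._isEmpty(List(1, 2, 3))   // false
}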
|
rperry/Monocle
|
core/shared/src/main/scala/monocle/function/Empty.scala
|
Scala
|
mit
| 3,370 |
/** ____ __ ____ ____ ____,,___ ____ __ __ ____
 * ( _ \ /__\ (_ )(_ _)( ___)/ __) ( _ \( )( )( _ \ Read
 * ) / /(__)\ / /_ _)(_ )__) \__ \ )___/ )(__)( ) _ < README.txt
 * (_)\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
 */
package razie.diesel.dom
import razie.diesel.dom.RDOM.{C, DE}
import razie.hosting.WikiReactors
import razie.tconf.TagQuery
import razie.wiki.model._
/**
* The domain for a realm. All sub-domains should be combined in this one domain.
*
* Combine all domain tools with higher level helpers on top of a domain
*/
trait WikiDomain {
def realm: String
def wi: WikiInst
/** all plugins for this domain and in this realm */
def allPlugins: List[DomInventory]
  /** Register a connected inventory.
    *
    * Note: only usable inventories should be registered, i.e. ones that are already connected. */
def addPlugin(inv: DomInventory): List[DomInventory]
def findPlugins(inventory: String, conn: String = ""): List[DomInventory] = {
allPlugins
.find(_.name == inventory)
.orElse(
None
//allPlugins.find(_.isInstanceOf[DomInvWikiPlugin])
).toList
}
final val INVENTORY = "inventory"
/** based on annotations etc */
def findPluginsForClass(c: DE): List[DomInventory] = {
if (c.isInstanceOf[C]) {
allPlugins
.find(_.isDefinedFor(realm, c.asInstanceOf[C]))
.orElse(
// get annotation "inventory"
c.asInstanceOf[C].props
.find(_.name == INVENTORY)
.flatMap(inv =>
allPlugins.find(_.name == inv.currentStringValue)
)
).orElse(
allPlugins
.find(_.isInstanceOf[DomInvWikiPlugin])
.filter(_.isDefinedFor(realm, c.asInstanceOf[C]))
)
.toList
} else Nil
}
/** the aggregated domain representation for this realm */
def rdom: RDomain
  /** true while the domain is (re)loading - during loading it may recursively trigger domain lookups */
def isLoading: Boolean
def resetDom: Unit
/** is this an actual wiki category or a user-defined class or imported concept? */
def isWikiCategory(cat: String): Boolean
/** parse categories into domain model */
def createRDom: RDomain
// todo expand these inline
def zEnds(aEnd: String, zRole: String) = rdom.zEnds(aEnd, zRole)
def needsOwner(cat: String) = rdom.needsOwner(cat)
def prop(cat: String, name: String): Option[String] = rdom.prop(cat, name)
def needsParent(cat: String) = rdom.needsParent(cat)
def isA(what: String, cat: String): Boolean = rdom.isA(what, cat)
def noAds(cat: String) =
prop(cat, "noAds").isDefined
// todo optimize
def dtree(base: String): List[String] =
(base :: rdom.classes.values.filter(_.base contains base).toList.flatMap(x => dtree(x.name))).distinct
def roles(a: String, z: String): List[String] = {
val mine = rdom.classes.get(a).toList.flatMap(_.assocs).filter(_.z == z).map(_.zRole)
if (mine.isEmpty) rdom.classes.get(a).toList.flatMap(_.base).foldLeft(List.empty[String])(
(a, b) => a ++ roles(b, z))
else mine
}
}
object WikiDomain {
final val WIKI_CAT = "wikiCategory"
def apply(realm: String) = WikiReactors(realm).domain
/** todo does it really need to start with one */
def domFrom (first:WikiEntry, pages:List[WikiEntry]) : RDomain = {
RDomain.domFrom(first, pages)
}
/** crawl all domain pieces and build a domain */
def domFrom (we:WikiEntry) : Option[RDomain] = {
we.preprocessed
RDomain.domFrom(we)
}
  /** crawl all domain pieces in a page and collect those matched by the partial function */
def domFilter[T] (we:WikiEntry)(p:PartialFunction[Any,T]) : List[T] = {
RDomain.domFilter(we)(p)
}
def canCreateNew (realm:String, cat:String) = "User" != cat && "WikiLink" != cat
  //todo can I create WikiLink if I am admin?
/** root categories we can create free instance from */
def rootCats (realm:String) = {
apply(realm)
.rdom
.classes
.values
.filter(_.stereotypes.contains(razie.diesel.dom.WikiDomain.WIKI_CAT))
.map(_.name)
.toList
}
/** present a WE as a generic spec */
def spec (we:WikiEntry) = we
/** if any special DOM wiki changes, rebuild the domain */
WikiObservers mini {
case WikiEvent(_, "WikiEntry", _, Some(x), _, _, _)
if domTagQuery.matches(x.asInstanceOf[WikiEntry])
=> {
val we = x.asInstanceOf[WikiEntry]
WikiDomain.apply(we.realm).resetDom
}
}
/** use with WikiSearch.getList */
val domTagQuery = new TagQuery("DslDomain,dsldomain,Category,domain")
}
|
razie/diesel-hydra
|
diesel/src/main/scala/razie/diesel/dom/WikiDomain.scala
|
Scala
|
apache-2.0
| 4,678 |
package im.mange.flakeless
private [flakeless] class ConditionNotMetException(message: String) extends RuntimeException(message) {
  def this(conditionToCheck: String, millis: Long) = this(conditionToCheck + " (not met within " + millis + " millis)\n")
}
|
alltonp/flakeless
|
src/main/scala/im/mange/flakeless/ConditionNotMetException.scala
|
Scala
|
mit
| 256 |
package net.chrisloy.akka
import com.amazonaws.services.ec2.AmazonEC2Client
import java.net.URL
import java.io.{InputStreamReader, BufferedReader}
import com.amazonaws.services.ec2.model.{InstanceStateName, DescribeInstancesRequest, Instance}
import scala.collection.JavaConversions._
import com.amazonaws.services.autoscaling.model.{DescribeAutoScalingGroupsRequest, DescribeAutoScalingInstancesRequest}
import com.amazonaws.services.autoscaling.AmazonAutoScalingClient
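/**
 * Added descriptive comment (not part of the original file; wording inferred from the code
 * below): looks up the current EC2 instance via the instance metadata endpoint, resolves its
 * Auto Scaling group and exposes the private IPs of the running sibling instances.
 */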
class EC2(scaling: AmazonAutoScalingClient, ec2: AmazonEC2Client) {
def siblingIps: List[String] = groupInstanceIds(groupName(instanceId)) map instanceFromId collect {
case instance if isRunning(instance) => instance.getPrivateIpAddress
}
def currentIp = instanceFromId(instanceId).getPrivateIpAddress
val isRunning: Instance => Boolean = _.getState.getName == InstanceStateName.Running.toString
private def instanceId = {
val conn = new URL("http://169.254.169.254/latest/meta-data/instance-id").openConnection
val in = new BufferedReader(new InputStreamReader(conn.getInputStream))
try in.readLine() finally in.close()
}
private def instanceFromId(id: String): Instance = {
val result = ec2 describeInstances new DescribeInstancesRequest {
setInstanceIds(id :: Nil)
}
result.getReservations.head.getInstances.head
}
private def groupName(instanceId: String) = {
val result = scaling describeAutoScalingInstances new DescribeAutoScalingInstancesRequest {
setInstanceIds(instanceId :: Nil)
}
result.getAutoScalingInstances.head.getAutoScalingGroupName
}
private def groupInstanceIds(groupName: String) = {
val result = scaling describeAutoScalingGroups new DescribeAutoScalingGroupsRequest {
setAutoScalingGroupNames(groupName :: Nil)
}
result.getAutoScalingGroups.head.getInstances.toList map (_.getInstanceId)
}
}
|
chrisloy/akka-ec2
|
src/main/scala/net/chrisloy/akka/EC2.scala
|
Scala
|
gpl-3.0
| 1,889 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.sources
import java.io.File
import org.apache.spark.sql.{AnalysisException, Row}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.util.Utils
class InsertSuite extends DataSourceTest with SharedSQLContext {
protected override lazy val sql = spark.sql _
private var path: File = null
override def beforeAll(): Unit = {
super.beforeAll()
path = Utils.createTempDir()
val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""))
spark.read.json(rdd).createOrReplaceTempView("jt")
sql(
s"""
|CREATE TEMPORARY TABLE jsonTable (a int, b string)
|USING org.apache.spark.sql.json.DefaultSource
|OPTIONS (
| path '${path.toString}'
|)
""".stripMargin)
}
override def afterAll(): Unit = {
try {
spark.catalog.dropTempView("jsonTable")
spark.catalog.dropTempView("jt")
Utils.deleteRecursively(path)
} finally {
super.afterAll()
}
}
test("Simple INSERT OVERWRITE a JSONRelation") {
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
(1 to 10).map(i => Row(i, s"str$i"))
)
}
test("insert into a temp view that does not point to an insertable data source") {
import testImplicits._
withTempView("t1", "t2") {
sql(
"""
|CREATE TEMPORARY VIEW t1
|USING org.apache.spark.sql.sources.SimpleScanSource
|OPTIONS (
| From '1',
| To '10')
""".stripMargin)
sparkContext.parallelize(1 to 10).toDF("a").createOrReplaceTempView("t2")
val message = intercept[AnalysisException] {
sql("INSERT INTO TABLE t1 SELECT a FROM t2")
}.getMessage
assert(message.contains("does not allow insertion"))
}
}
test("PreInsert casting and renaming") {
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a * 2, a * 4 FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
(1 to 10).map(i => Row(i * 2, s"${i * 4}"))
)
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a * 4 AS A, a * 6 as c FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
(1 to 10).map(i => Row(i * 4, s"${i * 6}"))
)
}
test("SELECT clause generating a different number of columns is not allowed.") {
val message = intercept[AnalysisException] {
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a FROM jt
""".stripMargin)
}.getMessage
assert(message.contains("the number of columns are different")
)
}
test("INSERT OVERWRITE a JSONRelation multiple times") {
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
(1 to 10).map(i => Row(i, s"str$i"))
)
// Writing the table to less part files.
val rdd1 = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""), 5)
spark.read.json(rdd1).createOrReplaceTempView("jt1")
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt1
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
(1 to 10).map(i => Row(i, s"str$i"))
)
// Writing the table to more part files.
val rdd2 = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str$i"}"""), 10)
spark.read.json(rdd2).createOrReplaceTempView("jt2")
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt2
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
(1 to 10).map(i => Row(i, s"str$i"))
)
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a * 10, b FROM jt1
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
(1 to 10).map(i => Row(i * 10, s"str$i"))
)
spark.catalog.dropTempView("jt1")
spark.catalog.dropTempView("jt2")
}
test("INSERT INTO JSONRelation for now") {
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
sql("SELECT a, b FROM jt").collect()
)
sql(
s"""
|INSERT INTO TABLE jsonTable SELECT a, b FROM jt
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM jsonTable"),
sql("SELECT a, b FROM jt UNION ALL SELECT a, b FROM jt").collect()
)
}
test("INSERT INTO TABLE with Comment in columns") {
val tabName = "tab1"
withTable(tabName) {
sql(
s"""
|CREATE TABLE $tabName(col1 int COMMENT 'a', col2 int)
|USING parquet
""".stripMargin)
sql(s"INSERT INTO TABLE $tabName SELECT 1, 2")
checkAnswer(
sql(s"SELECT col1, col2 FROM $tabName"),
Row(1, 2) :: Nil
)
}
}
test("INSERT INTO TABLE - complex type but different names") {
val tab1 = "tab1"
val tab2 = "tab2"
withTable(tab1, tab2) {
sql(
s"""
|CREATE TABLE $tab1 (s struct<a: string, b: string>)
|USING parquet
""".stripMargin)
sql(s"INSERT INTO TABLE $tab1 SELECT named_struct('col1','1','col2','2')")
sql(
s"""
|CREATE TABLE $tab2 (p struct<c: string, d: string>)
|USING parquet
""".stripMargin)
sql(s"INSERT INTO TABLE $tab2 SELECT * FROM $tab1")
checkAnswer(
spark.table(tab1),
spark.table(tab2)
)
}
}
test("it is not allowed to write to a table while querying it.") {
val message = intercept[AnalysisException] {
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jsonTable
""".stripMargin)
}.getMessage
assert(
message.contains("Cannot overwrite a path that is also being read from."),
"INSERT OVERWRITE to a table while querying it should not be allowed.")
}
test("Caching") {
// write something to the jsonTable
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a, b FROM jt
""".stripMargin)
// Cached Query Execution
spark.catalog.cacheTable("jsonTable")
assertCached(sql("SELECT * FROM jsonTable"))
checkAnswer(
sql("SELECT * FROM jsonTable"),
(1 to 10).map(i => Row(i, s"str$i")))
assertCached(sql("SELECT a FROM jsonTable"))
checkAnswer(
sql("SELECT a FROM jsonTable"),
(1 to 10).map(Row(_)).toSeq)
assertCached(sql("SELECT a FROM jsonTable WHERE a < 5"))
checkAnswer(
sql("SELECT a FROM jsonTable WHERE a < 5"),
(1 to 4).map(Row(_)).toSeq)
assertCached(sql("SELECT a * 2 FROM jsonTable"))
checkAnswer(
sql("SELECT a * 2 FROM jsonTable"),
(1 to 10).map(i => Row(i * 2)).toSeq)
assertCached(sql(
"SELECT x.a, y.a FROM jsonTable x JOIN jsonTable y ON x.a = y.a + 1"), 2)
checkAnswer(sql(
"SELECT x.a, y.a FROM jsonTable x JOIN jsonTable y ON x.a = y.a + 1"),
(2 to 10).map(i => Row(i, i - 1)).toSeq)
// Insert overwrite and keep the same schema.
sql(
s"""
|INSERT OVERWRITE TABLE jsonTable SELECT a * 2, b FROM jt
""".stripMargin)
// jsonTable should be recached.
assertCached(sql("SELECT * FROM jsonTable"))
// TODO we need to invalidate the cached data in InsertIntoHadoopFsRelation
// // The cached data is the new data.
// checkAnswer(
// sql("SELECT a, b FROM jsonTable"),
// sql("SELECT a * 2, b FROM jt").collect())
//
// // Verify uncaching
// spark.catalog.uncacheTable("jsonTable")
// assertCached(sql("SELECT * FROM jsonTable"), 0)
}
test("it's not allowed to insert into a relation that is not an InsertableRelation") {
sql(
"""
|CREATE TEMPORARY TABLE oneToTen
|USING org.apache.spark.sql.sources.SimpleScanSource
|OPTIONS (
| From '1',
| To '10'
|)
""".stripMargin)
checkAnswer(
sql("SELECT * FROM oneToTen"),
(1 to 10).map(Row(_)).toSeq
)
val message = intercept[AnalysisException] {
sql(
s"""
|INSERT OVERWRITE TABLE oneToTen SELECT CAST(a AS INT) FROM jt
""".stripMargin)
}.getMessage
assert(
message.contains("does not allow insertion."),
"It is not allowed to insert into a table that is not an InsertableRelation."
)
spark.catalog.dropTempView("oneToTen")
}
test("SPARK-15824 - Execute an INSERT wrapped in a WITH statement immediately") {
withTable("target", "target2") {
sql(s"CREATE TABLE target(a INT, b STRING) USING JSON")
sql("WITH tbl AS (SELECT * FROM jt) INSERT OVERWRITE TABLE target SELECT a, b FROM tbl")
checkAnswer(
sql("SELECT a, b FROM target"),
sql("SELECT a, b FROM jt")
)
sql(s"CREATE TABLE target2(a INT, b STRING) USING JSON")
val e = sql(
"""
|WITH tbl AS (SELECT * FROM jt)
|FROM tbl
|INSERT INTO target2 SELECT a, b WHERE a <= 5
|INSERT INTO target2 SELECT a, b WHERE a > 5
""".stripMargin)
checkAnswer(
sql("SELECT a, b FROM target2"),
sql("SELECT a, b FROM jt")
)
}
}
test("SPARK-21203 wrong results of insertion of Array of Struct") {
val tabName = "tab1"
withTable(tabName) {
spark.sql(
"""
|CREATE TABLE `tab1`
|(`custom_fields` ARRAY<STRUCT<`id`: BIGINT, `value`: STRING>>)
|USING parquet
""".stripMargin)
spark.sql(
"""
|INSERT INTO `tab1`
|SELECT ARRAY(named_struct('id', 1, 'value', 'a'), named_struct('id', 2, 'value', 'b'))
""".stripMargin)
checkAnswer(
spark.sql("SELECT custom_fields.id, custom_fields.value FROM tab1"),
Row(Array(1, 2), Array("a", "b")))
}
}
}
|
spark0001/spark2.1.1
|
sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
|
Scala
|
apache-2.0
| 10,925 |
package fpscala.c03
import fpscala.datastructures.{Cons, List => FpList, Nil => FpNil}
import org.scalatest.{FlatSpec, Matchers}
class Exercise13Spec extends FlatSpec with Matchers {
"foldLeft implemented in terms of foldRight" should "work" in {
Exercise13.foldLeft(FpList(1, 2, 3, 4), FpNil: FpList[Int])((b, a) => Cons(a, b)) shouldBe Exercise10.foldLeft(FpList(1, 2, 3, 4), FpNil: FpList[Int])((b, a) => Cons(a, b))
}
"foldRight implemented in terms of foldLeft" should "work" in {
Exercise13.foldRight(FpList(1, 2, 3, 4), FpNil: FpList[Int])(Cons(_, _)) shouldBe FpList.foldRight(FpList(1, 2, 3, 4), FpNil: FpList[Int])(Cons(_, _))
}
}
|
willtaylor/fpscala
|
src/test/scala/fpscala/c03/Exercise13Spec.scala
|
Scala
|
gpl-3.0
| 662 |
package org.scalajs.core.tools.json
import org.json.simple.JSONValue
import scala.collection.JavaConverters._
import java.io.{Writer, Reader}
private[json] object Impl extends AbstractJSONImpl {
type Repr = Object
def fromString(x: String): Repr = x
def fromNumber(x: Number): Repr = x
def fromBoolean(x: Boolean): Repr = java.lang.Boolean.valueOf(x)
def fromList(x: List[Repr]): Repr = x.asJava
def fromMap(x: Map[String, Repr]): Repr = x.asJava
def toString(x: Repr): String = x.asInstanceOf[String]
def toNumber(x: Repr): Number = x.asInstanceOf[Number]
def toBoolean(x: Repr): Boolean =
x.asInstanceOf[java.lang.Boolean].booleanValue()
def toList(x: Repr): List[Repr] =
x.asInstanceOf[java.util.List[Repr]].asScala.toList
def toMap(x: Repr): Map[String, Repr] =
x.asInstanceOf[java.util.Map[String, Repr]].asScala.toMap
def serialize(x: Repr): String =
JSONValue.toJSONString(x)
def serialize(x: Repr, writer: Writer): Unit =
JSONValue.writeJSONString(x, writer)
def deserialize(str: String): Repr = JSONValue.parse(str)
def deserialize(reader: Reader): Repr = JSONValue.parse(reader)
}
|
mdedetrich/scala-js
|
tools/jvm/src/main/scala/org/scalajs/core/tools/json/Impl.scala
|
Scala
|
bsd-3-clause
| 1,153 |
package org.jetbrains.plugins.scala
package lang.typeInference.generated
import lang.typeInference.TypeInferenceTestBase
import org.jetbrains.plugins.scala.util.TestUtils
import org.jetbrains.plugins.scala.util.TestUtils.ScalaSdkVersion
/**
* @author Alefas
* @since 11.12.12
*/
class TypeInferenceScalazTest extends TypeInferenceTestBase {
//This class was generated by build script, please don't change this
override def folderPath: String = super.folderPath + "scalaz/"
protected override def isIncludeScalazLibrary: Boolean = true
def testSCL3819() {doTest()}
def testSCL4033() {doTest()}
def testSCL4352() {doTest()}
def testSCL4468() {doTest()}
def testSCL4912() {doTest()}
def testSCL6417() {doTest()}
}
|
consulo/consulo-scala
|
test/org/jetbrains/plugins/scala/lang/typeInference/generated/TypeInferenceScalazTest.scala
|
Scala
|
apache-2.0
| 741 |
object Test {
def main(args: Array[String]): Unit = {
Macros.testDefinitions()
}
}
|
som-snytt/dotty
|
tests/run-macros/tasty-definitions-1/quoted_2.scala
|
Scala
|
apache-2.0
| 92 |
package sangria.validation.rules
import sangria.util.{Pos, ValidationSupport}
import org.scalatest.wordspec.AnyWordSpec
class InputDocumentNonConflictingVariableInferenceSpec extends AnyWordSpec with ValidationSupport {
override val defaultRule = Some(new InputDocumentNonConflictingVariableInference)
"InputDocumentNonConflictingVariableInference" should {
"variable used multiple times in the right position" in expectInputPasses(
"ComplexInput",
"""
{
requiredField: true
stringField: $foo
stringListField: [$foo]
}
""")
"variable used 2 times with incompatible types" in expectInputFails(
"ComplexInput",
"""
{
requiredField: $foo
stringField: "hello world"
stringListField: [$foo]
}
""",
List(
"Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." -> List(
Pos(5, 29),
Pos(3, 26))
)
)
"variable used multiple times with incompatible types" in expectInputFails(
"ComplexInput",
"""
{
requiredField: $foo
intField: $foo
stringField: $foo
stringListField: [$foo]
}
""",
List(
"Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'Int'." -> List(
Pos(4, 21),
Pos(3, 26)),
"Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." -> List(
Pos(5, 24),
Pos(3, 26)),
"Inferred variable '$foo' is used with two conflicting types: 'Boolean!' and 'String'." -> List(
Pos(6, 29),
Pos(3, 26))
)
)
}
}
|
OlegIlyenko/sangria
|
modules/core/src/test/scala/sangria/validation/rules/InputDocumentNonConflictingVariableInferenceSpec.scala
|
Scala
|
apache-2.0
| 1,752 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import org.bdgenomics.utils.instrumentation.{ Clock, Metrics, MetricsRecorder }
/**
* Represents a timer, for timing a function. Call the `time` function, passing the function to time.
*
* For recording metrics the [[Timer]] either uses the passed-in [[MetricsRecorder]] if it is defined, or it looks in
* the [[Metrics.Recorder]] field for a recorder. If neither of these are defined then no metrics are recorded
* (the function is executed without recording metrics).
*
* The overhead of recording metrics has been measured at around 100 nanoseconds on an Intel i7-3720QM. The overhead
* of calling the `time` method when no metrics are being recorded (a recorder is not defined) is negligible.
*
 * @note This class needs to be in the org.apache.spark.rdd package, otherwise Spark records a location inside the
 * `time` method as the call site (which in turn becomes the stage name).
* This can be fixed when Spark 1.1.1 is released (needs SPARK-1853).
*/
class Timer(name: String, clock: Clock = new Clock(), recorder: Option[MetricsRecorder] = None,
sequenceId: Option[Int] = None, isRDDOperation: Boolean = false) extends Serializable {
// Ensure all timer names are interned, since there should not be many distinct values and this will enable
// us to compare timer names much more efficiently (they can be compared by reference).
val timerName = name.intern()
/**
* Runs f, recording its duration, and returns its result.
*/
def time[A](f: => A): A = {
val recorderOption = if (recorder.isDefined) recorder else Metrics.Recorder.value
// If we were not initialized this will not be set, and nothing will
// be recorded (which is what we want)
recorderOption.fold { f } { (recorder) =>
val startTime = clock.nanoTime()
recorder.startPhase(timerName, sequenceId, isRDDOperation)
try {
f
} finally {
recorder.finishPhase(timerName, clock.nanoTime() - startTime)
}
}
}
}
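// Hedged usage sketch (added for illustration, not part of the original file): timing an
// arbitrary block. If no MetricsRecorder is registered in Metrics.Recorder, the block simply
// runs without being recorded.
object TimerExample {
  private val parseTimer = new Timer("ParseRecords")
  def lineLengths(lines: Seq[String]): Seq[Int] =
    parseTimer.time {
      lines.map(_.trim.length)
    }
}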
|
tdanford/bdg-utils
|
utils-metrics/src/main/scala/org/apache/spark/rdd/Timer.scala
|
Scala
|
apache-2.0
| 2,816 |
package com.datastax.spark.connector.rdd.partitioner
import java.net.InetAddress
import org.apache.spark.Partition
/** Stores a CQL `WHERE` predicate matching a range of tokens. */
case class CqlTokenRange(cql: String, values: Any*)
trait EndpointPartition extends Partition {
def endpoints: Iterable[InetAddress]
}
/** Metadata describing Cassandra table partition processed by a single Spark task.
* Beware the term "partition" is overloaded. Here, in the context of Spark,
* it means an arbitrary collection of rows that can be processed locally on a single Cassandra cluster node.
* A `CassandraPartition` typically contains multiple CQL partitions, i.e. rows identified by different values of
* the CQL partitioning key.
*
* @param index identifier of the partition, used internally by Spark
* @param endpoints which nodes the data partition is located on
* @param tokenRanges token ranges determining the row set to be fetched
* @param rowCount estimated total row count in a partition
*/
case class CassandraPartition(index: Int,
endpoints: Iterable[InetAddress],
tokenRanges: Iterable[CqlTokenRange],
rowCount: Long) extends EndpointPartition
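// Hedged construction sketch (added for illustration, not part of the original file): a
// single-token-range partition pinned to one node; the host, CQL predicate and bounds are
// illustrative only.
object CassandraPartitionExample {
  val partition = CassandraPartition(
    index = 0,
    endpoints = Seq(InetAddress.getByName("127.0.0.1")),
    tokenRanges = Seq(CqlTokenRange("token(key) > ? AND token(key) <= ?", Long.MinValue, 0L)),
    rowCount = 1000L)
}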
|
brkyvz/spark-cassandra-connector
|
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/rdd/partitioner/CassandraRDDPartition.scala
|
Scala
|
apache-2.0
| 1,269 |
package im.actor.server.user
import java.time.{ Instant, LocalDateTime, ZoneOffset }
import java.util.TimeZone
import akka.actor.{ ActorSystem, Status }
import akka.http.scaladsl.util.FastFuture
import akka.pattern.pipe
import im.actor.api.rpc.contacts.{ UpdateContactRegistered, UpdateContactsAdded, UpdateContactsRemoved }
import im.actor.api.rpc.messaging._
import im.actor.api.rpc.misc.ApiExtension
import im.actor.api.rpc.peers.{ ApiPeer, ApiPeerType }
import im.actor.api.rpc.users._
import im.actor.concurrent.FutureExt
import im.actor.config.ActorConfig
import im.actor.server.ApiConversions._
import im.actor.server.acl.ACLUtils
import im.actor.server.bots.BotCommand
import im.actor.server.file.{ Avatar, ImageUtils }
import im.actor.server.model.{ AvatarData, Sex, User }
import im.actor.server.model.contact.{ UserContact, UserEmailContact, UserPhoneContact }
import im.actor.server.names.{ GlobalNameOwner, OwnerType }
import im.actor.server.office.EntityNotFound
import im.actor.server.persist.contact._
import im.actor.server.persist._
import im.actor.server.sequence.{ PushRules, SequenceErrors }
import im.actor.server.social.SocialManager._
import im.actor.server.user.UserCommands._
import im.actor.server.user.UserErrors.{ BotCommandAlreadyExists, InvalidBotCommand }
import im.actor.util.misc.StringUtils
import im.actor.util.ThreadLocalSecureRandom
import org.joda.time.DateTime
import slick.driver.PostgresDriver.api._
import scala.concurrent.Future
import scala.util.{ Failure, Success }
import scala.util.control.NoStackTrace
abstract class UserError(message: String) extends RuntimeException(message) with NoStackTrace
object UserErrors {
final case class UserNotFound(id: Int) extends EntityNotFound(s"User $id not found")
case object NicknameTaken extends UserError("Nickname taken")
case object InvalidNickname extends UserError("Invalid nickname")
case object InvalidName extends UserError("Invalid name")
final case class InvalidTimeZone(tz: String) extends UserError(s"Invalid time zone: $tz")
final case class InvalidLocale(locale: String) extends UserError(s"Invalid locale: $locale")
case object EmptyLocalesList extends UserError("Empty locale list")
final case class InvalidBotCommand(slashCommand: String)
extends UserError(s"Invalid slash command: $slashCommand")
final case class BotCommandAlreadyExists(slashCommand: String)
extends UserError(s"Bot command already exists: $slashCommand")
case object ContactNotFound extends UserError("Contact not found")
}
private object ServiceMessages {
def contactRegistered(userId: Int, name: String)(implicit system: ActorSystem) = {
ApiServiceMessage(s"$name joined ${ActorConfig.projectName}", Some(ApiServiceExContactRegistered(userId)))
}
}
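/**
 * Added descriptive comment (not part of the original file; wording inferred from the handlers
 * below): each handler persists a UserEvents.* event, updates the processor state, writes
 * through to the SQL repositories and broadcasts the matching sequence update to the user's
 * relations.
 */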
private[user] trait UserCommandHandlers {
this: UserProcessor ⇒
import ImageUtils._
protected def create(
accessSalt: String,
nickname: Option[String],
name: String,
countryCode: String,
sex: ApiSex.ApiSex,
isBot: Boolean,
isAdmin: Boolean,
extensions: Seq[ApiExtension],
external: Option[String]
): Unit = {
log.debug("Creating user {} {}", userId, name)
val replyTo = sender()
onSuccess(checkNicknameExists(nickname)) { exists ⇒
if (!exists) {
val ts = now()
val e = UserEvents.Created(ts, userId, accessSalt, nickname, name, countryCode, sex, isBot, extensions, external, isAdmin = Some(isAdmin))
val user = UserBuilder(e)
persistStashingReply(e, user, replyTo) { evt ⇒
val user = User(
id = userId,
accessSalt = accessSalt,
nickname = nickname,
name = name,
countryCode = countryCode,
sex = Sex.fromInt(sex.id),
state = im.actor.server.model.UserState.Registered,
createdAt = LocalDateTime.now(ZoneOffset.UTC),
external = external,
isBot = isBot
)
db.run(for {
_ ← UserRepo.create(user)
} yield CreateAck()) andThen {
case Success(_) ⇒ userExt.hooks.afterCreate.runAll(user.id)
}
}
} else {
replyTo ! Status.Failure(UserErrors.NicknameTaken)
}
}
}
protected def updateIsAdmin(state: UserState, isAdmin: Option[Boolean]): Unit = {
persist(UserEvents.IsAdminUpdated(now(), isAdmin)) { e ⇒
context become working(updatedState(e, state))
sender() ! UpdateIsAdminAck()
}
}
protected def addAuth(user: UserState, authId: Long): Unit = {
persistStashingReply(UserEvents.AuthAdded(now(), authId), user) { _ ⇒
db.run(AuthSessionRepo.findByAuthId(authId)) foreach {
case Some(authSession) ⇒
seqUpdExt.registerAuthId(user.id, authId)
userExt.hooks.afterAuth.runAll(user.id, authSession.appId, authSession.deviceTitle)
case None ⇒ log.error("AuthSession for {} was not found", authId)
}
db.run(AuthIdRepo.setUserData(authId, user.id)) map (_ ⇒ NewAuthAck())
}
}
protected def removeAuth(user: UserState, authId: Long): Unit =
persistStashingReply(UserEvents.AuthRemoved(now(), authId), user) { _ ⇒
db.run(AuthIdRepo.delete(authId) map (_ ⇒ RemoveAuthAck()))
}
protected def changeCountryCode(user: UserState, countryCode: String): Unit =
persistReply(UserEvents.CountryCodeChanged(now(), countryCode), user) { e ⇒
db.run(UserRepo.setCountryCode(userId, countryCode) map (_ ⇒ ChangeCountryCodeAck()))
}
protected def changeName(user: UserState, authId: Long, name: String): Unit = {
val replyTo = sender()
if (StringUtils.validName(name).fold(l ⇒ false, r ⇒ true)) {
persistReply(UserEvents.NameChanged(now(), name), user) { _ ⇒
val update = UpdateUserNameChanged(userId, name)
for {
relatedUserIds ← getRelations(userId)
seqState ← seqUpdExt.broadcastClientUpdate(userId, authId, relatedUserIds, update)
_ ← db.run(UserRepo.setName(userId, name))
} yield seqState
}
} else {
replyTo ! Status.Failure(UserErrors.InvalidName)
}
}
protected def delete(user: UserState): Unit =
persistStashingReply(UserEvents.Deleted(now()), user) { _ ⇒
db.run(UserRepo.setDeletedAt(userId) map (_ ⇒ DeleteAck()))
}
protected def addPhone(user: UserState, phone: Long): Unit =
if (user.phones.contains(phone))
sender() ! AddPhoneAck()
else {
persistReply(UserEvents.PhoneAdded(now(), phone), user) { _ ⇒
val rng = ThreadLocalSecureRandom.current()
db.run(for {
_ ← UserPhoneRepo.create(rng.nextInt(), userId, ACLUtils.nextAccessSalt(rng), phone, "Mobile phone")
_ ← DBIO.from(markContactRegistered(user, phone, false))
} yield {
AddPhoneAck()
}) andThen {
case Failure(e) ⇒ log.error(e, "Failed to add phone")
}
}
}
protected def addEmail(user: UserState, email: String): Unit =
if (user.emails.contains(email))
sender() ! AddEmailAck()
else {
persistReply(UserEvents.EmailAdded(now(), email), user) { event ⇒
val rng = ThreadLocalSecureRandom.current()
db.run(for {
_ ← UserEmailRepo.create(rng.nextInt(), userId, ACLUtils.nextAccessSalt(rng), email, "Email")
_ ← DBIO.from(markContactRegistered(user, email, false))
} yield {
AddEmailAck()
}) andThen {
case Failure(e) ⇒ log.error(e, "Failed to add email")
}
}
}
protected def addSocialContact(user: UserState, contact: SocialContact): Unit =
persistReply(UserEvents.SocialContactAdded(now(), contact), user) { _ ⇒
FastFuture.successful(AddSocialContactAck())
}
protected def changeNickname(user: UserState, authId: Long, nicknameOpt: Option[String]): Unit = {
val replyTo = sender()
onSuccess(checkNicknameExists(nicknameOpt)) { exists ⇒
if (!exists) {
if (nicknameOpt forall StringUtils.validGlobalName) {
persistReply(UserEvents.NicknameChanged(now(), nicknameOpt), user, replyTo) { _ ⇒
val update = UpdateUserNickChanged(userId, nicknameOpt)
for {
_ ← globalNamesStorage.updateOrRemove(user.nickname, nicknameOpt, GlobalNameOwner(OwnerType.User, userId))
relatedUserIds ← getRelations(userId)
seqState ← seqUpdExt.broadcastClientUpdate(userId, authId, relatedUserIds, update)
} yield seqState
}
} else {
replyTo ! Status.Failure(UserErrors.InvalidNickname)
}
} else {
replyTo ! Status.Failure(UserErrors.NicknameTaken)
}
}
}
protected def changeAbout(user: UserState, authId: Long, about: Option[String]): Unit = {
persistReply(UserEvents.AboutChanged(now(), about), user) { _ ⇒
val update = UpdateUserAboutChanged(userId, about)
for {
_ ← db.run(UserRepo.setAbout(userId, about))
relatedUserIds ← getRelations(userId)
seqState ← seqUpdExt.broadcastClientUpdate(userId, authId, relatedUserIds, update)
} yield seqState
}
}
protected def changeTimeZone(user: UserState, authId: Long, timeZone: String): Unit = {
def validTimeZone(tz: String): Boolean = TimeZone.getAvailableIDs.contains(tz)
if (validTimeZone(timeZone)) {
if (!user.timeZone.contains(timeZone)) {
persistReply(UserEvents.TimeZoneChanged(now(), Some(timeZone)), user) { _ ⇒
val update = UpdateUserTimeZoneChanged(user.id, Some(timeZone))
for {
relatedUserIds ← getRelations(user.id)
seqState ← seqUpdExt.broadcastClientUpdate(user.id, authId, relatedUserIds, update)
} yield seqState
}
} else sender() ! Status.Failure(SequenceErrors.UpdateAlreadyApplied(UserFields.TimeZone))
} else {
val e = UserErrors.InvalidTimeZone(timeZone)
if (timeZone.nonEmpty)
log.error(e, "Invalid time zone")
sender() ! Status.Failure(e)
}
}
protected def changePreferredLanguages(user: UserState, authId: Long, preferredLanguages: Seq[String]): Unit = {
def validLocale(l: String): Boolean = l matches "^[a-z]{2}(?:-[A-Z]{2})?$"
preferredLanguages.find(l ⇒ !validLocale(l)) match {
case Some(invalid) ⇒
val e = UserErrors.InvalidLocale(invalid)
log.error(e, "Invalid preferred language")
sender() ! Status.Failure(e)
case None ⇒
preferredLanguages match {
case Nil ⇒ sender() ! Status.Failure(UserErrors.EmptyLocalesList)
case pl if pl == user.preferredLanguages ⇒
sender() ! Status.Failure(SequenceErrors.UpdateAlreadyApplied(UserFields.PreferredLanguages))
case _ ⇒
persistReply(UserEvents.PreferredLanguagesChanged(now(), preferredLanguages), user) { _ ⇒
val update = UpdateUserPreferredLanguagesChanged(user.id, preferredLanguages.toVector)
for {
relatedUserIds ← getRelations(user.id)
stateState ← seqUpdExt.broadcastClientUpdate(user.id, authId, relatedUserIds, update)
} yield stateState
}
}
}
}
protected def addBotCommand(user: UserState, rawCommand: BotCommand): Unit = {
val command = rawCommand.copy(slashCommand = rawCommand.slashCommand.trim)
def isValid(command: BotCommand) = command.slashCommand.matches("^[0-9a-zA-Z_]{2,32}")
if (user.botCommands.exists(_.slashCommand == command.slashCommand)) {
sender() ! Status.Failure(BotCommandAlreadyExists(command.slashCommand))
} else {
if (isValid(command)) {
persistReply(UserEvents.BotCommandAdded(now(), command), user) { _ ⇒
val update = UpdateUserBotCommandsChanged(user.id, user.botCommands :+ command)
for {
relatedUserIds ← getRelations(user.id)
_ ← seqUpdExt.broadcastPeopleUpdate(relatedUserIds + user.id, update)
} yield AddBotCommandAck()
}
} else {
sender() ! Status.Failure(InvalidBotCommand(command.slashCommand))
}
}
}
protected def removeBotCommand(user: UserState, slashCommand: String) =
if (user.botCommands.exists(_.slashCommand == slashCommand)) {
persistReply(UserEvents.BotCommandRemoved(now(), slashCommand), user) { _ ⇒
val update = UpdateUserBotCommandsChanged(user.id, user.botCommands.filterNot(_.slashCommand == slashCommand))
for {
relatedUserIds ← getRelations(user.id)
_ ← seqUpdExt.broadcastPeopleUpdate(relatedUserIds + user.id, update)
} yield RemoveBotCommandAck()
}
} else {
sender() ! RemoveBotCommandAck()
}
protected def addExt(user: UserState, ext: UserExt) = {
persist(UserEvents.ExtAdded(now(), ext)) { e ⇒
val newState = updatedState(e, user)
context become working(newState)
val update = UpdateUserExtChanged(userId, Some(extToApi(newState.ext)))
(for {
relatedUserIds ← getRelations(user.id)
_ ← seqUpdExt.broadcastPeopleUpdate(relatedUserIds + user.id, update)
} yield AddExtAck()) pipeTo sender()
}
}
protected def removeExt(user: UserState, key: String) = {
persist(UserEvents.ExtRemoved(now(), key)) { e ⇒
val newState = updatedState(e, user)
context become working(newState)
val update = UpdateUserExtChanged(userId, Some(extToApi(newState.ext)))
(for {
relatedUserIds ← getRelations(user.id)
_ ← seqUpdExt.broadcastPeopleUpdate(relatedUserIds + user.id, update)
} yield RemoveExtAck()) pipeTo sender()
}
}
protected def updateAvatar(user: UserState, authId: Long, avatarOpt: Option[Avatar]): Unit = {
persistReply(UserEvents.AvatarUpdated(now(), avatarOpt), user) { evt ⇒
val avatarData = avatarOpt map (getAvatarData(AvatarData.OfUser, user.id, _)) getOrElse AvatarData.empty(AvatarData.OfUser, user.id.toLong)
val update = UpdateUserAvatarChanged(user.id, avatarOpt)
val relationsF = getRelations(user.id)
for {
_ ← db.run(AvatarDataRepo.createOrUpdate(avatarData))
relatedUserIds ← relationsF
seqState ← seqUpdExt.broadcastClientUpdate(user.id, authId, relatedUserIds, update)
} yield UpdateAvatarAck(avatarOpt, seqState)
}
}
protected def addContacts(
user: UserState,
authId: Long,
contactsToAdd: Seq[UserCommands.ContactToAdd]
): Unit = {
val (idsLocalNames, plains, phones, emails) =
contactsToAdd.view
.filterNot(_.contactUserId == user.id)
.map {
case UserCommands.ContactToAdd(contactUserId, localNameOpt, phoneOpt, emailOpt) ⇒
val phone = phoneOpt map (UserPhoneContact(_, user.id, contactUserId, localNameOpt, isDeleted = false))
val email = emailOpt map (UserEmailContact(_, user.id, contactUserId, localNameOpt, isDeleted = false))
val plain =
if (phone.isDefined || email.isDefined)
None
else Some(UserContact(user.id, contactUserId, localNameOpt, isDeleted = false))
((contactUserId, localNameOpt), plain, phone, email)
}
.foldLeft(Map.empty[Int, Option[String]], Seq.empty[UserContact], Seq.empty[UserPhoneContact], Seq.empty[UserEmailContact]) {
case ((idsLocalNames, plains, phones, emails), (idLocalName, plain, phone, email)) ⇒
(
idsLocalNames + idLocalName,
plain.map(plains :+ _).getOrElse(plains),
phone.map(phones :+ _).getOrElse(phones),
email.map(emails :+ _).getOrElse(emails)
)
}
(for {
_ ← FutureExt.ftraverse(plains)(c ⇒ db.run(UserContactRepo.insertOrUpdate(c)))
_ ← FutureExt.ftraverse(phones)(c ⇒ db.run(UserPhoneContactRepo.insertOrUpdate(c)))
_ ← FutureExt.ftraverse(emails)(c ⇒ db.run(UserEmailContactRepo.insertOrUpdate(c)))
_ ← FutureExt.ftraverse(idsLocalNames.toSeq) {
case (contactUserId, localName) ⇒ contacts.editLocalNameSilently(authId, contactUserId, localName)
}
update = UpdateContactsAdded(idsLocalNames.keys.toVector)
seqState ← seqUpdExt.deliverClientUpdate(user.id, authId, update, PushRules(isFat = true))
} yield seqState) pipeTo sender()
}
protected def removeContact(
user: UserState,
authId: Long,
contactUserId: Int
): Unit = {
val updLocalName = UpdateUserLocalNameChanged(contactUserId, None)
val updContact = UpdateContactsRemoved(Vector(contactUserId))
(db.run(UserContactRepo.find(user.id, contactUserId)) flatMap {
case Some(_) ⇒
for {
_ ← db.run(UserContactRepo.delete(user.id, contactUserId))
_ ← seqUpdExt.deliverUserUpdate(user.id, updLocalName)
seqState ← seqUpdExt.deliverClientUpdate(user.id, authId, updContact)
} yield seqState
case None ⇒ Future.failed(UserErrors.ContactNotFound)
}) pipeTo sender()
}
private def checkNicknameExists(nicknameOpt: Option[String]): Future[Boolean] = {
nicknameOpt match {
case Some(nickname) ⇒ globalNamesStorage.exists(nickname)
case None ⇒ FastFuture.successful(false)
}
}
// TODO: DRY it, finally!
private def markContactRegistered(user: UserState, phoneNumber: Long, isSilent: Boolean): Future[Unit] = {
val dateMillis = Instant.now.toEpochMilli
for {
contacts ← db.run(UnregisteredPhoneContactRepo.find(phoneNumber))
_ = log.debug(s"Unregistered $phoneNumber is in contacts of users: $contacts")
_ ← Future.sequence(contacts map { contact ⇒
val randomId = ThreadLocalSecureRandom.current().nextLong()
val updateContactRegistered = UpdateContactRegistered(user.id, isSilent, dateMillis, randomId)
val updateContactsAdded = UpdateContactsAdded(Vector(user.id))
val localName = contact.name
val serviceMessage = ServiceMessages.contactRegistered(user.id, localName.getOrElse(user.name))
for {
_ ← userExt.addContact(contact.ownerUserId, user.id, localName, Some(phoneNumber), None)
_ ← seqUpdExt.deliverUserUpdate(
contact.ownerUserId,
updateContactRegistered,
pushRules = seqUpdExt.pushRules(isFat = true, Some(serviceMessage.text))
)
_ ← seqUpdExt.deliverUserUpdate(
contact.ownerUserId,
updateContactsAdded,
pushRules = seqUpdExt.pushRules(isFat = false, None)
)
_ ← dialogExt.writeMessageSelf(
contact.ownerUserId,
ApiPeer(ApiPeerType.Private, user.id),
user.id,
dateMillis,
randomId,
serviceMessage
)
} yield {
recordRelation(user.id, contact.ownerUserId)
}
})
_ ← db.run(UnregisteredPhoneContactRepo.deleteAll(phoneNumber))
} yield ()
}
private def markContactRegistered(user: UserState, email: String, isSilent: Boolean): Future[Unit] = {
val dateMillis = Instant.now.toEpochMilli
for {
_ ← userExt.hooks.beforeEmailContactRegistered.runAll(user.id, email)
contacts ← db.run(UnregisteredEmailContactRepo.find(email))
_ = log.debug(s"Unregistered $email is in contacts of users: $contacts")
_ ← Future.sequence(contacts.map { contact ⇒
val randomId = ThreadLocalSecureRandom.current().nextLong()
val updateContactRegistered = UpdateContactRegistered(user.id, isSilent, dateMillis, randomId)
val updateContactsAdded = UpdateContactsAdded(Vector(user.id))
val localName = contact.name
val serviceMessage = ServiceMessages.contactRegistered(user.id, localName.getOrElse(user.name))
for {
_ ← userExt.addContact(contact.ownerUserId, user.id, localName, None, Some(email))
_ ← seqUpdExt.deliverUserUpdate(
contact.ownerUserId,
updateContactRegistered,
pushRules = seqUpdExt.pushRules(isFat = true, Some(serviceMessage.text))
)
_ ← seqUpdExt.deliverUserUpdate(
contact.ownerUserId,
updateContactsAdded,
pushRules = seqUpdExt.pushRules(isFat = false, None)
)
_ ← dialogExt.writeMessageSelf(
contact.ownerUserId,
ApiPeer(ApiPeerType.Private, user.id),
user.id,
dateMillis,
randomId,
serviceMessage
)
} yield recordRelation(user.id, contact.ownerUserId)
})
_ ← db.run(UnregisteredEmailContactRepo.deleteAll(email))
} yield ()
}
}
|
EaglesoftZJ/actor-platform
|
actor-server/actor-core/src/main/scala/im/actor/server/user/UserCommandHandlers.scala
|
Scala
|
agpl-3.0
| 20,957 |
/*
* Copyright 2017 Mediative
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mediative.amadou
import org.apache.spark.sql._
import scala.util.{Failure, Success, Try}
sealed trait Stage[-I, +T] { self =>
def name: String
def map[U](f: T => U): Stage[I, U] = new Stage[I, U] {
override def name = self.name
override def run(ctx: Stage.Context[I]): Stage.Result[U] = self.run(ctx).map(f)
}
def flatMap[U](f: T => Stage[T, U]): Stage[I, U] = new Stage[I, U] {
override def name = self.name
override def run(ctx: Stage.Context[I]): Stage.Result[U] =
self.run(ctx).flatMap(data => f(data).run(ctx.withValue(data)))
}
def andThen[U](s: Stage[T, U]): Stage[I, U] = flatMap(_ => s)
def ~>[U](s: Stage[T, U]): Stage[I, U] = andThen(s)
def run(ctx: Stage.Context[I]): Stage.Result[T]
}
object Stage {
type Result[A] = Try[A]
abstract class Context[+I](val spark: SparkSession, val date: DateInterval, val value: I) {
def withValue[U](value: U): Context[U]
def run[T](stage: Stage[I, T], result: => T): Result[T] = Try(result)
}
object Context {
def apply(spark: SparkSession, date: DateInterval): Context[SparkSession] =
new SimpleContext(spark, date, spark)
}
class SimpleContext[+I](spark: SparkSession, date: DateInterval, value: I)
extends Context[I](spark, date, value) {
override def withValue[U](value: U) = new SimpleContext(spark, date, value)
}
def apply[S, T](stageName: String)(f: Stage.Context[S] => T): Stage[S, T] = new Stage[S, T] {
override def name = stageName
override def run(ctx: Stage.Context[S]) = ctx.run(this, f(ctx))
}
/**
* Read data from a data source.
*
* May be used anywhere in a for-expression to read from a data source.
*/
def source[T](name: String)(read: Stage.Context[SparkSession] => Dataset[T]) =
Stage(name) { ctx: Stage.Context[_] =>
read(ctx.withValue(ctx.spark))
}
def transform[S, T](name: String)(transform: Stage.Context[Dataset[S]] => Dataset[T]) =
Stage(name)(transform)
def sink[T](name: String)(write: Stage.Context[Dataset[T]] => Unit) =
Stage(name)((ctx: Stage.Context[Dataset[T]]) => { write(ctx); ctx.value })
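  // Illustrative sketch: assuming a case class `Event` with an implicit Encoder, a
  // SparkSession `spark` and a DateInterval `date` in scope (all hypothetical), the
  // three helpers above might be composed as:
  //
  //   val job =
  //     source("events") { ctx => ctx.spark.read.parquet("/in/events").as[Event] } ~>
  //     transform[Event, Event]("clean") { ctx => ctx.value.dropDuplicates() } ~>
  //     sink[Event]("store") { ctx => ctx.value.write.parquet("/out/events") }
  //
  //   job.run(Context(spark, date))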
def sequence[S, T](stages: Seq[Stage[S, T]]): Stage[S, Seq[T]] = new Stage[S, Seq[T]] {
override def name = "sequence"
override def run(ctx: Stage.Context[S]): Stage.Result[Seq[T]] = {
@scala.annotation.tailrec
def iterate(stages: Seq[Stage[S, T]], results: Seq[T]): Stage.Result[Seq[T]] =
stages match {
case Seq() => Success(results)
case Seq(stage, rest @ _*) =>
stage.run(ctx) match {
case Success(result) => iterate(rest, results :+ result)
case Failure(exception) => Failure(exception)
}
}
iterate(stages, Seq.empty)
}
}
case class SequenceAllException[S, T](failures: Seq[(Stage[S, T], Throwable)]) extends Exception {
override def getMessage =
failures
.map {
case (stage, failure) =>
s"${stage.name} failed: (${failure.getClass.getName}) ${failure.getMessage}"
}
.mkString(s"${failures.size} stage(s) failed:\\n - ", "\\n - ", "")
}
/**
* Combine multiple stages into a single stage which fails if any of them results
* in a failure.
*/
def sequenceAll[S, T](stages: Seq[Stage[S, T]]): Stage[S, Seq[T]] = new Stage[S, Seq[T]] {
override def name = "sequenceAll"
override def run(ctx: Stage.Context[S]): Stage.Result[Seq[T]] = {
val results: Seq[(Stage[S, T], Stage.Result[T])] =
stages.map(stage => stage -> stage.run(ctx))
results.filter(_._2.isFailure) match {
case Seq() => Success(results.map(_._2.get))
case failures =>
Failure(SequenceAllException(failures.map {
case (stage, result) => stage -> result.failed.get
}))
}
}
}
def identity[T] = new Stage[T, T] { self =>
override def name = "identity"
override def flatMap[U](f: T => Stage[T, U]): Stage[T, U] = new Stage[T, U] {
override def name = self.name
override def run(ctx: Stage.Context[T]): Stage.Result[U] =
Try(f(ctx.value)).flatMap(stage => stage.run(ctx))
}
override def run(ctx: Stage.Context[T]) = ctx.run(this, ctx.value)
}
}
|
mediative/amadou
|
core/src/main/scala/com.mediative.amadou/Stage.scala
|
Scala
|
apache-2.0
| 4,932 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.docgen
import org.junit.Test
import org.junit.Assert._
class AggregationTest extends DocumentingTestBase {
def graphDescription = List("A KNOWS B", "A KNOWS C", "A KNOWS D")
override val properties: Map[String, Map[String, Any]] = Map(
"A" -> Map("property" -> 13),
"B" -> Map("property" -> 33, "eyes" -> "blue"),
"C" -> Map("property" -> 44, "eyes" -> "blue"),
"D" -> Map("eyes" -> "brown")
)
def section = "Aggregation"
@Test def countNodes() {
testQuery(
title = "Count nodes",
text = "To count the number of nodes, for example the number of nodes connected to one node, you can use `count(*)`.",
queryText = "start n=node(%A%) match (n)-->(x) return n, count(*)",
returns = "This returns the start node and the count of related nodes.",
assertions = p => assertEquals(Map("n" -> node("A"), "count(*)" -> 3), p.toList.head))
}
@Test def countRelationshipsByType() {
testQuery(
title = "Group Count Relationship Types",
text = "To count the groups of relationship types, return the types and count them with `count(*)`.",
queryText = "start n=node(%A%) match (n)-[r]->() return type(r), count(*)",
returns = "The relationship types and their group count is returned by the query.",
assertions = p => assertEquals(Map("type(r)" -> "KNOWS", "count(*)" -> 3), p.toList.head))
}
@Test def countEntities() {
testQuery(
title = "Count entities",
text = "Instead of counting the number of results with `count(*)`, it might be more expressive to include " +
"the name of the identifier you care about.",
queryText = "start n=node(%A%) match (n)-->(x) return count(x)",
returns = "The example query returns the number of connected nodes from the start node.",
assertions = p => assertEquals(Map("count(x)" -> 3), p.toList.head))
}
@Test def countNonNullValues() {
testQuery(
title = "Count non-null values",
text = "You can count the non-`null` values by using +count(<identifier>)+.",
queryText = "start n=node(%A%,%B%,%C%,%D%) return count(n.property?)",
returns = "The count of related nodes with the `property` property set is returned by the query.",
assertions = p => assertEquals(Map("count(n.property?)" -> 3), p.toList.head))
}
@Test def sumProperty() {
testQuery(
title = "SUM",
text = "The +SUM+ aggregation function simply sums all the numeric values it encounters. " +
"Nulls are silently dropped. This is an example of how you can use +SUM+.",
queryText = "start n=node(%A%,%B%,%C%) return sum(n.property)",
returns = "This returns the sum of all the values in the property `property`.",
assertions = p => assertEquals(Map("sum(n.property)" -> (13 + 33 + 44)), p.toList.head))
}
@Test def avg() {
testQuery(
title = "AVG",
text = "+AVG+ calculates the average of a numeric column.",
queryText = "start n=node(%A%,%B%,%C%) return avg(n.property)",
returns = "The average of all the values in the property `property` is returned by the example query.",
assertions = p => assertEquals(Map("avg(n.property)" -> 30), p.toList.head))
}
@Test def min() {
testQuery(
title = "MIN",
text = "+MIN+ takes a numeric property as input, and returns the smallest value in that column.",
queryText = "start n=node(%A%,%B%,%C%) return min(n.property)",
returns = "This returns the smallest of all the values in the property `property`.",
assertions = p => assertEquals(Map("min(n.property)" -> 13), p.toList.head))
}
@Test def max() {
testQuery(
title = "MAX",
text = "+MAX+ find the largets value in a numeric column.",
queryText = "start n=node(%A%,%B%,%C%) return max(n.property)",
returns = "The largest of all the values in the property `property` is returned.",
assertions = p => assertEquals(Map("max(n.property)" -> 44), p.toList.head))
}
@Test def collect() {
testQuery(
title = "COLLECT",
text = "+COLLECT+ collects all the values into a list. It will ignore null values,",
queryText = "start n=node(%A%,%B%,%C%,%D%) return collect(n.property?)",
returns = "Returns a single row, with all the values collected.",
assertions = p => assertEquals(Map("collect(n.property?)" -> Seq(13, 33, 44)), p.toList.head))
}
@Test def count_distinct() {
testQuery(
title = "DISTINCT",
text = """All aggregation functions also take the +DISTINCT+ modifier, which removes duplicates from the values.
So, to count the number of unique eye colors from nodes related to `a`, this query can be used: """,
queryText = "start a=node(%A%) match a-->b return count(distinct b.eyes)",
returns = "Returns the number of eye colors.",
assertions = p => assertEquals(Map("count(distinct b.eyes)" -> 2), p.toList.head))
}
@Test def intro() {
testQuery(
title = "Introduction",
text = """To calculate aggregated data, Cypher offers aggregation, much like SQL's +GROUP BY+.
Aggregate functions take multiple input values and calculate an aggregated value from them. Examples are +AVG+ that
calculates the average of multiple numeric values, or +MIN+ that finds the smallest numeric value in a set of values.
Aggregation can be done over all the matching subgraphs, or it can be further divided by introducing key values.
These are non-aggregate expressions that are used to group the values going into the aggregate functions.
So, if the return statement looks something like this:
[source,cypher]
----
RETURN n, count(*)
----
We have two return expressions -- `n`, and `count(*)`. The first, `n`, is not an aggregate function, and so it will be the
grouping key. The latter, `count(*)`, is an aggregate expression. So the matching subgraphs will be divided into
different buckets, depending on the grouping key. The aggregate function will then run on these buckets, calculating
the aggregate values.
The last piece of the puzzle is the +DISTINCT+ keyword. It is used to make all values unique before running them through
an aggregate function.
An example might be helpful:""",
queryText = "" +
"START me=node(1) " +
"MATCH me-->friend-->friend_of_friend " +
"RETURN count(distinct friend_of_friend), count(friend_of_friend)",
returns = "In this example we are trying to find all our friends of friends, and count them. The first aggregate function, " +
"+count(distinct friend_of_friend)+, will only see a `friend_of_friend` once -- +DISTINCT+ removes the duplicates. The latter " +
"aggregate function, +count(friend_of_friend)+, might very well see the same `friend_of_friend` multiple times. Since there is " +
"no real data in this case, an empty result is returned. See the sections below for real data.",
assertions = p => assertTrue(true))
}
@Test def percentile_disc() {
testQuery(
title = "PERCENTILE_DISC",
text = "+PERCENTILE_DISC+ calculates the percentile of a given value over a group, with a percentile from 0.0 to 1.0. It uses a rounding method, returning the nearest value to the percentile. For interpolated values, see PERCENTILE_CONT.",
queryText = "start n=node(%A%,%B%,%C%) return percentile_disc(n.property, 0.5)",
returns = "The 50th percentile of the values in the property `property` is returned by the example query. In this case, 0.5 is the median, or 50th percentile.",
assertions = p => assertEquals(Map("percentile_disc(n.property, 0.5)" -> 33), p.toList.head))
}
@Test def percentile_cont() {
testQuery(
title = "PERCENTILE_CONT",
text = "+PERCENTILE_CONT+ calculates the percentile of a given value over a group, with a percentile from 0.0 to 1.0. It uses a linear interpolation method, calculating a weighted average between two values, if the desired percentile lies between them. For nearest values using a rounding method, see PERCENTILE_DISC.",
queryText = "start n=node(%A%,%B%,%C%) return percentile_cont(n.property, 0.4)",
returns = "The 40th percentile of the values in the property `property` is returned by the example query, calculated with a weighted average.",
assertions = p => assertEquals(Map("percentile_cont(n.property, 0.4)" -> 29), p.toList.head))
}
}
|
dksaputra/community
|
cypher/src/test/scala/org/neo4j/cypher/docgen/AggregationTest.scala
|
Scala
|
gpl-3.0
| 9,203 |
package connectorFamily.featureModel
sealed abstract class AttrConstr
case class IfOut(exp: Exp) extends AttrConstr
case class Require(fid: FID) extends AttrConstr
case class Exclude(fid: FID) extends AttrConstr
//// Booleans - 5 operators ////
abstract class Exp extends AttrConstr {
def &&(e:Exp) = new AndExp(this,e)
def ||(e:Exp) = new OrExp(this,e)
def -->(e:Exp) = new ImpliesExp(this,e)
def <->(e:Exp) = new EquivExp(this,e)
def unary_! = new NegExp(this)
}
case object True extends Exp
case object False extends Exp
case class NegExp(e:Exp) extends Exp
//abstract class BoolExp extends Binary[Boolean](l,r);
case class AndExp(left:Exp,right:Exp) extends Exp
case class OrExp(left:Exp,right:Exp) extends Exp
case class ImpliesExp(left:Exp,right:Exp) extends Exp
case class EquivExp(left:Exp,right:Exp) extends Exp
//// Integers - 6 arithmetic + 6 relational operators ////
sealed abstract class Term {
def *(t:Term) = new MultMultTerm(this,t)
def /(t:Term) = new DivMultTerm(this,t)
def %(t:Term) = new ModMultTerm(this,t)
def +(t:Term) = new AddAddTerm(this,t)
def -(t:Term) = new SubAddTerm(this,t)
def unary_- = new MinusTerm(this)
def <==(t:Term) = new LTEQExp(this,t) // implicit conversions stop working with <=, <, <<.
def >==(t:Term) = new GTEQExp(this,t)
def <<<(t:Term) = new LTExp(this,t)
def >>>(t:Term) = new GTExp(this,t)
def ===(t:Term) = new EqExp(this,t)
def !==(t:Term) = new NotEqExp(this,t)
}
case class IntVal(n:Int) extends Term
case class MinusTerm(op:Term) extends Term
abstract class MultTerm extends Term
case class MultMultTerm(left:Term,right:Term) extends MultTerm
case class DivMultTerm(left:Term,right:Term) extends MultTerm
case class ModMultTerm(left:Term,right:Term) extends MultTerm
abstract class AddTerm extends Term
case class AddAddTerm(left:Term,right:Term) extends AddTerm
case class SubAddTerm(left:Term,right:Term) extends AddTerm
abstract class RelationalExpr extends Exp
case class LTExp(left:Term,right:Term) extends RelationalExpr
case class GTExp(left:Term,right:Term) extends RelationalExpr
case class LTEQExp(left:Term,right:Term) extends RelationalExpr
case class GTEQExp(left:Term,right:Term) extends RelationalExpr
abstract class EqualityExpr extends RelationalExpr
case class EqExp(left:Term,right:Term) extends EqualityExpr
case class NotEqExp(left:Term,right:Term) extends EqualityExpr ;
//// Variables ////
case class FIDExp(fid:FID) extends Exp
case class AIDExp(aid:AID) extends Exp
case class AIDTerm(aid:AID) extends Term
//TODO: maybe add FIDTerm (features as terms) and qualified features.
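// Illustrative sketch, assuming values fid: FID and aid: AID are in scope: the DSL
// above lets attribute constraints be written as, e.g.,
//   IfOut(FIDExp(fid) --> ((AIDTerm(aid) + IntVal(1)) <== IntVal(10)))
// i.e. "if this feature is selected, the attribute plus one must not exceed 10".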
|
joseproenca/connector-family
|
src/main/scala/connectorFamily/featureModel/AttrConstr.scala
|
Scala
|
mit
| 2,667 |
/*
* Copyright 2011-2017 Chris de Vreeze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.cdevreeze.yaidom.queryapi
/**
* Super-trait for all element query API traits, promising a self type.
*
* Simplicity and consistency of the entire query API are 2 important design considerations. For example, the query
* API methods themselves use no generics.
*
* @author Chris de Vreeze
*/
trait AnyElemApi {
// The type member below is used for implementing F-bounded polymorphism.
// Note that we need no surrounding cake, and we need no types like ThisApi#ThisElem.
// For F-bounded polymorphism in DOT, see http://www.cs.uwm.edu/~boyland/fool2012/papers/fool2012_submission_3.pdf.
/**
* The element type itself. It must be restricted to a sub-type of the query API trait in question.
*
* Concrete element classes will restrict this type to that element class itself.
*/
type ThisElem <: AnyElemApi
/**
* This element itself.
*/
def thisElem: ThisElem
}
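// Illustrative sketch (the class name `MyElem` is hypothetical): a concrete element
// class ties the F-bounded knot by restricting ThisElem to itself:
//
//   final class MyElem extends AnyElemApi {
//     type ThisElem = MyElem
//     def thisElem: MyElem = this
//   }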
|
dvreeze/yaidom
|
shared/src/main/scala/eu/cdevreeze/yaidom/queryapi/AnyElemApi.scala
|
Scala
|
apache-2.0
| 1,514 |
package fpinscala.parallelism
import java.util.concurrent._
object Par {
type Par[A] = ExecutorService => Future[A]
def run[A](s: ExecutorService)(a: Par[A]): Future[A] = a(s)
def unit[A](a: A): Par[A] = (es: ExecutorService) => UnitFuture(a)
// `unit` is represented as a function that returns a `UnitFuture`, which is a simple implementation of `Future` that just wraps a constant value. It doesn't use the `ExecutorService` at all. It's always done and can't be cancelled. Its `get` method simply returns the value that we gave it.
private case class UnitFuture[A](get: A) extends Future[A] {
def isDone = true
def get(timeout: Long, units: TimeUnit) = get
def isCancelled = false
def cancel(evenIfRunning: Boolean): Boolean = false
}
def map2[A, B, C](a: Par[A], b: Par[B])(f: (A, B) => C): Par[C] = // `map2` doesn't evaluate the call to `f` in a separate logical thread, in accord with our design choice of having `fork` be the sole function in the API for controlling parallelism. We can always do `fork(map2(a,b)(f))` if we want the evaluation of `f` to occur in a separate thread.
(es: ExecutorService) => {
val af = a(es)
val bf = b(es)
UnitFuture(f(af.get, bf.get)) // This implementation of `map2` does _not_ respect timeouts, and eagerly waits for the returned futures. This means that even if you have passed in "forked" arguments, using this map2 on them will make them wait. It simply passes the `ExecutorService` on to both `Par` values, waits for the results of the Futures `af` and `bf`, applies `f` to them, and wraps them in a `UnitFuture`. In order to respect timeouts, we'd need a new `Future` implementation that records the amount of time spent evaluating `af`, then subtracts that time from the available time allocated for evaluating `bf`.
}
def fork[A](a: => Par[A]): Par[A] = // This is the simplest and most natural implementation of `fork`, but there are some problems with it--for one, the outer `Callable` will block waiting for the "inner" task to complete. Since this blocking occupies a thread in our thread pool, or whatever resource backs the `ExecutorService`, this implies that we're losing out on some potential parallelism. Essentially, we're using two threads when one should suffice. This is a symptom of a more serious problem with the implementation, and we will discuss this later in the chapter.
es => es.submit(new Callable[A] {
def call = a(es).get
})
def map[A, B](pa: Par[A])(f: A => B): Par[B] =
map2(pa, unit(()))((a, _) => f(a))
def flatMap[A, B](p: Par[A])(choices: A => Par[B]): Par[B] =
es => {
val k = run(es)(p).get
run(es)(choices(k))
}
def sortPar(parList: Par[List[Int]]) = map(parList)(_.sorted)
def equal[A](e: ExecutorService)(p: Par[A], p2: Par[A]): Boolean =
p(e).get == p2(e).get
def delay[A](fa: => Par[A]): Par[A] =
es => fa(es)
def choice[A](cond: Par[Boolean])(t: Par[A], f: Par[A]): Par[A] =
es =>
if (run(es)(cond).get) t(es) // Notice we are blocking on the result of `cond`.
else f(es)
/* Gives us infix syntax for `Par`. */
implicit def toParOps[A](p: Par[A]): ParOps[A] = new ParOps(p)
class ParOps[A](p: Par[A]) {
}
}
object Examples {
import Par._
def sum(ints: IndexedSeq[Int]): Int = // `IndexedSeq` is a superclass of random-access sequences like `Vector` in the standard library. Unlike lists, these sequences provide an efficient `splitAt` method for dividing them into two parts at a particular index.
if (ints.size <= 1)
ints.headOption getOrElse 0 // `headOption` is a method defined on all collections in Scala. We saw this function in chapter 3.
else {
val (l, r) = ints.splitAt(ints.length / 2) // Divide the sequence in half using the `splitAt` function.
sum(l) + sum(r) // Recursively sum both halves and add the results together.
}
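  // Illustrative sketch: the same divide-and-conquer sum expressed with the Par
  // primitives above. `fork` marks each half for potential parallel evaluation and
  // `map2` combines the two results.
  def parSum(ints: IndexedSeq[Int]): Par[Int] =
    if (ints.size <= 1)
      Par.unit(ints.headOption getOrElse 0)
    else {
      val (l, r) = ints.splitAt(ints.length / 2)
      Par.map2(Par.fork(parSum(l)), Par.fork(parSum(r)))(_ + _)
    }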
}
|
wickedwukong/fpinscala2
|
exercises/src/main/scala/fpinscala/parallelism/Par.scala
|
Scala
|
mit
| 3,930 |
package frameless
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop._
import org.scalacheck.{Arbitrary, Gen}
import scala.collection.JavaConverters._
import org.scalatest.matchers.should.Matchers
class RandomSplitTests extends TypedDatasetSuite with Matchers {
val nonEmptyPositiveArray: Gen[Array[Double]] = Gen.nonEmptyListOf(Gen.posNum[Double]).map(_.toArray)
test("randomSplit(weight, seed)") {
def prop[A: TypedEncoder : Arbitrary] = forAll(vectorGen[A], nonEmptyPositiveArray, arbitrary[Long]) {
(data: Vector[A], weights: Array[Double], seed: Long) =>
val dataset = TypedDataset.create(data)
dataset.randomSplit(weights, seed).map(_.count().run()) sameElements
dataset.dataset.randomSplit(weights, seed).map(_.count())
}
check(prop[Int])
check(prop[String])
}
test("randomSplitAsList(weight, seed)") {
def prop[A: TypedEncoder : Arbitrary] = forAll(vectorGen[A], nonEmptyPositiveArray, arbitrary[Long]) {
(data: Vector[A], weights: Array[Double], seed: Long) =>
val dataset = TypedDataset.create(data)
dataset.randomSplitAsList(weights, seed).asScala.map(_.count().run()) sameElements
dataset.dataset.randomSplitAsList(weights, seed).asScala.map(_.count())
}
check(prop[Int])
check(prop[String])
}
}
|
imarios/frameless
|
dataset/src/test/scala/frameless/forward/RandomSplitTests.scala
|
Scala
|
apache-2.0
| 1,342 |
package asobu.distributed
import akka.actor.ActorSystem
import scala.collection.JavaConverters._
object SystemValidator {
def validate(system: ActorSystem): Either[String, Unit] = {
val cfg = system.settings.config
val rolePath = "akka.cluster.distributed-data.role"
if (!cfg.hasPath(rolePath))
Left("akka.distributed-data must be enabled")
else {
val ddRole = cfg.getString(rolePath)
val roles = cfg.getStringList("akka.cluster.roles").asScala
if (!ddRole.isEmpty && !roles.contains(ddRole))
Left(s"cluster roles (${roles.mkString}) must contain distributed-data scope role $ddRole")
else
Right(())
}
}
}
|
kailuowang/asobu
|
distributed/src/main/scala/asobu/distributed/SystemValidator.scala
|
Scala
|
apache-2.0
| 678 |
/*
* Copyright (c) 2011. Alexandre Martins. All rights reserved.
*/
package pt.cnbc.wikimodels.client.record
import pt.cnbc.wikimodels.dataModel.Species
import xml.NodeSeq
import net.liftweb.common.Full._
import net.liftweb.common.{Full, Empty, Box}
import net.liftweb.http.{SHtml, S}
import net.liftweb.record._
/** TODO: Please document.
* @author Alexandre Martins
* Date: 29-12-2011
* Time: 16:49
* To change this template use File | Settings | File Templates. */
case class SpeciesRecord() extends SBaseRecord[SpeciesRecord] {
override val sbmlType = "Species"
override def meta = SpeciesRecord
override protected def relativeURLasList = "model" :: S.param("modelMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method") :: "species" :: this.metaIdO.get :: Nil
override protected def relativeCreationURLasList = "model" :: S.param("modelMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method") :: "species" :: Nil
// ### can be validated with validate ###
// ### can be presented as XHtml, Json, or as a Form. ###
override def toXHtml = {
trace("Calling SpeciesRecord.toXHtml")
<div>
<head>
<link type="text/css" rel="stylesheet" href="/css/sbml_present.css"></link>
</head>
{super.toXHtml}
</div>
}
override def toForm(f:SpeciesRecord => Unit):NodeSeq = {
trace("Calling SpeciesRecord.toForm( "+f+" )")
<div class="species_toform">
{super.toForm(f)}
</div>
}
// ### will contain fields which can be listed with allFields. ###
object idO extends Id(this, 100)
object nameO extends Name(this, 100)
object compartmentO extends SCompartment(this)
object constantO extends SConstant(this)
object notesO extends Notes(this, 1000)
object initialAmountO extends InitialAmount(this)
object initialConcentrationO extends InitialConcentration(this)
object boundaryConditionO extends BoundaryCondition(this)
// ### can be created directly from a Request containing params with names that match the fields on a Record ( see fromReq ). ###
var _parent:Box[SBMLModelRecord] = Empty
//TODO isn't there a better way to override a var than THIS?!??! Fucking asInstanceOf
override def parent:Box[SBMLModelRecord] = _parent
override def parent_=(p:Box[SBaseRecord[_]] ):Unit = {
_parent = p.asInstanceOf[Box[SBMLModelRecord]]
}
}
//TODO - DELETE IF NOT USED FOR ANYTHING
object SpeciesRecord extends SpeciesRecord with RestMetaRecord[SpeciesRecord] {
override def fieldOrder = List(metaIdO, idO, nameO, compartmentO, initialAmountO, initialConcentrationO, boundaryConditionO, constantO, notesO)
override def fields = fieldOrder
}
|
alexmsmartins/WikiModels
|
wm_web_client/src/main/scala/pt/cnbc/wikimodels/client/record/SpeciesRecord.scala
|
Scala
|
mit
| 2,732 |
object Problem {
def fact(n:Int):BigInt = if(n == 0) 1 else n * fact(n - 1)
def main(args: Array[String]) {
val start = System.currentTimeMillis
println(fact(100).toString.map(x => x.toInt - 48).sum)
val stop = System.currentTimeMillis
println("Time taken: " + (stop - start) + "ms")
}
}
|
Jiri-Kremser/euler
|
020/Problem.scala
|
Scala
|
gpl-2.0
| 312 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.jdbc
import java.sql.Types
import java.util.Locale
import org.apache.spark.sql.types._
private object DerbyDialect extends JdbcDialect {
override def canHandle(url: String): Boolean =
url.toLowerCase(Locale.ROOT).startsWith("jdbc:derby")
override def getCatalystType(
sqlType: Int, typeName: String, size: Int, md: MetadataBuilder): Option[DataType] = {
if (sqlType == Types.REAL) Option(FloatType) else None
}
override def getJDBCType(dt: DataType): Option[JdbcType] = dt match {
case StringType => Option(JdbcType("CLOB", java.sql.Types.CLOB))
case ByteType => Option(JdbcType("SMALLINT", java.sql.Types.SMALLINT))
case ShortType => Option(JdbcType("SMALLINT", java.sql.Types.SMALLINT))
case BooleanType => Option(JdbcType("BOOLEAN", java.sql.Types.BOOLEAN))
// 31 is the maximum precision and 5 is the default scale for a Derby DECIMAL
case t: DecimalType if t.precision > 31 =>
Option(JdbcType("DECIMAL(31,5)", java.sql.Types.DECIMAL))
case _ => None
}
override def isCascadingTruncateTable(): Option[Boolean] = Some(false)
// See https://db.apache.org/derby/docs/10.5/ref/rrefsqljrenametablestatement.html
override def renameTable(oldTable: String, newTable: String): String = {
s"RENAME TABLE $oldTable TO $newTable"
}
}
|
dbtsai/spark
|
sql/core/src/main/scala/org/apache/spark/sql/jdbc/DerbyDialect.scala
|
Scala
|
apache-2.0
| 2,137 |
package endToEnd.bloomfilter.mutable
import bloomfilter.mutable.BloomFilter
import org.scalatest.{FreeSpec, Matchers}
class SampleUsageSpec extends FreeSpec with Matchers {
"Create, put and check " in {
val bloomFilter = BloomFilter[String](1000, 0.01)
bloomFilter.add("")
bloomFilter.add("Hello!")
bloomFilter.add("8f16c986824e40e7885a032ddd29a7d3")
bloomFilter.mightContain("") shouldBe true
bloomFilter.mightContain("Hello!") shouldBe true
bloomFilter.mightContain("8f16c986824e40e7885a032ddd29a7d3") shouldBe true
bloomFilter.dispose()
}
}
|
alexandrnikitin/bloom-filter-scala
|
tests/src/endToEnd/scala/endToEnd/bloomfilter/mutable/SampleUsageSpec.scala
|
Scala
|
mit
| 585 |
import scala.reflect.macros.blackbox.Context
import scala.reflect.runtime.{universe => ru}
object Impls {
def foo1(c: Context) = 2
def foo2(c: Context) = ru.Literal(ru.Constant(42))
def foo3(c: Context) = ???
def foo5(c: Context) = c.universe.Literal(c.universe.Constant(42))
def foo6(c: Context) = c.Expr[Int](c.universe.Literal(c.universe.Constant(42)))
}
|
yusuke2255/dotty
|
tests/untried/neg/macro-invalidret/Impls_1.scala
|
Scala
|
bsd-3-clause
| 369 |
import java.time.{DayOfWeek, LocalDate}
import Schedule.Schedule
case class Meetup(month: Int, year: Int) {
private val thirteenth = LocalDate.of(year, month, 13)
private val firstDay = LocalDate.of(year, month, 1)
private val nextMonth = firstDay.plusMonths(1)
private val teenth: Scheduler = (dayOfWeek: Int) => thirteenth.next(dayOfWeek)
private val first: Scheduler = (dayOfWeek: Int) => firstDay.next(dayOfWeek)
private val second: Scheduler = (dayOfWeek: Int) => first.day(dayOfWeek).plusDays(7)
private val third: Scheduler = (dayOfWeek: Int) => second.day(dayOfWeek).plusDays(7)
private val fourth: Scheduler = (dayOfWeek: Int) => third.day(dayOfWeek).plusDays(7)
private val last: Scheduler = (dayOfWeek: Int) => nextMonth.next(dayOfWeek).minusDays(7)
private def schedulers: Map[Schedule, Scheduler] = Map(Schedule.Teenth -> teenth,
Schedule.First -> first,
Schedule.Second -> second,
Schedule.Third -> third,
Schedule.Fourth -> fourth,
Schedule.Last -> last)
def day(dayOfWeek: Int, schedule: Schedule): LocalDate =
schedulers(schedule).day(dayOfWeek)
implicit class LocalDateOps(self: LocalDate) {
def next(dayOfWeek: Int): LocalDate = self.plusDays(daysUntil(dayOfWeek))
def daysUntil(dayOfWeek: Int): Int = (Meetup.Sun - this.dayOfWeek + dayOfWeek) % 7
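    // e.g. from a Wednesday (3) to the next Monday (1): (7 - 3 + 1) % 7 = 5 days ahead;
    // a result of 0 means the date itself already falls on the requested day of week.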
def dayOfWeek: Int = self.getDayOfWeek.getValue
}
trait Scheduler {
def day(dayOfWeek: Int): LocalDate
}
}
object Schedule extends Enumeration {
type Schedule = Value
val Teenth, First, Second, Third, Fourth, Last = Value
}
object Meetup {
val Mon = DayOfWeek.MONDAY.getValue
val Tue = DayOfWeek.TUESDAY.getValue
val Wed = DayOfWeek.WEDNESDAY.getValue
val Thu = DayOfWeek.THURSDAY.getValue
val Fri = DayOfWeek.FRIDAY.getValue
val Sat = DayOfWeek.SATURDAY.getValue
val Sun = DayOfWeek.SUNDAY.getValue
}
|
exercism/xscala
|
exercises/practice/meetup/.meta/Example.scala
|
Scala
|
mit
| 1,867 |
package mimir.util
import java.io.ByteArrayInputStream
import java.io.File
import java.io.InputStream
import java.util.List
import my.com.amazonaws.HttpMethod;
import my.com.amazonaws.auth.AWSCredentials
import my.com.amazonaws.auth.BasicAWSCredentials
import my.com.amazonaws.auth.AWSStaticCredentialsProvider
import my.com.amazonaws.auth.profile.ProfileCredentialsProvider
import my.com.amazonaws.services.s3.AmazonS3
import my.com.amazonaws.services.s3.AmazonS3Client
import my.com.amazonaws.services.s3.model.Bucket
import my.com.amazonaws.services.s3.model.CannedAccessControlList
import my.com.amazonaws.services.s3.model.ObjectMetadata
import my.com.amazonaws.services.s3.model.PutObjectRequest
import my.com.amazonaws.services.s3.model.S3ObjectSummary
import my.com.amazonaws.services.s3.AmazonS3ClientBuilder
import my.com.amazonaws.services.s3.model.GeneratePresignedUrlRequest
import scala.collection.JavaConversions._
import java.net.HttpURLConnection
import java.io.OutputStreamWriter
import java.net.URL
import java.io.BufferedReader
import java.io.InputStreamReader
import java.io.BufferedOutputStream
import java.io.BufferedInputStream
import my.com.amazonaws.services.s3.model.GetObjectRequest
import my.com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration
object S3Utils {
private val PATH_SEP: String = "/"
/**
* @param accessKeyID
* @param secretAccessKey
* @param clientRegion
* @return
*/
def authenticate(accessKeyID: String, secretAccessKey: String, clientRegion: String, endpoint:Option[String] = None): AmazonS3 = {
    // credentials object identifying user for authentication
    // user must have AWSConnector and AmazonS3FullAccess for
    // this to work
val credentials: AWSCredentials =
new BasicAWSCredentials(accessKeyID, secretAccessKey)
// create a client connection based on credentials
//new AmazonS3Client(credentials)
(endpoint match {
case None => {
AmazonS3ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials)) //new ProfileCredentialsProvider())
.withRegion(clientRegion)
}
case Some(ep) => {
val cb = AmazonS3ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(credentials)) //new ProfileCredentialsProvider())
val endpointConfiguration = new EndpointConfiguration(ep, clientRegion);
cb.setEndpointConfiguration(endpointConfiguration)
cb
}
}).build();
}
/**
* @param bucketName
* @param s3client
* @return
*/
def createBucket(bucketName: String,
s3client: AmazonS3) = {
s3client.createBucket(bucketName)
}
/**
* @param bucketName
* @param s3client
*/
def deleteBucket(bucketName: String,
s3client: AmazonS3) = {
s3client.deleteBucket(bucketName)
}
/**
* @param s3client
* @return
*/
def bucketsList(s3client: AmazonS3): Seq[String] = {
s3client.listBuckets().map(_.getName)
}
/**
* @param bucketName
* @param folderName
* @param client
*/
def createFolder(bucketName: String, folderName: String, s3client: AmazonS3): Unit = {
// create meta-data for your folder and set content-length to 0
val metadata: ObjectMetadata = new ObjectMetadata()
metadata.setContentLength(0)
// create empty content
val emptyContent: InputStream = new ByteArrayInputStream(
Array.ofDim[Byte](0))
// create a PutObjectRequest passing the folder name suffixed by /
val putObjectRequest: PutObjectRequest = new PutObjectRequest(
bucketName,
folderName + PATH_SEP,
emptyContent,
metadata)
// send request to S3 to create folder
s3client.putObject(putObjectRequest)
println("folder created: " + folderName)
}
/**
* @param bucketName
* @param folderPath
* @param s3client
*/
def createFolderAndParents(bucketName: String, folderPath: String, s3client: AmazonS3): Unit = {
val targetPaths = folderPath.split(File.separator)
createFolderAndParents(bucketName, targetPaths, s3client)
}
/**
* @param bucketName
* @param folderPath
* @param s3client
*/
def createFolderAndParents(bucketName: String, folderPath: Array[String], s3client: AmazonS3): Unit = {
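    // Walk the path segments left to right, accumulating the parent prefix and
    // creating each folder that does not yet exist (empty segments are skipped).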
folderPath.foldLeft("")((init, cur) => {
val curDir = init + cur
if(cur.isEmpty()){
init
}
else if(objectExists(bucketName, curDir , s3client)){
curDir + PATH_SEP
}
else{
createFolder(bucketName, curDir, s3client)
curDir + PATH_SEP
}
})
}
/**
* This method first deletes all the files in given folder and than the
* folder itself
*
* @param bucketName
* @param folderName
* @param client
*/
def deleteFolder(bucketName: String, folderName: String, client: AmazonS3): Unit = {
val fileList =
client.listObjects(bucketName, folderName).getObjectSummaries
for (file <- fileList) {
client.deleteObject(bucketName, file.getKey)
}
client.deleteObject(bucketName, folderName)
}
/**
* @param bucketName
* @param folderName
* @param srcFile
* @param targetFile
* @param s3client
* @return
*/
def uploadFile(bucketName: String, srcFile: String, targetFile: String, s3client: AmazonS3, overwrite:Boolean = false) = {
if(!objectExists(bucketName, targetFile, s3client) || overwrite){
// upload file to folder and set it to public
s3client.putObject(
new PutObjectRequest(bucketName,
targetFile,
new File(srcFile))
.withCannedAcl(CannedAccessControlList.Private))
}
}
/**
* @param bucketName
* @param objKey
* @param s3client
* @return
*/
def objectExists(bucketName: String, objKey: String, s3client: AmazonS3): Boolean = {
s3client.doesObjectExist(bucketName, objKey);
}
/**
* @param bucketName
* @param srcFile
* @param targetFile
* @param s3client
* @param overwrite
* @return
*/
def copyToS3(bucketName: String, srcFile: String, targetFile: String, s3client: AmazonS3, overwrite:Boolean = false) = {
if(!objectExists(bucketName, targetFile, s3client) || overwrite){
// Set the pre-signed URL to expire after one hour.
val expiration: java.util.Date = new java.util.Date()
var expTimeMillis: Long = expiration.getTime
expTimeMillis += 1000 * 60 * 60
expiration.setTime(expTimeMillis)
// Generate the pre-signed URL.
val generatePresignedUrlRequest: GeneratePresignedUrlRequest =
new GeneratePresignedUrlRequest(bucketName, targetFile)
.withMethod(HttpMethod.PUT)
.withExpiration(expiration)
val url: URL = s3client.generatePresignedUrl(generatePresignedUrlRequest)
// Create the connection and use it to upload the new object using the pre-signed URL.
val connection: HttpURLConnection =
url.openConnection().asInstanceOf[HttpURLConnection]
connection.setDoOutput(true)
connection.setRequestMethod("PUT")
val out = new BufferedOutputStream(connection.getOutputStream)
val sourceFile = new File(srcFile)
val srcUrl = if(sourceFile.getPath.contains(":/")) new java.net.URL(sourceFile.getPath.replaceFirst(":/", "://")) else sourceFile.toURI().toURL()
val input = new BufferedInputStream(srcUrl.openStream)
val bytes = new Array[Byte](1024) //1024 bytes - Buffer size
Iterator
.continually (input.read(bytes))
.takeWhile (_ != -1L)
.foreach (read=>out.write(bytes,0,read))
out.flush()
out.close()
connection.getResponseCode
}
}
def copyToS3Stream(bucketName: String, input:InputStream, targetFile: String, s3client: AmazonS3, overwrite:Boolean = false) = {
if(!objectExists(bucketName, targetFile, s3client) || overwrite){
// Set the pre-signed URL to expire after one hour.
val expiration: java.util.Date = new java.util.Date()
var expTimeMillis: Long = expiration.getTime
expTimeMillis += 1000 * 60 * 60
expiration.setTime(expTimeMillis)
// Generate the pre-signed URL.
val generatePresignedUrlRequest: GeneratePresignedUrlRequest =
new GeneratePresignedUrlRequest(bucketName, targetFile)
.withMethod(HttpMethod.PUT)
.withExpiration(expiration)
val url: URL = s3client.generatePresignedUrl(generatePresignedUrlRequest)
// Create the connection and use it to upload the new object using the pre-signed URL.
val connection: HttpURLConnection =
url.openConnection().asInstanceOf[HttpURLConnection]
connection.setDoOutput(true)
connection.setRequestMethod("PUT")
connection.setFixedLengthStreamingMode(input.available())
val out = new BufferedOutputStream(connection.getOutputStream)
val bytes = new Array[Byte](1024) //1024 bytes - Buffer size
Iterator
.continually (input.read(bytes))
.takeWhile (_ != -1L)
.foreach (read=>out.write(bytes,0,read))
out.flush()
out.close()
connection.getResponseCode
}
}
def readFromS3(bucketName: String, key: String, s3client: AmazonS3): InputStream = {
val s3object =
s3client.getObject(new GetObjectRequest(bucketName, key))
s3object.getObjectContent
}
}
|
UBOdin/mimir
|
src/main/scala/mimir/util/S3Utils.scala
|
Scala
|
apache-2.0
| 9,560 |
package TAPL2.Arith
import TAPL2.Util._
import TAPL2.Term
case object TmTrue extends Term
case object TmFalse extends Term
case class TmIf(cond: Term, t1: Term, t2: Term) extends Term
case object TmZero extends Term
case class TmSucc(t: Term) extends Term
case class TmPred(t: Term) extends Term
case class TmIsZero(t: Term) extends Term
object Bool {
trait Parser[F <: {val pE : PackratParser[Term]}] {
lexical.reserved += ("true", "false", "if", "then", "else")
lexical.delimiters += ("(", ")")
val pBoolE: (=> F) => PackratParser[Term] = l => {
lazy val e = l.pE
List(
"true" ^^ { _ => TmTrue },
"false" ^^ { _ => TmFalse },
("if" ~> e) ~ ("then" ~> e) ~ ("else" ~> e) ^^ { case e1 ~ e2 ~ e3 => TmIf(e1, e2, e3) },
"(" ~> e <~ ")"
).reduce((a, b) => a ||| b)
}
}
}
object Nat {
trait Parser[F <: {val pE : PackratParser[Term]}] {
lexical.reserved += ("iszero", "succ", "pred")
lexical.delimiters += ("(", ")")
val pNatE: (=> F) => PackratParser[Term] = l => {
lazy val e = l.pE
def num(x: Int): Term = if (x == 0) TmZero else TmSucc(num(x - 1))
List(
numericLit ^^ { x => num(x.toInt) },
"succ" ~> e ^^ TmSucc,
"pred" ~> e ^^ TmPred,
"iszero" ~> e ^^ TmIsZero,
"(" ~> e <~ ")"
).reduce((a, b) => a ||| b)
}
}
}
object Arith {
trait Parser[L <: {val pE : PackratParser[Term]}] extends Bool.Parser[L] with Nat.Parser[L] {
val pArithE: (=> L) => PackratParser[Term] = l => pBoolE(l) ||| pNatE(l)
// we cannot use pE, such a name has incompatible types when overridden. is there a solution?
}
}
object TestArith {
class List[E](pe: PackratParser[E]) {
val pE = pe
}
def parseAndPrint(inp: String) = {
def parser(l: => List[Term]): List[Term] = {
val lang = new Arith.Parser[List[Term]] {}
new List[Term](lang.pArithE(l))
}
val t = phrase(fix(parser).pE)(new lexical.Scanner(inp))
if (t.successful) println(t.get) else scala.sys.error(t.toString)
}
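  // For example, parseAndPrint("if iszero 0 then succ 1 else pred 2") parses to
  // TmIf(TmIsZero(TmZero), TmSucc(TmSucc(TmZero)), TmPred(TmSucc(TmSucc(TmZero)))).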
}
|
hy-zhang/parser
|
Scala/Old/TAPL2/Arith/Arith.scala
|
Scala
|
bsd-3-clause
| 2,082 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.mimir
import quasar.Data
import quasar.blueeyes.json.JValue
import quasar.yggdrasil.table.{ColumnarTableModule, Slice}
import delorean._
import fs2.async
import fs2.async.mutable.Queue
import fs2.interop.scalaz._
import scalaz.{\/, -\/, \/-, ~>, StreamT}
import scalaz.concurrent.Task
import scalaz.syntax.monad._
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import java.util.concurrent.atomic.AtomicBoolean
trait TablePagerModule extends ColumnarTableModule[Future] {
final class TablePager private (
slices: StreamT[Task, Slice],
    queue: Queue[Task, Throwable \/ Vector[Data]]) {
private val running = new AtomicBoolean(true)
{
val driver = slices foreachRec { slice =>
for {
flag <- Task.delay(running.get())
_ <- if (flag && !slice.isEmpty) {
val json = slice.toJsonElements.map(JValue.toData)
if (json.isEmpty)
Task.now(())
else
            queue.enqueue1(\/-(json))
} else {
// we can't terminate early, because there are no finalizers in StreamT
Task.now(())
}
} yield ()
}
      val ta = driver >> queue.enqueue1(\/-(Vector.empty))
      ta unsafePerformAsync {
        case -\/(t) => queue.enqueue1(-\/(t)).unsafePerformAsync(_ => ())
        case \/-(_) => ()
}
}
def more: Task[Vector[Data]] =
queue.dequeue1.flatMap(_.fold(Task.fail, Task.now))
def close: Task[Unit] = Task.delay(running.set(false))
}
object TablePager {
def apply(table: Table, lookahead: Int = 1): Task[TablePager] = {
for {
        q <- async.boundedQueue[Task, Throwable \/ Vector[Data]](lookahead)
slices = table.slices.trans(λ[Future ~> Task](_.toTask))
back <- Task.delay(new TablePager(slices, q))
} yield back
}
}
}
|
jedesah/Quasar
|
mimir/src/main/scala/quasar/mimir/TablePager.scala
|
Scala
|
apache-2.0
| 2,517 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.ops
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath._
import com.intel.analytics.bigdl.dllib.utils.Table
import scala.reflect.ClassTag
/**
* Returns (x - y)(x - y) element-wise.
*/
class SquaredDifference[T: ClassTag]()(implicit ev: TensorNumeric[T])
extends Operation[Table, Tensor[_], T] {
def updateOutput(inputs: Table): Tensor[_] = {
val x = inputs[Tensor[NumericWildcard]](1)
val y = inputs[Tensor[NumericWildcard]](2)
require(x.getType() == y.getType(), "The numeric type of x and y must be the same, but got" +
s"x: ${x.getType()}, y: ${y.getType()}")
if (output.getType() != x.getType()) {
output = x.emptyInstance()
}
output.asInstanceOf[Tensor[NumericWildcard]]
.resizeAs(x).copy(x).sub(y).square()
output
}
}
object SquaredDifference {
def apply[T: ClassTag]()(implicit ev: TensorNumeric[T]): SquaredDifference[T]
= new SquaredDifference()
}
|
intel-analytics/BigDL
|
scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/ops/SquaredDifference.scala
|
Scala
|
apache-2.0
| 1,636 |
object Solution {
def numberOfWays(X:Int,N:Int):Int = {
// Compute the answer in this function over here
// It is fine to define new functions as and where required
numberOfWaysInner(X, N, X/2)
}
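  // Counts the ways to write X as a sum of N-th powers of distinct positive integers
  // no larger than `upper`: at each step either skip `upper` or, when upper^N fits,
  // use it once and recurse on the remainder with smaller bases.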
def numberOfWaysInner(X: Int, N: Int, upper: Int): Int = {
if (X <= 0 || upper <= 0) {
return 0
}
val calc: Int = math.pow(upper, N).toInt
if (calc > X) {
numberOfWaysInner(X, N, upper - 1)
} else if (calc == X) {
numberOfWaysInner(X, N, upper - 1) + 1
} else {
numberOfWaysInner(X, N, upper - 1) + numberOfWaysInner(X - calc, N, upper - 1)
}
}
def main(args: Array[String]) {
println(numberOfWays(readInt(),readInt()))
}
}
|
franklingu/HackerRank
|
functional-programming/recursion/the-sum-of-powers/the_sum_of_powers.scala
|
Scala
|
mit
| 780 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.exceptions
/**
* Wrapper exception that wraps an non-serializable exception encountered in <a href="../events/Event.html"><code>Event</code></a>.
*
* @param msg a string that explains the problem
* @param exceptionClassName the class name of the exception being wrapped
* @param exceptionStackTrace the stack trace of the exception being wrapped
*
* @throws NullArgumentException if either <code>message</code> or <code>failedCodeStackDepthFun</code> is <code>null</code>
*/
case class NotSerializableWrapperException(msg: String, exceptionClassName: String, exceptionStackTrace: Array[StackTraceElement]) extends Exception with Serializable
|
scalatest/scalatest
|
jvm/core/src/main/scala/org/scalatest/exceptions/NotSerializableWrapperException.scala
|
Scala
|
apache-2.0
| 1,273 |
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cba.omnia.piped
import scalaz.ValidationNel
import com.twitter.scalding._
import cascading.flow.FlowDef
object PipeOps extends PipeOps
trait PipeOps {
implicit def TypedPipeToRichErrorPipe[T](pipe: TypedPipe[ValidationNel[String, T]]) = RichErrorPipe[T](pipe)
}
case class RichErrorPipe[T](pipe: TypedPipe[ValidationNel[String, T]]) {
/**
* Writes out any errors to the specified file and if continue is false throws an exception afterwards to terminate the job.
*/
def handleError(errorPath: String, continue: Boolean = true)(implicit flow: FlowDef, mode: Mode): TypedPipe[T] = {
// TODO(hoermast) revisit once we have determined the best way to handle failures in scalding.
pipe.
flatMap(_.swap.toList.flatMap(_.list))
.write(TypedPsv[String](errorPath))
if (continue) pipe.flatMap(_.toOption)
else pipe.map(_.valueOr(nel => throw new Exception(nel.list.mkString("; "))))
}
}
|
CommBank/piped
|
src/main/scala/com/cba/omnia/piped/RichTypedPipe.scala
|
Scala
|
apache-2.0
| 1,567 |
package views.changekeeper
object PrivateKeeperDetails {
final val BackId = "back"
final val SubmitId = "submit"
}
|
dvla/vehicles-change-keeper-online
|
app/views/changekeeper/PrivateKeeperDetails.scala
|
Scala
|
mit
| 119 |
package io.kaitai.struct.exprlang
import io.kaitai.struct.exprlang.Ast._
/**
* Namespace which holds a bunch of methods and case classes related to
* evaluation of constant expressions, e.g. it can predict that `1 + 2`
* will be always constant and equal to `3`, and anything with a variable
* in it is potentially non-constant.
*
* Evaluators below are relatively naive: expect no complex logic or symbolic
* simplification of expressions here: something like `x - x`, which is
* known to be always 0, will still report it as "potentially variable".
*/
object ConstEvaluator {
/**
* Evaluates the expression, if it's possible to get an integer constant
* as the result of evaluation (i.e. if it does not involve any variables
* or anything like that).
*
* @param ex expression to evaluate.
* @return integer result of evaluation if it's constant or None, if it's
* variable or potentially variable.
*/
def evaluateIntConst(ex: Ast.expr): Option[BigInt] = {
evaluate(ex) match {
case value.Int(x) => Some(x)
case _ => None
}
}
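  // For example, `1 + 2` is constant:
  //   evaluateIntConst(expr.BinOp(expr.IntNum(1), operator.Add, expr.IntNum(2))) == Some(3)
  // whereas any expression involving a variable yields None.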
/**
* Evaluates the expression, if it's possible to get a constant as the result
* of evaluation (i.e. if it does not involve any variables or anything like
* that).
*
* @param ex expression to evaluate.
* @return [[value]] container.
*/
def evaluate(ex: Ast.expr): value = ex match {
case expr.IntNum(x) => value.Int(x)
case expr.Bool(x) => value.Bool(x)
case expr.Str(x) => value.Str(x)
case expr.UnaryOp(op, expr.IntNum(operand)) =>
value.Int(op match {
case unaryop.Invert => ~operand
case unaryop.Minus => -operand
case _ => return value.NonConst
})
case expr.UnaryOp(unaryop.Not, expr.Bool(operand)) => value.Bool(!operand)
case expr.BinOp(left, op, right) =>
val leftValue = evaluate(left)
val rightValue = evaluate(right)
(op, leftValue, rightValue) match {
case (operator.Add, value.Str(l), value.Str(r)) => value.Str(l + r)
case (_, value.Int(l), value.Int(r)) => value.Int(op match {
case operator.Add => l + r
case operator.Sub => l - r
case operator.Mult => l * r
case operator.Div => l / r
case operator.Mod =>
val res = l % r
if (res < 0) res + r else res
case operator.LShift => l << r.toInt
case operator.RShift => l >> r.toInt
case operator.BitOr => l | r
case operator.BitXor => l ^ r
case operator.BitAnd => l & r
})
case _ => value.NonConst
}
    case expr.BoolOp(op, values) =>
      // Seed the fold with the identity of the operator (true for `and`, false for `or`),
      // so that e.g. `false or false` correctly evaluates to false.
      val seed = op match {
        case boolop.And => true
        case boolop.Or => false
      }
      value.Bool(values.foldLeft(seed)((acc, right) => {
        val rightValue = evaluate(right) match {
          case value.Bool(x) => x
          case _ => return value.NonConst
        }
        op match {
          case boolop.And => acc && rightValue
          case boolop.Or => acc || rightValue
        }
      }))
case expr.Compare(left, op, right) =>
val leftValue = evaluate(left)
val rightValue = evaluate(right)
value.Bool((op, leftValue, rightValue) match {
case (cmpop.Eq, value.Int(l), value.Int(r) ) => l == r
case (cmpop.Eq, value.Bool(l), value.Bool(r)) => l == r
case (cmpop.Eq, value.Str(l), value.Str(r)) => l == r
case (cmpop.NotEq, value.Int(l), value.Int(r) ) => l != r
case (cmpop.NotEq, value.Bool(l), value.Bool(r)) => l != r
case (cmpop.NotEq, value.Str(l), value.Str(r)) => l != r
case (cmpop.Lt, value.Int(l), value.Int(r)) => l < r
case (cmpop.LtE, value.Int(l), value.Int(r)) => l <= r
case (cmpop.Gt, value.Int(l), value.Int(r)) => l > r
case (cmpop.GtE, value.Int(l), value.Int(r)) => l >= r
case (cmpop.Lt, value.Str(l), value.Str(r)) => l < r
case (cmpop.LtE, value.Str(l), value.Str(r)) => l <= r
case (cmpop.Gt, value.Str(l), value.Str(r)) => l > r
case (cmpop.GtE, value.Str(l), value.Str(r)) => l >= r
case _ => return value.NonConst
})
case expr.IfExp(condition, ifTrue, ifFalse) => evaluate(condition) match {
case value.Bool(cond) =>
if (cond) {
evaluate(ifTrue)
} else {
evaluate(ifFalse)
}
case _ => value.NonConst
}
case expr.List(list) => value.List(list.map(evaluate))
case expr.Subscript(container, index) =>
val idx = evaluate(index) match {
case value.Int(x) if x >= 0 => x
case _ => return value.NonConst
}
evaluate(container) match {
case value.List(list) if idx < list.length => list(idx.toInt)
case _ => value.NonConst
}
case _ => value.NonConst
}
/**
* Result of the AST evaluation.
*
   * Represents either a known-to-be constant value of a certain type, or knowledge that this
* expression is non-constant.
* */
sealed trait value
object value {
    /** Result known to be potentially non-constant */
case object NonConst extends value
/** AST node evaluated to the logical value */
case class Bool(value: Boolean) extends value
/** AST node evaluated to the numerical value */
case class Int(value: BigInt) extends value
/** AST node evaluated to the string value */
case class Str(value: String) extends value
/** AST node evaluated to the array */
case class List(list: Seq[value]) extends value
}
}
|
kaitai-io/kaitai_struct_compiler
|
shared/src/main/scala/io/kaitai/struct/exprlang/ConstEvaluator.scala
|
Scala
|
gpl-3.0
| 5,504 |
package ghpages.pages
import japgolly.scalajs.react._, vdom.prefix_<^._
/**
* Created by chandrasekharkode on 11/16/14.
*/
object HomePage {
private val p =
<.p(^.margin := "1.3em 0")
val component = ReactComponentB.static("Home",
<.div(
<.h1(
<.a(
^.color := "#000",
^.href := "https://github.com/japgolly/scalajs-react",
"scalajs-react")),
<.section(
^.marginTop := "2.2em",
^.fontSize := "115%",
^.color := "#333",
p(
"Lifts Facebook's ",
<.a(^.href := "https://facebook.github.io/react", "React"),
" library into ",
<.a(^.href := "http://www.scala-js.org", "Scala.js"),
" and endeavours to make it as type-safe and Scala-friendly as possible."),
p(
"Provides (opt-in) support for pure functional programming, using ",
<.a(^.href := "https://github.com/scalaz/scalaz", "Scalaz"),
" and ",
<.a(^.href := "https://github.com/julien-truffaut/Monocle", "Monocle"),
"."),
      p(
        "Comes with utility modules helpful for React in Scala(.js), rather than React in JS. ",
        "Includes a router, testing utils, performance utils, and more."),
<.p(
^.fontSize := "85%",
^.marginTop := "3.3em",
^.fontStyle := "italic",
^.color := "#444",
"Big thanks to ",
<.a(^.href := "https://twitter.com/chandu0101", "@chandu0101"),
" for creating these pages.")))
).buildU
}
|
beni55/scalajs-react
|
gh-pages/src/main/scala/ghpages/pages/HomePage.scala
|
Scala
|
apache-2.0
| 1,553 |
package org.showgregator.service.view
import com.twitter.finatra.View
/**
* Created with IntelliJ IDEA.
* User: cmarshall
* Date: 1/25/15
* Time: 7:06 PM
* To change this template use File | Settings | File Templates.
*/
class SuccessfulRegisterView(val email: String) extends View {
def template: String = "templates/registered.mustache"
}
|
csm/showgregator
|
showgregator-service/src/main/scala/org/showgregator/service/view/SuccessfulRegisterView.scala
|
Scala
|
agpl-3.0
| 351 |
package scutil.gui
import java.awt.{ AWTEvent, Toolkit }
import java.awt.event._
import scutil.lang._
object GlobalAWTEvent {
// TODO using this is a Using
def connect(mask:Long)(handler:AWTEvent=>Unit):Disposer = {
val listener =
new AWTEventListener {
def eventDispatched(ev:AWTEvent):Unit = handler(ev)
}
val toolkit = Toolkit.getDefaultToolkit
toolkit.addAWTEventListener(listener, mask)
Disposer delay {
toolkit removeAWTEventListener listener
}
}
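	// Hypothetical usage sketch (not part of the original source; assumes scutil's Disposer
	// exposes a dispose() method):
	//
	//   val disposer = GlobalAWTEvent.connect(AWTEvent.MOUSE_EVENT_MASK) { ev => println(ev) }
	//   // ... later, once global mouse events are no longer needed:
	//   disposer.dispose()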
}
|
ritschwumm/scutil
|
modules/gui/src/main/scala/scutil/gui/GlobalAWTEvent.scala
|
Scala
|
bsd-2-clause
| 484 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.sgc
import java.util.Date
import es.alvsanand.sgc.core.connector.{SgcDateSlot, SgcConnectorFactory, SgcConnectorParameters, SgcSlot}
import es.alvsanand.sgc.core.util.{Logging, Retry}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import scala.reflect.ClassTag
import scala.util.{Failure, Success}
/**
* SgcContext creates an RDD which provides core functionality for reading
* es.alvsanand.sgc.core.connector.SgcSlot retrieved by
* es.alvsanand.sgc.core.connector.SgcConnector.
*
 * This is how it works:
*
* - Every es.alvsanand.sgc.core.connector.SgcSlot will be assigned to a
* org.apache.spark.Partition in order to be distributable. However, the number of partitions
 * cannot be increased because it is impossible to know for sure if the data received by the
* es.alvsanand.sgc.core.connector.SgcConnector may be split.
*
 * - When a partition is computed, a new instance of
 * es.alvsanand.sgc.core.connector.SgcConnector is created and its fetch() method is
 * called in order to retrieve the data of the name.
*
 * - Finally, the InputStream is parsed in order to unzip the data if it is a GZIP name.
*
* @param sc The SparkContext
*/
case class SgcContext(@transient sc: SparkContext) extends Logging {
/**
* Create a org.apache.spark.streaming.sgc.SgcRDD from a SparkContext using a
* es.alvsanand.sgc.core.connector.SgcConnectorFactory and some parameters.
*
* Internally, SgcContext works like this:
*
   * - Use the es.alvsanand.sgc.core.connector.SgcConnectorFactory to create an instance of
* the es.alvsanand.sgc.core.connector.SgcConnector.
*
* - List the slots using the es.alvsanand.sgc.core.connector.SgcConnector.
*
* - Optionally, filter the name list if a range of dates has been selected.
*
* - Create a org.apache.spark.streaming.sgc.SgcRDD.
*
* Note: the SgcContext is fault tolerant and is able to retry n times until the list() operation
   * succeeds.
*
* @param sgcConnectorFactory The SgcConnectorFactory used to create the
* es.alvsanand.sgc.core.connector.SgcConnector.
* @param parameters The parameters of the
* es.alvsanand.sgc.core.connector.SgcConnector.
* @param charset The java.nio.charset.Charset name of the slots that are
* going to be
* fetched.
   * @param maxRetries The maximum number of times that an operation of a
* es.alvsanand.sgc.core.connector.SgcConnector is going to
* be repeated
* in case of failure.
* @tparam A The type of es.alvsanand.sgc.core.connector.SgcSlot
* @tparam B The type of es.alvsanand.sgc.core.connector.SgcConnectorParameters
* @return A org.apache.spark.streaming.sgc.SgcRDD with all available slots.
*/
def createSgcRDD[A <: SgcSlot : ClassTag, B <: SgcConnectorParameters : ClassTag]
(sgcConnectorFactory: SgcConnectorFactory[A, B],
parameters: B,
charset: String = "UTF-8",
maxRetries: Int = 3
): RDD[String] = {
createFilteredSgcRDD[A, B](sgcConnectorFactory, parameters, (slot: A) => true, charset, maxRetries)
}
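  // Hypothetical usage sketch (not part of the original source; the connector factory and
  // parameter values are placeholders standing in for a concrete sgc connector):
  //
  //   val sgcContext = SgcContext(sc)
  //   val lines: RDD[String] = sgcContext.createSgcRDD(myConnectorFactory, myParameters)
  //   lines.take(10).foreach(println)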
/**
* Create a org.apache.spark.streaming.sgc.SgcRDD from a SparkContext using a
* es.alvsanand.sgc.core.connector.SgcConnectorFactory and some parameters.
*
* Internally, SgcContext works like this:
*
   * - Use the es.alvsanand.sgc.core.connector.SgcConnectorFactory to create an instance of
* the es.alvsanand.sgc.core.connector.SgcConnector.
*
* - List the slots using the es.alvsanand.sgc.core.connector.SgcConnector.
*
* - Optionally, filter the name list if a range of dates has been selected.
*
* - Create a org.apache.spark.streaming.sgc.SgcRDD.
*
* Note: the SgcContext is fault tolerant and is able to retry n times until the list() operation
   * succeeds.
*
* @param sgcConnectorFactory The SgcConnectorFactory used to create the
* es.alvsanand.sgc.core.connector.SgcConnector.
* @param parameters The parameters of the es.alvsanand.sgc.core.connector
* .SgcConnector.
* @param previousSlots The previous slots that must be filtered.
* @param charset The java.nio.charset.Charset name of the slots that are
* going to be
* fetched.
   * @param maxRetries The maximum number of times that an operation of a
* es.alvsanand.sgc.core.connector.SgcConnector is going to
* be repeated
* in case of failure.
* @tparam A The type of es.alvsanand.sgc.core.connector.SgcSlot
* @tparam B The type of es.alvsanand.sgc.core.connector.SgcConnectorParameters
* @return A org.apache.spark.streaming.sgc.SgcRDD with all available slots.
*/
def createPreviousFilteredSgcRDD[A <: SgcSlot : ClassTag, B <: SgcConnectorParameters : ClassTag]
(sgcConnectorFactory: SgcConnectorFactory[A, B],
parameters: B,
previousSlots: Seq[A],
charset: String = "UTF-8",
maxRetries: Int = 3
): RDD[String] = {
val filterSlot = (slot: A) => {
!previousSlots.contains(slot)
}
createFilteredSgcRDD[A, B](sgcConnectorFactory, parameters, filterSlot, charset, maxRetries)
}
/**
* Create a org.apache.spark.streaming.sgc.SgcRDD from a SparkContext using a
* es.alvsanand.sgc.core.connector.SgcConnectorFactory and some parameters.
*
* Internally, SgcContext works like this:
*
   * - Use the es.alvsanand.sgc.core.connector.SgcConnectorFactory to create an instance of
* the es.alvsanand.sgc.core.connector.SgcConnector.
*
* - List the slots using the es.alvsanand.sgc.core.connector.SgcConnector.
*
* - Optionally, filter the name list if a range of dates has been selected.
*
* - Create a org.apache.spark.streaming.sgc.SgcRDD.
*
* Note: the SgcContext is fault tolerant and is able to retry n times until the list() operation
   * succeeds.
*
* @param sgcConnectorFactory The SgcConnectorFactory used to create the
* es.alvsanand.sgc.core.connector.SgcConnector.
* @param parameters The parameters of the es.alvsanand.sgc.core.connector
* .SgcConnector.
* @param fromDate The beginning of the date range [inclusive]
* @param toDate The end of the date range [exclusive]
* @param charset The java.nio.charset.Charset name of the slots that are
* going to be
* fetched.
   * @param maxRetries The maximum number of times that an operation of a
* es.alvsanand.sgc.core.connector.SgcConnector is going to
* be repeated
* in case of failure.
* @tparam A The type of es.alvsanand.sgc.core.connector.SgcSlot
* @tparam B The type of es.alvsanand.sgc.core.connector.SgcConnectorParameters
* @return A org.apache.spark.streaming.sgc.SgcRDD with all available slots.
*/
def createDateFilteredSgcRDD[A <: SgcDateSlot : ClassTag, B <: SgcConnectorParameters : ClassTag]
(sgcConnectorFactory: SgcConnectorFactory[A, B],
parameters: B,
fromDate: Option[Date] = None,
toDate: Option[Date] = None,
charset: String = "UTF-8",
maxRetries: Int = 3
): RDD[String] = {
val filterSlot = (sgcDateSlot: A) => {
if ((fromDate.isDefined || toDate.isDefined) && sgcDateSlot.date == null) {
logDebug(s"$sgcDateSlot is rejected because it needs date")
false
} else {
// Inclusive
if (fromDate.isDefined && fromDate.get.getTime > sgcDateSlot.date.getTime) {
logDebug(s"$sgcDateSlot is rejected because date[${sgcDateSlot.date}]" +
s" is less than $fromDate")
false
}
else {
// Exclusive
if (toDate.isDefined && toDate.get.getTime <= sgcDateSlot.date.getTime) {
logDebug(s"$sgcDateSlot is rejected because date[${sgcDateSlot.date}]" +
s" is equal or greater than $toDate")
false
}
else {
true
}
}
}
}
createFilteredSgcRDD[A, B](sgcConnectorFactory, parameters, filterSlot, charset, maxRetries)
}
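  // Hypothetical usage sketch (not part of the original source): fetch only slots dated
  // within January 2017; the connector factory and parameters are again placeholders.
  //
  //   val df = new java.text.SimpleDateFormat("yyyy-MM-dd")
  //   val january: RDD[String] = SgcContext(sc).createDateFilteredSgcRDD(
  //     myConnectorFactory, myParameters,
  //     fromDate = Some(df.parse("2017-01-01")), toDate = Some(df.parse("2017-02-01")))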
/**
* Create a org.apache.spark.streaming.sgc.SgcRDD from a SparkContext using a
* es.alvsanand.sgc.core.connector.SgcConnectorFactory and some parameters.
*
* Internally, SgcContext works like this:
*
   * - Use the es.alvsanand.sgc.core.connector.SgcConnectorFactory to create an instance of
* the es.alvsanand.sgc.core.connector.SgcConnector.
*
* - List the slots using the es.alvsanand.sgc.core.connector.SgcConnector.
*
* - Optionally, filter the name list if a range of dates has been selected.
*
* - Create a org.apache.spark.streaming.sgc.SgcRDD.
*
* Note: the SgcContext is fault tolerant and is able to retry n times until the list() operation
   * succeeds.
*
* @param sgcConnectorFactory The SgcConnectorFactory used to create the
* es.alvsanand.sgc.core.connector.SgcConnector.
* @param parameters The parameters of the es.alvsanand.sgc.core.connector.SgcConnector.
* @param filterSlot The filter to apply.
* @param charset The java.nio.charset.Charset name of the slots that are
* going to be
* fetched.
   * @param maxRetries The maximum number of times that an operation of a
* es.alvsanand.sgc.core.connector.SgcConnector is going to
* be repeated
* in case of failure.
* @tparam A The type of es.alvsanand.sgc.core.connector.SgcSlot
* @tparam B The type of es.alvsanand.sgc.core.connector.SgcConnectorParameters
* @return A org.apache.spark.streaming.sgc.SgcRDD with all available slots.
*/
def createFilteredSgcRDD[A <: SgcSlot : ClassTag, B <: SgcConnectorParameters : ClassTag]
(sgcConnectorFactory: SgcConnectorFactory[A, B],
parameters: B,
filterSlot: (A) => Boolean,
charset: String = "UTF-8",
maxRetries: Int = 3
): RDD[String] = {
val connector = sgcConnectorFactory.get(parameters)
Retry(maxRetries) {
connector.list()
} match {
case Success(list) => {
val slots: Array[A] = list.filter(s => filterSlot(s))
.sortWith { case (a, b) => a.compare(b) < 0 }
.toArray[A]
logInfo(s"Detected slots[${slots.mkString(",")}] to process")
if (slots.length == 0) {
sc.emptyRDD[String]
}
else {
new SgcRDD[A, B](sc, slots, sgcConnectorFactory, parameters, charset, maxRetries)
}
}
case Failure(e) => {
logError(s"Error getting slots to process", e)
sc.emptyRDD[String]
}
}
}
}
|
alvsanand/spark-generic-connector
|
sgc-spark/src/main/scala/org/apache/spark/streaming/sgc/SgcContext.scala
|
Scala
|
apache-2.0
| 12,249 |
import scala.tools.nsc.doc.model._
import scala.tools.partest.ScaladocModelTest
object Test extends ScaladocModelTest {
override def code = s"""
/**
* Some scaladoc that contains string literal of HTML
* {{{
* val button = "<button>My Button</button>"
* }}}
*/
object Foo
"""
def scaladocSettings = ""
def testModel(root: Package) = {
import access._
val obj = root._object("Foo")
val safeTagMarker = '\\u000E'
obj.comment.get.body.blocks.foreach({
      case block: scala.tools.nsc.doc.base.comment.Code => assert(!block.data.exists(_ == safeTagMarker), s"there should be no safeTagMarkers left in the String, found at least one $safeTagMarker in the string: ${block.data}")
case _ => // do nothing
})
}
}
|
scala/scala
|
test/scaladoc/run/t11424.scala
|
Scala
|
apache-2.0
| 774 |
package controllers
import javax.inject.Singleton
import org.slf4j.{Logger, LoggerFactory}
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.json._
import play.api.mvc._
import play.modules.reactivemongo.MongoController
import play.modules.reactivemongo.json.collection.JSONCollection
import reactivemongo.api.Cursor
import scala.concurrent.Future
/**
 * The TopicController encapsulates the REST endpoints and the interaction with MongoDB, via the ReactiveMongo
* play plugin. This provides a non-blocking driver for mongoDB as well as some useful additions for handling JSon.
* @see https://github.com/ReactiveMongo/Play-ReactiveMongo
*/
@Singleton
class TopicController extends Controller with MongoController {
private final val logger: Logger = LoggerFactory.getLogger(classOf[TopicController])
def topicCollection: JSONCollection = db.collection[JSONCollection]("topics")
def footnoteCollection: JSONCollection = db.collection[JSONCollection]("footnotes")
def metaCollection: JSONCollection = db.collection[JSONCollection]("media.meta")
import models.JsonFormats._
import models._
/**
* This Action creates a new topic
*
* @return
*/
def createTopic = Action.async(parse.json) {
request =>
request.body.validate[TopicModel].map {
topic =>
topicCollection.insert(topic).map {
lastError =>
logger.debug(s"Successfully inserted with LastError: $lastError")
Created(s"Topic Created")
}
}.getOrElse(Future.successful(BadRequest("invalid json")))
}
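  /*
   * Hypothetical conf/routes entries for this controller (assumed for illustration only,
   * not taken from the original project):
   *
   *   POST   /topics          controllers.TopicController.createTopic
   *   PUT    /topics          controllers.TopicController.updateTopic
   *   GET    /topics          controllers.TopicController.getTopics
   *   GET    /topics/:uID     controllers.TopicController.getTopic(uID: String)
   *   DELETE /topics/:uID     controllers.TopicController.deleteTopic(uID: String)
   */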
/**
* This Action updates a topic with the parsed data in the HTTP body
*
* @return
*/
def updateTopic = Action.async(parse.json) {
request =>
request.body.validate[TopicModel].map {
topic =>
        // Collapse all updated fields into a single "$set" document; repeating the "$set"
        // key within one Json.obj would leave only the last entry effective.
        val modifier = Json.obj("$set" -> Json.obj(
          "group" -> topic.group,
          "name" -> topic.name,
          "createdBy" -> topic.createdBy,
          "content" -> topic.content,
          "status" -> topic.status,
          "constraints" -> topic.constraints,
          "tagStore" -> topic.tagStore,
          "linkedTopics" -> topic.linkedTopics,
          "maxCharThreshold" -> topic.maxCharThreshold,
          "gps" -> topic.gps,
          "metaStore" -> topic.metaStore,
          "nextTextBlock" -> topic.nextTextBlock,
          "topicPicture" -> topic.topicPicture))
topicCollection.update(Json.obj("uID" -> topic.uID), modifier).map {
lastError =>
            logger.debug(s"Successfully updated with LastError: $lastError")
Created(s"Topic has been updated")
}
}.getOrElse(Future.successful(BadRequest("invalid json")))
}
/**
* Returns topics, which have the given status
* @param status
* @return
*/
def getTopicByStatus(status : String) = Action.async {
val cursor: Cursor[TopicModel] = topicCollection.find(Json.obj("status" -> status)).cursor[TopicModel]
val futureTopicList: Future[List[TopicModel]] = cursor.collect[List]()
val futureTopicsJsonArray: Future[JsArray] = futureTopicList.map { topics =>
Json.arr(topics)
}
futureTopicsJsonArray.map {
topics =>
Ok(topics(0))
}
}
/**
* Returns every topic in the db
*
* @return a list that contains every topic as a JSON object
*/
def getTopics = Action.async {
val cursor: Cursor[TopicModel] = topicCollection.find(Json.obj()).cursor[TopicModel]
val futureTopicsList: Future[List[TopicModel]] = cursor.collect[List]()
val futureTopicsJsonArray: Future[JsArray] = futureTopicsList.map { topics =>
Json.arr(topics)
}
futureTopicsJsonArray.map {
topics =>
Ok(topics(0))
}
}
/**
* Returns every topic in the db in the app format
*
* @return a list that contains every topic as a JSON object in the app format
*/
def getTopicsInAppFormat = Action.async {
val cursor: Cursor[TopicModel] = topicCollection.find(Json.obj()).cursor[TopicModel]
val futureTopicsList: Future[List[TopicModel]] = cursor.collect[List]()
val futureTopicsJsonArray: Future[JsArray] = futureTopicsList.map { topics =>
Json.arr(topics)
}
futureTopicsJsonArray.map {
topics =>
def createNewFormat(uID: JsValue, name: JsValue, content: JsValue, lat: JsValue, lng: JsValue): JsObject = {
return Json.obj(
"id" -> uID,
"name" -> name,
"categories" -> new java.util.Date().getTime(),
"description" -> content,
"lat" -> lat,
"lng" -> lng,
"tags" -> "")
}
/* Access the needed attributes */
val uIDs = topics(0).\\("uID")
val names = topics(0).\\("name")
val content = topics(0).\\("content")
val gps = topics(0).\\("gps")
var dataArray = new JsArray()
/* create new Array */
for( i <- 0 to uIDs.length-1){
dataArray = dataArray.append(createNewFormat(uIDs(i),names(i),content(i),gps(i)(0),gps(i)(1)))
}
/* put this into a new JSObject */
var returnThis = Json.obj("data" -> dataArray)
Ok(returnThis)
}
}
/**
* Action reroutes to the correct controller for finding the topic picture
* Used as a wrapper for the format that Timo has used in his bachelor thesis
* @return the topic picture for the given topicID
*/
def getTopicPicture(uIDPlusPost: String) = Action.async {
val topicID = uIDPlusPost.substring(0,uIDPlusPost.lastIndexOf('.'))
val cursor: Cursor[TopicModel] = topicCollection.find(Json.obj("uID"
-> topicID)).cursor[TopicModel]
val futureTopicsList: Future[List[TopicModel]] = cursor.collect[List]()
val futureTopicsJsonArray: Future[JsArray] = futureTopicsList.map { topics =>
Json.arr(topics)
}
futureTopicsJsonArray.map {
topics =>
val pictureID = topics(0).\\("topicPicture").toString()
val firstDoubleQuote = pictureID.indexOf('"') +1
val lastDoubleQuote = pictureID.lastIndexOf('"')
val cleanedPictureID = pictureID.substring(firstDoubleQuote, lastDoubleQuote)
Redirect(routes.FileController.getMediaFile(cleanedPictureID))
}
}
/**
* Returns a topic given by its uID
*
   * @return a list that contains the requested topic as a JSON object
*/
def getTopic(uID : String) = Action.async {
val cursor: Cursor[TopicModel] = topicCollection.find(Json.obj("uID" -> uID)).cursor[TopicModel]
val futureTopicList: Future[List[TopicModel]] = cursor.collect[List]()
val futureTopicsJsonArray: Future[JsArray] = futureTopicList.map { topics =>
Json.arr(topics)
}
futureTopicsJsonArray.map {
topics =>
Ok(topics(0))
}
}
/**
   * Returns the topics created by the user with the given uID. Can also be used to return the
   * subtopics given by their parent's uID.
   *
   * @return a list that contains the matching topics as JSON objects
*/
def getTopicByUser(uID : String) = Action.async {
val cursor: Cursor[TopicModel] = topicCollection.find(Json.obj("createdBy" -> uID)).cursor[TopicModel]
val futureTopicList: Future[List[TopicModel]] = cursor.collect[List]()
val futureTopicsJsonArray: Future[JsArray] = futureTopicList.map { topics =>
Json.arr(topics)
}
futureTopicsJsonArray.map {
topics =>
Ok(topics(0))
}
}
/**
* This Action returns the footnotes of a given topic
*
* @param uID
* @return
*/
def getFootnotesByTopic(uID: String) = Action.async {
val cursor: Cursor[FootnoteModel] = footnoteCollection.find(Json.obj("linkedToTopic" -> uID)).cursor[FootnoteModel]
val futureFootnotesList: Future[List[FootnoteModel]] = cursor.collect[List]()
val futureFootnotesJsonArray: Future[JsArray] = futureFootnotesList.map { footnotes =>
Json.arr(footnotes)
}
futureFootnotesJsonArray.map {
footnotes =>
Ok(footnotes(0))
}
}
/**
* This Action stores a new footnote in the database
*
* @return
*/
def storeFootnote = Action.async(parse.json) {
request =>
request.body.validate[FootnoteModel].map {
footnote =>
footnoteCollection.insert(footnote).map {
lastError =>
logger.debug(s"Successfully inserted with LastError: $lastError")
Created(s"Footnote Created")
}
}.getOrElse(Future.successful(BadRequest("invalid json")))
}
/**
* This Action deletes a given footnote from the database
*
* @param uID the uId of the footnote that should be removed
* @return
*/
def deleteFootnote(uID: String) = Action.async {
/* delete main topic from DB */
footnoteCollection.remove(Json.obj("uID" -> uID)).map {
lastError =>
Created(s"Item removed")
}
}
/**
* This Action deletes a given topic from the database
*
* @param uID the uID of the topic that should be removed
* @return
*/
def deleteTopic(uID: String) = Action.async {
/* delete main topic from DB */
topicCollection.remove(Json.obj("uID" -> uID)).map {
lastError =>
Created(s"Item removed")
}
}
/**
* This Action returns the media files for a given topic
*
* @param topicID topicID of the topic that contains the media files
* @return
*/
def getMediaForTopic(topicID: String) = Action.async {
val cursor: Cursor[MetadataModel] = metaCollection.find(Json.obj("topic" -> topicID)).cursor[MetadataModel]
val futureMediaList: Future[List[MetadataModel]] = cursor.collect[List]()
val futureMediaJsonArray: Future[JsArray] = futureMediaList.map { footnotes =>
Json.arr(footnotes)
}
futureMediaJsonArray.map {
footnotes =>
Ok(footnotes(0))
}
}
/**
   * This Action changes the store ID of the KV-Store used for the given media file or picture
*
* @param uID of the image/media file
* @param storeID of the new KVStore
* @return
*/
def updateKVStore(uID: String, storeID: String) = Action.async {
val modifier = Json.obj("$set" -> Json.obj("kvStore" -> storeID))
metaCollection.update(Json.obj("uID" -> uID), modifier).map {
lastError =>
        logger.debug(s"Successfully updated with LastError: $lastError")
Created(s"StoreID has been changed")
Ok("");
}
}
}
|
HiP-App/HiPBackend
|
app/controllers/TopicController.scala
|
Scala
|
apache-2.0
| 11,075 |
object Test {
type A = Int
// Emits the implementation restriction but then proceeds to crash
// when creating the Foo.unapply.
case class Foo(a: A, b: A, c: A, d: A, e: A, f: A, g: A, h: A, i: A, j: A, k: A, l: A, m: A, n: A, o: A, p: A, q: A, r: A, s: A, t: A, u: A, v: A, w: A, x: A, y: A, Z: A)
}
|
yusuke2255/dotty
|
tests/pending/pos/t7296.scala
|
Scala
|
bsd-3-clause
| 309 |
package edu.cmu.cs.oak.lib.array
import edu.cmu.cs.oak.core.OakInterpreter
import edu.cmu.cs.oak.value.OakValue
import edu.cmu.cs.oak.lib.InterpreterPlugin
import edu.cmu.cs.oak.lib.InterpreterPluginProvider
import edu.cmu.cs.oak.env.Environment
import java.nio.file.Path
import com.caucho.quercus.expr.Expr
import edu.cmu.cs.oak.value.ArrayValue
import edu.cmu.cs.oak.core.SymbolFlag
import edu.cmu.cs.oak.value.SymbolValue
import edu.cmu.cs.oak.env.OakHeap
import edu.cmu.cs.oak.value.IntValue
import com.caucho.quercus.Location
import edu.cmu.cs.oak.value.NullValue
class ArrayShift extends InterpreterPlugin {
override def getName(): String = "array_shift"
override def visit(provider: InterpreterPluginProvider, args: List[OakValue], loc: Location, env: Environment): OakValue = {
val interpreter = provider.asInstanceOf[OakInterpreter]
    /* Assert that the function has at least two arguments */
assert(args.size > 1)
val new_av = new ArrayValue()
args.head match {
case av: ArrayValue => {
// av.array.foreach {
// case (key, ref) => {
// val value = interpreter.call(args(1).toString, env.extract(ref) :: key :: args.slice(2, args.size), loc, env)
// new_av.set(key, value, env)
// }
// }
// new_av
av
}
case _ => {
NullValue
}
}
}
}
|
smba/oak
|
edu.cmu.cs.oak/src/main/scala/edu/cmu/cs/oak/lib/array/ArrayShift.scala
|
Scala
|
lgpl-3.0
| 1,379 |