| code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5-1M) |
|---|---|---|---|---|---|
package com.github.diegopacheco.scala.playground.akka.cluster.scheduller
import akka.actor.Actor
import scala.concurrent.duration._
import akka.actor.ActorSystem
import akka.actor.Props
import com.typesafe.config.ConfigFactory
import akka.cluster.Cluster
class ScheduleInConstructor extends Actor {
import context.dispatcher
val tick = context.system.scheduler.schedule(500 millis, 1000 millis, self, "tick")
override def postStop() = tick.cancel()
def receive = {
case "tick" => println("tick " + System.currentTimeMillis())
}
}
object TickApp extends App {
def bootup(port: String): ActorSystem = {
val config = ConfigFactory.parseString("akka.remote.netty.tcp.port=" + port).withFallback(ConfigFactory.load())
val system = ActorSystem("ClusterSystem", config)
system
}
val node1 = bootup("2551")
val node2 = bootup("2552")
val node3 = bootup("0")
Thread.sleep(6000)
node1.actorOf(Props[ScheduleInConstructor], name = "TickActor")
}
| diegopacheco/scala-playground | scala-akka-cluster-frontend-backend/src/main/scala/com/github/diegopacheco/scala/playground/akka/cluster/scheduller/TickApp.scala | Scala | unlicense | 984 |
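The file above schedules a repeating tick from an actor's constructor. For comparison, here is a minimal, self-contained sketch that uses the same scheduler API for a one-shot message via scheduleOnce; the package, actor, and system names are illustrative and not taken from the repository above.

package com.example.ticks
import akka.actor.{Actor, ActorSystem, Props}
import scala.concurrent.duration._
// Sends itself a single "wake" message shortly after construction,
// instead of the repeating tick used by ScheduleInConstructor above.
class WakeOnce extends Actor {
  import context.dispatcher // execution context required by the scheduler
  context.system.scheduler.scheduleOnce(500.millis, self, "wake")
  def receive = {
    case "wake" => println("woke at " + System.currentTimeMillis())
  }
}
object WakeOnceApp extends App {
  val system = ActorSystem("WakeOnceSystem")
  system.actorOf(Props[WakeOnce], name = "wake-once")
}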
package im.actor.server.http
import akka.actor.ActorSystem
import akka.http.scaladsl.unmarshalling.{ FromRequestUnmarshaller, Unmarshal, Unmarshaller }
import akka.stream.Materializer
import de.heikoseeberger.akkahttpplayjson.PlayJsonSupport
import im.actor.api.rpc.{ AuthData, ClientData, PeersImplicits }
import im.actor.api.rpc.counters.UpdateCountersChanged
import im.actor.api.rpc.messaging._
import im.actor.api.rpc.misc.ResponseSeq
import im.actor.server._
import im.actor.server.api.http.json.Text
import im.actor.server.api.rpc.service.groups.{ GroupInviteConfig, GroupsServiceImpl }
import im.actor.server.api.rpc.service.messaging
import im.actor.server.api.rpc.service.messaging.{ CommandParser, ReverseHooksListener }
import im.actor.server.group.{ GroupExtension, GroupServiceMessages }
import im.actor.server.migrations.IntegrationTokenMigrator
import im.actor.server.webhooks.http.routes.WebhooksHttpHandler
import play.api.libs.json.Json
import shardakka.{ IntCodec, ShardakkaExtension }
import scala.concurrent.ExecutionContext
class WebhookHandlerSpec
extends BaseAppSuite
with GroupsServiceHelpers
with MessageParsing
with PeersImplicits
with ImplicitSequenceService
with ImplicitSessionRegion
with ImplicitAuthService
with SeqUpdateMatchers {
behavior of "WebhookHandler"
it should "create group bot on group creation" in t.createGroupAndBot()
it should "allow bot to send message to its group" in t.sendInGroup()
"Integration Token Migrator" should "migrate integration tokens to key value" in t.tokenMigration()
"Reverse hooks listener" should "forward text messages in group to registered webhook" in t.reverseHooks()
val groupInviteConfig = GroupInviteConfig("http://actor.im")
implicit val groupsService = new GroupsServiceImpl(groupInviteConfig)
implicit val messagingService = messaging.MessagingServiceImpl()
private val groupExt = GroupExtension(system)
object t {
val (user1, authId1, authSid1, _) = createUser()
val (user2, authId2, authSid2, _) = createUser()
val sessionId = createSessionId()
implicit val clientData = ClientData(authId1, sessionId, Some(AuthData(user1.id, authSid1, 42)))
def createGroupAndBot() = {
val groupOutPeer = createGroup("Bot test group", Set(user2.id)).groupPeer
whenReady(db.run(persist.GroupBotRepo.findByGroup(groupOutPeer.groupId))) { optBot ⇒
optBot shouldBe defined
val bot = optBot.get
bot.groupId shouldEqual groupOutPeer.groupId
}
}
def sendInGroup() = {
val handler = new WebhooksHttpHandler()
val groupResponse = createGroup("Bot test group", Set(user2.id))
val groupOutPeer = groupResponse.groupPeer
val initSeq = groupResponse.seq
val initState = groupResponse.state
Thread.sleep(1000)
val token = whenReady(groupExt.getIntegrationToken(groupOutPeer.groupId, user1.id)) { optToken ⇒
optToken shouldBe defined
optToken.get
}
val firstMessage = Text("Alert! All tests are failed!")
whenReady(handler.send(firstMessage, token)) { _ ⇒
expectUpdate(initSeq, classOf[UpdateMessage]) { upd ⇒
upd.message shouldEqual ApiTextMessage(firstMessage.text, Vector.empty, None)
}
expectUpdate(initSeq, classOf[UpdateCountersChanged])(identity)
}
val (seq1, state1) = whenReady(sequenceService.handleGetState(Vector.empty)) { resp ⇒
val ResponseSeq(seq, state) = resp.toOption.get
(seq, state)
}
val secondMessage = Text("It's ok now!")
whenReady(handler.send(secondMessage, token)) { _ ⇒
expectUpdate(seq1, classOf[UpdateMessage]) { upd ⇒
upd.message shouldEqual ApiTextMessage(secondMessage.text, Vector.empty, None)
}
expectUpdate(seq1, classOf[UpdateCountersChanged])(identity)
}
}
def tokenMigration() = {
val groups = for (i ← 1 to 10) yield {
createGroup(s"$i", Set(user2.id)).groupPeer
}
IntegrationTokenMigrator.migrate()
val kv = ShardakkaExtension(system).simpleKeyValue[Int](KeyValueMappings.IntegrationTokens, IntCodec)
groups foreach { group ⇒
val token = whenReady(groupExt.getIntegrationToken(group.groupId, user1.id)) { optToken ⇒
optToken shouldBe defined
optToken.get
}
whenReady(kv.get(token)) { optGroupId ⇒
optGroupId shouldBe defined
optGroupId shouldEqual Some(group.groupId)
}
}
}
def reverseHooks() = {
val handler = new WebhooksHttpHandler()
val hook3000 = new DummyHookListener(3000)
val hook4000 = new DummyHookListener(4000)
val group = createGroup(s"Reverse hooks group", Set(user2.id)).groupPeer
ReverseHooksListener.startSingleton()
val token = whenReady(groupExt.getIntegrationToken(group.groupId)) { optToken ⇒
optToken shouldBe defined
optToken.get
}
whenReady(handler.register(token, "http://localhost:3000"))(_.isRight shouldBe true)
whenReady(handler.register(token, "http://localhost:4000"))(_.isRight shouldBe true)
Thread.sleep(4000)
val sendText = List("/task jump", "/task eat", "/command sleep", "/command die")
object Parser extends CommandParser
val commands = sendText map Parser.parseCommand
whenReady(messagingService.handleSendMessage(group.asOutPeer, 1L, ApiTextMessage(sendText.head, Vector.empty, None), None, None))(_ ⇒ ())
whenReady(messagingService.handleSendMessage(group.asOutPeer, 2L, GroupServiceMessages.changedTitle("xx"), None, None))(_ ⇒ ())
whenReady(messagingService.handleSendMessage(group.asOutPeer, 3L, ApiTextMessage(sendText(1), Vector.empty, None), None, None))(_ ⇒ ())
whenReady(messagingService.handleSendMessage(group.asOutPeer, 4L, ApiJsonMessage("Some info"), None, None))(_ ⇒ ())
whenReady(messagingService.handleSendMessage(group.asOutPeer, 5L, ApiTextMessage(sendText(2), Vector.empty, None), None, None))(_ ⇒ ())
whenReady(messagingService.handleSendMessage(group.asOutPeer, 6L, ApiDocumentMessage(1L, 2L, 1, "", "", None, None), None, None))(_ ⇒ ())
whenReady(messagingService.handleSendMessage(group.asOutPeer, 7L, ApiTextMessage(sendText(3), Vector.empty, None), None, None))(_ ⇒ ())
Thread.sleep(4000)
val messages3000 = hook3000.getMessages
messages3000 should have size 4
messages3000.map(m ⇒ Some(m.command → m.text)) should contain theSameElementsAs commands
val messages4000 = hook4000.getMessages
messages4000 should have size 4
messages4000.map(m ⇒ Some(m.command → m.text)) should contain theSameElementsAs commands
}
}
final class DummyHookListener(port: Int)(implicit system: ActorSystem, materializer: Materializer) extends PlayJsonSupport {
import akka.http.scaladsl.Http
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.unmarshalling.PredefinedFromEntityUnmarshallers._
import im.actor.server.api.rpc.service.messaging.ReverseHooksWorker._
implicit val ec: ExecutionContext = system.dispatcher
implicit val toMessage: FromRequestUnmarshaller[MessageToWebhook] = Unmarshaller { implicit ec ⇒ req ⇒
Unmarshal(req.entity).to[String].map { body ⇒
Json.parse(body).as[MessageToWebhook]
}
}
private var messages = scala.collection.mutable.Set.empty[MessageToWebhook]
def getMessages = messages
def clean() = messages = scala.collection.mutable.Set.empty[MessageToWebhook]
private def routes: Route =
post {
entity(as[List[MessageToWebhook]]) { received ⇒
received should have length 1
messages += received.head
complete("{}")
}
}
Http().bind("0.0.0.0", port).runForeach { connection ⇒
connection handleWith Route.handlerFlow(routes)
}
}
}
| ljshj/actor-platform | actor-server/actor-tests/src/test/scala/im/actor/server/http/WebhookHandlerSpec.scala | Scala | mit | 8,020 |
package org.jetbrains.plugins.scala.lang.psi
import com.intellij.psi._
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.TypeParamIdOwner
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScTypeAlias, ScTypeAliasDefinition}
import org.jetbrains.plugins.scala.lang.psi.types.api.ScTypePresentation.shouldExpand
import org.jetbrains.plugins.scala.lang.psi.types.api.designator.{DesignatorOwner, ScDesignatorType, ScProjectionType, ScThisType}
import org.jetbrains.plugins.scala.lang.psi.types.api.{TypeParameterType, _}
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.NonValueType
import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.AfterUpdate.{ProcessSubtypes, ReplaceWith}
import org.jetbrains.plugins.scala.lang.psi.types.recursiveUpdate.ScSubstitutor
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.refactoring.util.ScTypeUtil.AliasType
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.util.ScEquivalenceUtil.areClassesEquivalent
import scala.util.control.NoStackTrace
/**
* @author adkozlov
*/
package object types {
implicit class ScTypeExt(val scType: ScType) extends AnyVal {
private def typeSystem = scType.typeSystem
private def projectContext = scType.projectContext
private def stdTypes = projectContext.stdTypes
def equiv(`type`: ScType): Boolean = {
typeSystem.equiv(scType, `type`)
}
def equiv(`type`: ScType, constraints: ConstraintSystem, falseUndef: Boolean = true): ConstraintsResult = {
typeSystem.equivInner(scType, `type`, constraints, falseUndef)
}
def conforms(`type`: ScType): Boolean = {
typeSystem.conformsInner(`type`, scType).isRight
}
def weakConforms(`type`: ScType): Boolean = {
typeSystem.conformsInner(`type`, scType, checkWeak = true).isRight
}
def conformanceSubstitutor(`type`: ScType): Option[ScSubstitutor] = {
implicit val context: ProjectContext = `type`.projectContext
conforms(`type`, ConstraintSystem.empty) match {
case ConstraintSystem(substitutor) => Some(substitutor)
case _ => None
}
}
def conforms(`type`: ScType,
constraints: ConstraintSystem,
checkWeak: Boolean = false): ConstraintsResult = {
typeSystem.conformsInner(`type`, scType, constraints = constraints, checkWeak = checkWeak)
}
def glb(`type`: ScType, checkWeak: Boolean = false): ScType = {
typeSystem.glb(scType, `type`, checkWeak)
}
def lub(`type`: ScType, checkWeak: Boolean = true): ScType = {
typeSystem.lub(scType, `type`, checkWeak)
}
def isBoolean: Boolean = scType == stdTypes.Boolean
def isAny: Boolean = scType == stdTypes.Any
def isAnyRef: Boolean = scType == stdTypes.AnyRef
def isAnyVal: Boolean = scType == stdTypes.AnyVal
def isNothing: Boolean = scType == stdTypes.Nothing
def isUnit: Boolean = scType == stdTypes.Unit
def isNull: Boolean = scType == stdTypes.Null
def isPrimitive: Boolean = scType match {
case v: ValType => !isUnit
case _ => false
}
def removeUndefines(): ScType = scType.updateRecursively {
case _: UndefinedType => stdTypes.Any
}
def removeVarianceAbstracts(): ScType = {
var index = 0
scType.recursiveVarianceUpdate({
case (ScAbstractType(_, lower, upper), v) =>
v match {
case Contravariant => ReplaceWith(lower)
case Covariant => ReplaceWith(upper)
case Invariant =>
index += 1
ReplaceWith(ScExistentialArgument(s"_$$$index", Nil, lower, upper))
}
case _ => ProcessSubtypes
}, Covariant).unpackedType
}
def toPsiType: PsiType = typeSystem.toPsiType(scType)
/**
* Returns the named element associated with the type.
* If expandAliases is true, expands alias definitions first.
*
* @param expandAliases whether to expand alias definitions
* @return element and substitutor
*/
def extractDesignatedType(expandAliases: Boolean): Option[(PsiNamedElement, ScSubstitutor)] = {
new DesignatorExtractor(expandAliases, needSubstitutor = true)
.extractFrom(scType)
}
def extractDesignated(expandAliases: Boolean): Option[PsiNamedElement] = {
new DesignatorExtractor(expandAliases, needSubstitutor = false)
.extractFrom(scType).map(_._1)
}
def extractClassType: Option[(PsiClass, ScSubstitutor)] = {
new ClassTypeExtractor(needSubstitutor = true)
.extractFrom(scType)
}
def extractClass: Option[PsiClass] = {
new ClassTypeExtractor(needSubstitutor = false)
.extractFrom(scType).map(_._1)
}
//performance critical method!
//may return None even if extractClass is not empty
@scala.annotation.tailrec
final def extractClassSimple(visited: Set[ScTypeAlias] = Set.empty): Option[PsiClass] = scType match {
case ScDesignatorType(c: PsiClass) => Some(c)
case _: StdType => None
case ParameterizedType(des, _) => des.extractClassSimple(visited)
case ScProjectionType(_, c: PsiClass) => Some(c)
case ScProjectionType(_, ta: ScTypeAliasDefinition) if !visited.contains(ta) => ta.aliasedType.toOption match {
case Some(t) => t.extractClassSimple(visited + ta.physical)
case _ => None
}
case ScThisType(td) => Some(td)
case _ => None
}
//performance critical method!
def canBeSameOrInheritor(t: ScType): Boolean = checkSimpleClasses(t,
(c1, c2) => c1.sameOrInheritor(c2)
)
//performance critical method!
def canBeSameClass(t: ScType): Boolean = checkSimpleClasses(t, areClassesEquivalent)
private def checkSimpleClasses(t: ScType, condition: (PsiClass, PsiClass) => Boolean) = {
(scType.extractClassSimple(), t.extractClassSimple()) match {
case (Some(c1), Some(c2)) if !condition(c1, c2) => false
case _ => true
}
}
def removeAliasDefinitions(expandableOnly: Boolean = false): ScType = {
def needExpand(ta: ScTypeAliasDefinition) = !expandableOnly || shouldExpand(ta)
def innerUpdate(tp: ScType, visited: Set[ScType]): ScType = {
tp.recursiveUpdate {
`type` => `type`.isAliasType match {
case Some(AliasType(ta: ScTypeAliasDefinition, _, Failure(_))) if needExpand(ta) =>
ReplaceWith(projectContext.stdTypes.Any)
case Some(AliasType(ta: ScTypeAliasDefinition, _, Right(upper))) if needExpand(ta) =>
if (visited.contains(`type`)) throw RecursionException
val updated =
try innerUpdate(upper, visited + `type`)
catch {
case RecursionException =>
if (visited.nonEmpty) throw RecursionException
else `type`
}
ReplaceWith(updated)
case _ => ProcessSubtypes
}
}
}
innerUpdate(scType, Set.empty)
}
def extractDesignatorSingleton: Option[ScType] = scType match {
case designatorOwner: DesignatorOwner => designatorOwner.designatorSingletonType
case _ => None
}
def tryExtractDesignatorSingleton: ScType = extractDesignatorSingleton.getOrElse(scType)
def hasRecursiveTypeParameters[T](typeParamIds: Set[Long]): Boolean = scType.subtypeExists {
case tpt: TypeParameterType =>
typeParamIds.contains(tpt.typeParamId)
case _ => false
}
def widen: ScType = scType match {
case lit: ScLiteralType if lit.allowWiden => lit.wideType
case other => other
}
def tryWrapIntoSeqType(implicit scope: ElementScope): ScType =
scope
.getCachedClass("scala.collection.Seq")
.map(ScalaType.designator)
.map(ScParameterizedType(_, Seq(scType)))
.getOrElse(scType)
def tryUnwrapSeqType: ScType = scType match {
case ParameterizedType(ScDesignatorType(des: PsiClass), Seq(targ))
if des.qualifiedName == "scala.collection.Seq" =>
targ
case _ => scType
}
}
implicit class ScTypesExt(val types: Seq[ScType]) extends AnyVal {
def glb(checkWeak: Boolean = false)(implicit project: ProjectContext): ScType = {
project.typeSystem.glb(types, checkWeak)
}
def lub(checkWeak: Boolean = true)(implicit project: ProjectContext): ScType = {
project.typeSystem.lub(types, checkWeak)
}
}
private trait Extractor[T <: PsiNamedElement] {
def filter(named: PsiNamedElement, subst: ScSubstitutor): Option[(T, ScSubstitutor)]
def expandAliases: Boolean
def needSubstitutor: Boolean
def extractFrom(scType: ScType,
visitedAliases: Set[ScTypeAlias] = Set.empty): Option[(T, ScSubstitutor)] = {
def needExpand(definition: ScTypeAliasDefinition) = expandAliases && !visitedAliases(definition)
scType match {
case nonValueType: NonValueType =>
extractFrom(nonValueType.inferValueType, visitedAliases)
case thisType: ScThisType => filter(thisType.element, ScSubstitutor(thisType))
case projType: ScProjectionType =>
val actualSubst = projType.actualSubst
val actualElement = projType.actualElement
actualElement match {
case definition: ScTypeAliasDefinition if needExpand(definition) =>
definition.aliasedType.toOption match {
case Some(ParameterizedType(des, _)) if !needSubstitutor =>
extractFrom(actualSubst.subst(des), visitedAliases + definition.physical)
case Some(tp) =>
extractFrom(actualSubst.subst(tp), visitedAliases + definition.physical)
case _ => None
}
case _ => filter(actualElement, actualSubst)
}
case designatorOwner: DesignatorOwner =>
designatorOwner.element match {
case definition: ScTypeAliasDefinition if needExpand(definition) =>
definition.aliasedType.toOption.flatMap {
extractFrom(_, visitedAliases + definition.physical)
}
case elem => filter(elem, ScSubstitutor.empty)
}
case parameterizedType: ParameterizedType =>
extractFrom(parameterizedType.designator, visitedAliases).map {
case (element, substitutor) =>
val withFollower = if (needSubstitutor) substitutor.followed(parameterizedType.substitutor) else ScSubstitutor.empty
(element, withFollower)
}
case stdType: StdType =>
stdType.syntheticClass.flatMap {
filter(_, ScSubstitutor.empty)
}
case ScExistentialType(quantified, _) =>
extractFrom(quantified, visitedAliases)
case TypeParameterType.ofPsi(psiTypeParameter) =>
filter(psiTypeParameter, ScSubstitutor.empty)
case _ => None
}
}
}
private class DesignatorExtractor(override val expandAliases: Boolean, override val needSubstitutor: Boolean) extends Extractor[PsiNamedElement] {
override def filter(named: PsiNamedElement, subst: ScSubstitutor): Option[(PsiNamedElement, ScSubstitutor)] =
Some(named, subst)
}
private class ClassTypeExtractor(override val needSubstitutor: Boolean) extends Extractor[PsiClass] {
override def filter(named: PsiNamedElement, subst: ScSubstitutor): Option[(PsiClass, ScSubstitutor)] =
named match {
case _: PsiTypeParameter => None
case c: PsiClass => Some(c, subst)
case _ => None
}
override val expandAliases: Boolean = true
}
private object RecursionException extends NoStackTrace
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/types/package.scala | Scala | apache-2.0 | 11,864 |
/**
* ____ __ ____ ____ ____,,___ ____ __ __ ____
* ( _ \ /__\ (_ )(_ _)( ___)/ __) ( _ \( )( )( _ \ Read
* ) / /(__)\ / /_ _)(_ )__) \__ \ )___/ )(__)( ) _ < README.txt
* (_)\_)(__)(__)(____)(____)(____)(___/ (__) (______)(____/ LICENSE.txt
*/
package razie.audit
import com.mongodb.casbah.Imports._
import com.novus.salat.grater
import org.bson.types.ObjectId
import org.joda.time.DateTime
import razie.Logging
import razie.db.RazSalatContext.ctx
import razie.db.{ROne, RazMongo}
import razie.wiki.{BasicServices}
/**
* razie's default Audit implementation - stores the events in a Mongo table. Use this as an example to write your own auditing service.
*
* Upon review, move them to the cleared/history table and purge them periodically.
*/
class MdbAuditService extends AuditService with Logging {
/** log a db operation */
def logdb(what: String, details: Any*) = {
val d = details.mkString(",")
BasicServices ! Audit("a", what, d)
val s = what + " " + d
razie.Log.audit(s)
s
}
/** log a db operation */
def logdbWithLink(what: String, link: String, details: Any*) = {
val d = details.mkString(",")
BasicServices ! Audit("a", what, d, Some(link))
val s = what + " " + d
razie.Log.audit(s)
s
}
}
object ClearAudits {
/** move from review to archive. archive is purged separately. */
def clearAudit(id: String, userId: String) = {
ROne[Audit](new ObjectId(id)) map { ae =>
val o = grater[Audit].asDBObject(ae)
o.putAll(Map("clearedBy" -> userId, "clearedDtm" -> DateTime.now))
RazMongo("AuditCleared") += o
RazMongo("Audit").remove(Map("_id" -> new ObjectId(id)))
}
}
}
| razie/wikireactor | diesel/src/main/scala/razie/audit/MdbAuditService.scala | Scala | apache-2.0 | 1,742 |
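The comment above invites readers to use MdbAuditService as a template for their own auditing service. A minimal log-only variant might look like the sketch below; it assumes, based on the implementation above, that AuditService requires just logdb and logdbWithLink, and that it would live alongside MdbAuditService in the razie.audit package. The class name is illustrative.

package razie.audit
import razie.Logging
// Hypothetical AuditService that only writes to the application log,
// mirroring the two methods implemented by MdbAuditService above.
class ConsoleAuditService extends AuditService with Logging {
  /** log a db operation */
  def logdb(what: String, details: Any*) = {
    val s = what + " " + details.mkString(",")
    razie.Log.audit(s)
    s
  }
  /** log a db operation with a link */
  def logdbWithLink(what: String, link: String, details: Any*) = {
    val s = what + " " + details.mkString(",") + " -> " + link
    razie.Log.audit(s)
    s
  }
}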
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.TreeNodeCMC
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/e-Evolution
* Created by [email protected] , www.e-evolution.com
*/
/**
* Tree Node CMC Repository
* @param session
* @param executionContext
*/
class TreeNodeCMCRepository (session: JdbcSession)(implicit executionContext: ExecutionContext)
extends api.repository.TreeNodeCMCRepository[TreeNodeCMC , Int]
with TreeNodeCMCMapping {
def getById(id: Int): Future[TreeNodeCMC] = {
getByNode(id , 0)
}
def getByNode(id: Int , node : Int): Future[TreeNodeCMC] = {
Future(run(queryTreeNodeCMC.filter(treeNodeCMC => treeNodeCMC.treeId == lift(id)
&& treeNodeCMC.nodeId == lift(node))).headOption.get)
}
def getByUUID(uuid: UUID): Future[TreeNodeCMC] = {
Future(run(queryTreeNodeCMC.filter(_.uuid == lift(uuid.toString))).headOption.get)
}
def getByTreeNodeCMCId(id : Int) : Future[List[TreeNodeCMC]] = {
Future(run(queryTreeNodeCMC))
}
def getAll() : Future[List[TreeNodeCMC]] = {
Future(run(queryTreeNodeCMC))
}
def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[TreeNodeCMC]] = {
val offset = page * pageSize
val limit = (page + 1) * pageSize
for {
count <- countTreeNodeCMC()
elements <- if (offset > count) Future.successful(Nil)
else selectTreeNodeCMC(offset, limit)
} yield {
PaginatedSequence(elements, page, pageSize, count)
}
}
private def countTreeNodeCMC() = {
Future(run(queryTreeNodeCMC.size).toInt)
}
private def selectTreeNodeCMC(offset: Int, limit: Int): Future[Seq[TreeNodeCMC]] = {
Future(run(queryTreeNodeCMC).drop(offset).take(limit).toSeq)
}
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/TreeNodeCMCRepository.scala | Scala | gpl-3.0 | 2,901 |
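A quick worked example of the paging arithmetic in getAllByPage above (the values are illustrative):

// page = 2, pageSize = 10
// offset = page * pageSize       = 20
// limit  = (page + 1) * pageSize = 30
// selectTreeNodeCMC(20, 30) drops the first 20 rows and then takes up to `limit` rows,
// and the resulting PaginatedSequence carries the elements plus (page, pageSize, count).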
package org.clulab.twitter4food.util
import java.io.File
import com.typesafe.config.ConfigFactory
import org.slf4j.LoggerFactory
import scala.util.Random
/**
* Test for one-tailed statistical significance of all systems compared to a named baseline
* @author Dane Bell
*/
object BootstrapSignificance {
def bss(gold: Seq[String],
baseline: Seq[String],
predicted: Seq[String],
label: String,
reps: Int = 10000): Double = {
val betterThanBaseline = Array.fill[Double](reps)(0)
val pb = new me.tongfei.progressbar.ProgressBar("bootstrap", 100)
pb.start()
pb.maxHint(reps)
pb.setExtraMessage("sampling...")
// for each rep, randomly sample indices once, then compare the baseline's F1 to each other model's
for {
i <- (0 until reps).par
sampleIdx = for (j <- gold.indices) yield Random.nextInt(gold.length - 1) // random sample with replacement
sampleGold = sampleIdx.map(gold.apply)
samplePred = sampleIdx.map(predicted.apply)
sampleBase = sampleIdx.map(baseline.apply)
} {
val predF1 = Eval.f1ForLabel(label)(sampleGold.zip(samplePred))
val baselineF1 = Eval.f1ForLabel(label)(sampleGold.zip(sampleBase))
if (predF1 > baselineF1) betterThanBaseline(i) = 1
pb.step()
}
pb.stop()
1.0 - (betterThanBaseline.sum / reps.toDouble)
}
case class Config(
variable: String = "overweight",
scoreMetric: String = "Overweight",
repetitions:Int = 10000)
def main(args: Array[String]): Unit = {
def parseArgs(args: Array[String]): Config = {
val parser = new scopt.OptionParser[Config]("bootstrapping") {
head("bootstrapping", "0.x")
opt[String]('v', "variable") action { (x, c) =>
c.copy(variable = x)
} text "classifier to evaluate"
opt[String]('s', "scoreMetric") action { (x, c) =>
c.copy(scoreMetric = x)
} text "metric for scoring; can be 'micro', 'macro', or a variable name for unaveraged F1"
opt[Int]('r', "repetitions") action { (x, c) =>
c.copy(repetitions = x)
} text "number of repetitions in bootstrap"
}
parser.parse(args, Config()).get
}
val logger = LoggerFactory.getLogger(this.getClass)
val config = ConfigFactory.load
val params = parseArgs(args)
val baselineFeatures = config.getString(s"classifiers.${params.variable}.baseline")
val predictionDir = new File(config.getString(s"classifiers.${params.variable}.results"))
// Prefer overweightF1 >> microF1 >> macroF1
def scoreMetric(gold: Seq[String], pred: Seq[String]): Double = {
params.scoreMetric match {
case "macro" => Eval.macroOnly(gold.zip(pred))
case "micro" => Eval.microOnly(gold.zip(pred))
case lbl => Eval.f1ForLabel(lbl)(gold.zip(pred))
}
}
// The directory of results files must exist
assert(predictionDir.exists && predictionDir.isDirectory)
val folders = predictionDir.listFiles.filter(_.isDirectory)
// gather each set of (gold data, prediction) from each result available
// IndexedSeq for very frequent indexing later
val predictionsWithGold: Map[String, IndexedSeq[(String, String)]] = (for {
folder <- folders.toSeq
if folder.list.contains("predicted.txt")
predFile = scala.io.Source.fromFile(folder.getPath + "/predicted.txt")
preds = predFile
.getLines
.map(_.stripLineEnd.split("\\t"))
.map(line => (line(0), line(1)))
.toIndexedSeq
.tail // first row is header info
.sortBy(_._1) // gold columns are in different orders, so sort
} yield folder.getName -> preds).toMap
// If the baseline is not present, we can't compare against it.
assert(predictionsWithGold.keys.toSeq.contains(baselineFeatures))
val (gold, baseline) = predictionsWithGold(baselineFeatures).unzip
// Ignore results that have different Gold annotations and thus different users or user order
val comparable = predictionsWithGold.filter(pred => pred._2.unzip._1 == gold)
val incomparable = predictionsWithGold.keySet diff comparable.keySet
if(incomparable.nonEmpty) {
logger.debug(s"""$incomparable did not have the same gold annotations as baseline""")
}
val predictions = comparable.map(featureSet => featureSet._1 -> featureSet._2.unzip._2)
// initialize a buffer for tracking whether each model's F1 exceeds the baseline
val betterThanBaseline: Map[String, scala.collection.mutable.ListBuffer[Double]] = (for {
key <- predictions.keys
} yield key -> new scala.collection.mutable.ListBuffer[Double]).toMap
logger.info(s"classifier: ${params.variable}, repetitions: ${params.repetitions}, models: ${predictions.size}")
val pb = new me.tongfei.progressbar.ProgressBar("bootstrap", 100)
pb.start()
pb.maxHint(params.repetitions * betterThanBaseline.size)
pb.setExtraMessage("sampling...")
// for each rep, randomly sample indices once, then compare the baseline's F1 to each other model's
for {
i <- 0 until params.repetitions
sampleIdx = for (j <- gold.indices) yield Random.nextInt(gold.length - 1) // random sample with replacement
sampleGold = for (j <- sampleIdx) yield gold(j) // ground truth labels for sampled accts
featureSet <- predictions.keys // same sample applied to each eligible featureSet
pred = predictions(featureSet)
samplePred = for (j <- sampleIdx) yield pred(j) // comparison predictions for sampled accts
sampleBase = for (j <- sampleIdx) yield baseline(j) // baseline predictions for sampled accts
} {
val baselineF1 = scoreMetric(sampleGold, sampleBase)
val predF1 = scoreMetric(sampleGold, samplePred)
betterThanBaseline(featureSet).append(if (predF1 > baselineF1) 1.0 else 0.0)
pb.step()
}
pb.stop()
// Calculate all stats just to be sure.
val stats = (for (k <- predictions.keys) yield {
val (eval, macroAvg, microAvg) = Eval.evaluate(gold, predictions(k))
val lbl = if (!Seq("micro","macro").contains(params.scoreMetric)) params.scoreMetric else gold.distinct.sorted.head
k -> (eval(lbl).P, eval(lbl).R, eval(lbl).F, microAvg, macroAvg)
}).toMap
// print out results
println("model\tprecision\trecall\toverweight F1\tmicro F1\tmacro F1\tpval")
betterThanBaseline.toSeq.sortBy(_._1).reverse.foreach{
case (featureSet, isBetter) =>
val baselineLabel = if (featureSet == baselineFeatures) " (baseline)" else ""
println(f"$featureSet$baselineLabel\t${stats(featureSet)._1}%1.4f\t${stats(featureSet)._2}%1.4f\t" +
f"${stats(featureSet)._3}%1.4f\t${stats(featureSet)._4}%1.4f\t${stats(featureSet)._5}%1.4f\t" +
f"${1.0 - isBetter.sum / params.repetitions.toDouble}%1.4f")
}
}
}
| clulab/twitter4food | src/main/scala/org/clulab/twitter4food/util/BootstrapSignificance.scala | Scala | apache-2.0 | 6,898 |
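A small, hypothetical call to the bss helper above, testing whether a new model beats a baseline on a single label. The label names and predictions below are made up for illustration; a small p-value indicates the new model's label F1 exceeds the baseline's more often than chance across bootstrap samples.

import org.clulab.twitter4food.util.BootstrapSignificance
object BssExample extends App {
  // Ground truth and two systems' predictions over the same five accounts
  val gold     = Seq("Overweight", "Notoverweight", "Overweight", "Notoverweight", "Overweight")
  val baseline = Seq("Overweight", "Overweight", "Notoverweight", "Notoverweight", "Overweight")
  val newModel = Seq("Overweight", "Notoverweight", "Overweight", "Notoverweight", "Overweight")
  // One-tailed bootstrap p-value for the "Overweight" label F1
  val p = BootstrapSignificance.bss(gold, baseline, newModel, label = "Overweight", reps = 1000)
  println(f"p = $p%1.4f")
}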
/*-------------------------------------------------------------------------*\
** ScalaCheck **
** Copyright (c) 2007-2015 Rickard Nilsson. All rights reserved. **
** http://www.scalacheck.org **
** **
** This software is released under the terms of the Revised BSD License. **
** There is NO WARRANTY. See the file LICENSE for the full text. **
\*------------------------------------------------------------------------ */
package org.scalacheck
import language.higherKinds
import concurrent.Future
import scala.util.{Failure, Success, Try}
import util.{FreqMap, Buildable}
sealed abstract class Arbitrary[T] {
val arbitrary: Gen[T]
}
/** Defines implicit [[org.scalacheck.Arbitrary]] instances for common types.
* <p>
* ScalaCheck
* uses implicit [[org.scalacheck.Arbitrary]] instances when creating properties
* out of functions with the `Prop.property` method, and when
* the `Arbitrary.arbitrary` method is used. For example, the
* following code requires that there exists an implicit
* `Arbitrary[MyClass]` instance:
* </p>
*
* {{{
* val myProp = Prop.forAll { myClass: MyClass =>
* ...
* }
*
* val myGen = Arbitrary.arbitrary[MyClass]
* }}}
*
* <p>
* The required implicit definition could look like this:
* </p>
*
* {{{
* implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...)
* }}}
*
* <p>
* The factory method `Arbitrary(...)` takes a generator of type
* `Gen[T]` and returns an instance of `Arbitrary[T]`.
* </p>
*
* <p>
* The `Arbitrary` module defines implicit [[org.scalacheck.Arbitrary]]
* instances for common types, for convenient use in your properties and
* generators.
* </p>
*/
object Arbitrary extends ArbitraryLowPriority with ArbitraryArities
/** separate trait to have same priority as ArbitraryArities */
private[scalacheck] sealed trait ArbitraryLowPriority {
import Gen.{const, choose, sized, frequency, oneOf, buildableOf, resize}
import collection.{immutable, mutable}
import java.util.Date
/** Creates an Arbitrary instance */
def apply[T](g: => Gen[T]): Arbitrary[T] = new Arbitrary[T] {
lazy val arbitrary = g
}
/** Returns an arbitrary generator for the type T. */
def arbitrary[T](implicit a: Arbitrary[T]): Gen[T] = a.arbitrary
/**** Arbitrary instances for each AnyVal ****/
/** Arbitrary AnyVal */
implicit lazy val arbAnyVal: Arbitrary[AnyVal] = Arbitrary(oneOf(
arbitrary[Unit], arbitrary[Boolean], arbitrary[Char], arbitrary[Byte],
arbitrary[Short], arbitrary[Int], arbitrary[Long], arbitrary[Float],
arbitrary[Double]
))
/** Arbitrary instance of Boolean */
implicit lazy val arbBool: Arbitrary[Boolean] =
Arbitrary(oneOf(true, false))
/** Arbitrary instance of Int */
implicit lazy val arbInt: Arbitrary[Int] = Arbitrary(
Gen.chooseNum(Int.MinValue, Int.MaxValue)
)
/** Arbitrary instance of Long */
implicit lazy val arbLong: Arbitrary[Long] = Arbitrary(
Gen.chooseNum(Long.MinValue, Long.MaxValue)
)
/** Arbitrary instance of Float */
implicit lazy val arbFloat: Arbitrary[Float] = Arbitrary(
for {
s <- choose(0, 1)
e <- choose(0, 0xfe)
m <- choose(0, 0x7fffff)
} yield java.lang.Float.intBitsToFloat((s << 31) | (e << 23) | m)
)
/** Arbitrary instance of Double */
implicit lazy val arbDouble: Arbitrary[Double] = Arbitrary(
for {
s <- choose(0L, 1L)
e <- choose(0L, 0x7feL)
m <- choose(0L, 0xfffffffffffffL)
} yield java.lang.Double.longBitsToDouble((s << 63) | (e << 52) | m)
)
/** Arbitrary instance of Char */
implicit lazy val arbChar: Arbitrary[Char] = Arbitrary(
Gen.frequency(
(0xD800-Char.MinValue, Gen.choose[Char](Char.MinValue,0xD800-1)),
(Char.MaxValue-0xDFFF, Gen.choose[Char](0xDFFF+1,Char.MaxValue))
)
)
/** Arbitrary instance of Byte */
implicit lazy val arbByte: Arbitrary[Byte] = Arbitrary(
Gen.chooseNum(Byte.MinValue, Byte.MaxValue)
)
/** Arbitrary instance of Short */
implicit lazy val arbShort: Arbitrary[Short] = Arbitrary(
Gen.chooseNum(Short.MinValue, Short.MaxValue)
)
/** Absolutely, totally, 100% arbitrarily chosen Unit. */
implicit lazy val arbUnit: Arbitrary[Unit] = Arbitrary(const(()))
/**** Arbitrary instances of other common types ****/
/** Arbitrary instance of String */
implicit lazy val arbString: Arbitrary[String] =
Arbitrary(arbitrary[List[Char]] map (_.mkString))
/** Arbitrary instance of Date */
implicit lazy val arbDate: Arbitrary[Date] = Arbitrary(for {
l <- arbitrary[Long]
d = new Date
} yield new Date(d.getTime + l))
/** Arbitrary instance of Throwable */
implicit lazy val arbThrowable: Arbitrary[Throwable] =
Arbitrary(oneOf(const(new Exception), const(new Error)))
/** Arbitrary instance of Exception */
implicit lazy val arbException: Arbitrary[Exception] =
Arbitrary(const(new Exception))
/** Arbitrary instance of Error */
implicit lazy val arbError: Arbitrary[Error] =
Arbitrary(const(new Error))
/** Arbitrary BigInt */
implicit lazy val arbBigInt: Arbitrary[BigInt] = {
def chooseBigInt: Gen[BigInt] =
sized((s: Int) => choose(-s, s)) map (x => BigInt(x))
def chooseReallyBigInt: Gen[BigInt] = for {
bi <- chooseBigInt
n <- choose(32,128)
} yield bi << n
Arbitrary(
frequency(
(5, chooseBigInt),
(10, chooseReallyBigInt),
(1, BigInt(0)),
(1, BigInt(1)),
(1, BigInt(-1)),
(1, BigInt(Int.MaxValue) + 1),
(1, BigInt(Int.MinValue) - 1),
(1, BigInt(Long.MaxValue)),
(1, BigInt(Long.MinValue)),
(1, BigInt(Long.MaxValue) + 1),
(1, BigInt(Long.MinValue) - 1)
)
)
}
/** Arbitrary BigDecimal */
implicit lazy val arbBigDecimal: Arbitrary[BigDecimal] = {
import java.math.MathContext._
val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128)
val bdGen = for {
x <- arbBigInt.arbitrary
mc <- mcGen
limit <- const(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0))
scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue)
} yield {
try {
BigDecimal(x, scale, mc)
} catch {
case ae: java.lang.ArithmeticException => BigDecimal(x, scale, UNLIMITED) // Handle the case where scale/precision conflict
}
}
Arbitrary(bdGen)
}
/** Arbitrary java.lang.Number */
implicit lazy val arbNumber: Arbitrary[Number] = {
val gen = Gen.oneOf(
arbitrary[Byte], arbitrary[Short], arbitrary[Int], arbitrary[Long],
arbitrary[Float], arbitrary[Double]
)
Arbitrary(gen map (_.asInstanceOf[Number]))
// XXX TODO - restore BigInt and BigDecimal
// Arbitrary(oneOf(arbBigInt.arbitrary :: (arbs map (_.arbitrary) map toNumber) : _*))
}
/** Generates an arbitrary property */
implicit lazy val arbProp: Arbitrary[Prop] = {
import Prop._
val undecidedOrPassed = forAll { b: Boolean =>
b ==> true
}
Arbitrary(frequency(
(4, falsified),
(4, passed),
(3, proved),
(3, undecidedOrPassed),
(2, undecided),
(1, exception(null))
))
}
/** Arbitrary instance of test parameters */
implicit lazy val arbTestParameters: Arbitrary[Test.Parameters] =
Arbitrary(for {
_minSuccTests <- choose(10,200)
_maxDiscardRatio <- choose(0.2f,10f)
_minSize <- choose(0,500)
sizeDiff <- choose(0,500)
_maxSize <- choose(_minSize, _minSize + sizeDiff)
_workers <- choose(1,4)
} yield Test.Parameters.default
.withMinSuccessfulTests(_minSuccTests)
.withMaxDiscardRatio(_maxDiscardRatio)
.withMinSize(_minSize)
.withMaxSize(_maxSize)
.withWorkers(_workers)
)
/** Arbitrary instance of gen params */
implicit lazy val arbGenParams: Arbitrary[Gen.Parameters] =
Arbitrary(for {
sz <- arbitrary[Int] suchThat (_ >= 0)
} yield Gen.Parameters.default.withSize(sz))
// Specialised collections //
/** Arbitrary instance of scala.collection.BitSet */
implicit lazy val arbBitSet: Arbitrary[collection.BitSet] = Arbitrary(
buildableOf[collection.BitSet,Int](sized(sz => choose(0,sz)))
)
// Higher-order types //
/** Arbitrary instance of [[org.scalacheck.Gen]] */
implicit def arbGen[T](implicit a: Arbitrary[T]): Arbitrary[Gen[T]] =
Arbitrary(frequency(
(5, arbitrary[T] map (const(_))),
(1, Gen.fail)
))
/** Arbitrary instance of the Option type */
implicit def arbOption[T](implicit a: Arbitrary[T]): Arbitrary[Option[T]] =
Arbitrary(sized(n =>
// When n is larger, make it less likely that we generate None,
// but still do it some of the time. When n is zero, we always
// generate None, since it's the smallest value.
frequency(
(n, resize(n / 2, arbitrary[T]).map(Some(_))),
(1, const(None)))))
/** Arbitrary instance of the Either type */
implicit def arbEither[T, U](implicit at: Arbitrary[T], au: Arbitrary[U]): Arbitrary[Either[T, U]] =
Arbitrary(oneOf(arbitrary[T].map(Left(_)), arbitrary[U].map(Right(_))))
/** Arbitrary instance of the Future type */
implicit def arbFuture[T](implicit a: Arbitrary[T]): Arbitrary[Future[T]] =
Arbitrary(Gen.oneOf(arbitrary[T].map(Future.successful), arbitrary[Throwable].map(Future.failed)))
/** Arbitrary instance of the Try type */
implicit def arbTry[T](implicit a: Arbitrary[T]): Arbitrary[Try[T]] =
Arbitrary(Gen.oneOf(arbitrary[T].map(Success(_)), arbitrary[Throwable].map(Failure(_))))
/** Arbitrary instance of any [[org.scalacheck.util.Buildable]] container
* (such as lists, arrays, streams, etc). The maximum size of the container
* depends on the size generation parameter. */
implicit def arbContainer[C[_],T](implicit
a: Arbitrary[T], b: Buildable[T,C[T]], t: C[T] => Traversable[T]
): Arbitrary[C[T]] = Arbitrary(buildableOf[C[T],T](arbitrary[T]))
/** Arbitrary instance of any [[org.scalacheck.util.Buildable]] container
* (such as maps). The maximum size of the container depends on the size
* generation parameter. */
implicit def arbContainer2[C[_,_],T,U](implicit
a: Arbitrary[(T,U)], b: Buildable[(T,U),C[T,U]], t: C[T,U] => Traversable[(T,U)]
): Arbitrary[C[T,U]] = Arbitrary(buildableOf[C[T,U],(T,U)](arbitrary[(T,U)]))
implicit def arbEnum[A <: java.lang.Enum[A]](implicit A: reflect.ClassTag[A]): Arbitrary[A] = {
val values = A.runtimeClass.getEnumConstants.asInstanceOf[Array[A]]
Arbitrary(Gen.oneOf(values))
}
}
| sid-kap/scalacheck | src/main/scala/org/scalacheck/Arbitrary.scala | Scala | bsd-3-clause | 10,828 |
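Following the Scaladoc above, here is a small, hypothetical Arbitrary instance for a user-defined case class, combined from the built-in Arbitrary[Int] instances; Point and PointSpec are illustrative names and not part of ScalaCheck.

import org.scalacheck.{Arbitrary, Prop}
import org.scalacheck.Arbitrary.arbitrary
object PointSpec {
  case class Point(x: Int, y: Int)
  // Build Arbitrary[Point] from the implicit Arbitrary[Int] defined above
  implicit val arbPoint: Arbitrary[Point] = Arbitrary(
    for {
      x <- arbitrary[Int]
      y <- arbitrary[Int]
    } yield Point(x, y)
  )
  // Prop.forAll resolves arbPoint implicitly, as described in the Scaladoc
  val additionCommutes: Prop = Prop.forAll { p: Point => p.x + p.y == p.y + p.x }
}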
package scalangine.engine
trait GameRunner extends ScreenCleaner {
var screenWidth = 640
var screenHeight = 480
}
| sanecommajustin/scalangine | src/main/scala/scalangine/engine/GameRunner.scala | Scala | mit | 116 |
package com.wavesplatform.lang.v1.evaluator
import cats.syntax.functor._
import cats.syntax.either._
import cats.syntax.applicative._
import cats.{Eval, Monad}
import com.wavesplatform.lang.ExecutionError
import com.wavesplatform.lang.v1.compiler.Terms.{EVALUATED, EXPR}
import com.wavesplatform.lang.v1.compiler.Types.TYPE
import monix.eval.Coeval
abstract class ContextfulNativeFunction[C[_[_]]](name: String, resultType: TYPE, args: Seq[(String, TYPE)]) {
def ev[F[_]: Monad](input: (C[F], List[EVALUATED])): F[Either[ExecutionError, EVALUATED]]
final def apply[F[_]: Monad](input: (C[F], List[EVALUATED])): F[Either[ExecutionError, EVALUATED]] = {
try {
ev(input)
} catch {
case _: SecurityException =>
Either
.left[ExecutionError, EVALUATED](
s"""An access to <$name(${args.toSeq.map(a => s"${a._1}: ${a._2}").mkString(", ")}): $resultType> is denied"""
)
.pure[F]
case e: Throwable =>
Either
.left[ExecutionError, EVALUATED](s"""An error during run <$name(${args.toSeq
.map(a => s"${a._1}: ${a._2}")
.mkString(", ")}): $resultType>: ${e.getClass()} ${e.getMessage() match {
case null => e.toString
case msg => msg
}}""")
.pure[F]
}
}
def evaluateExtended[F[_]: Monad](env: C[F], args: List[EVALUATED], availableComplexity: Int): Coeval[F[(Either[ExecutionError, EVALUATED], Int)]] =
Coeval.now(apply((env, args)).map((_, 0)))
}
trait ContextfulUserFunction[C[_[_]]] {
def apply[F[_]: Monad](context: C[F], startArgs: List[EXPR]): EXPR
}
object ContextfulUserFunction {
def pure[C[_[_]]](expr: EXPR): ContextfulUserFunction[C] =
new ContextfulUserFunction[C] {
override def apply[F[_]: Monad](context: C[F], startArgs: List[EXPR]): EXPR = expr
}
}
trait ContextfulVal[C[_[_]]] {
val isPure: Boolean = false
def apply[F[_]: Monad](context: C[F]): Eval[F[Either[ExecutionError, EVALUATED]]]
}
object ContextfulVal {
def fromEval[C[_[_]]](v: Eval[Either[ExecutionError, EVALUATED]]): ContextfulVal[C] =
new ContextfulVal[C] {
override def apply[F[_]: Monad](context: C[F]): Eval[F[Either[ExecutionError, EVALUATED]]] =
v.map(_.pure[F])
}
def pure[C[_[_]]](v: EVALUATED): ContextfulVal[C] =
new ContextfulVal[C] {
override val isPure: Boolean = true
override def apply[F[_]: Monad](context: C[F]): Eval[F[Either[ExecutionError, EVALUATED]]] =
v.asRight[ExecutionError].pure[F].pure[Eval]
}
trait Lifted[C[_[_]]] extends ContextfulVal[C] {
override def apply[F[_]: Monad](context: C[F]): Eval[F[Either[ExecutionError, EVALUATED]]] =
liftF(context).map(_.pure[F])
def liftF[F[_]: Monad](context: C[F]): Eval[Either[ExecutionError, EVALUATED]]
}
}
object Contextful {
type NoContext[_[_]] = Any
def empty[F[_]]: NoContext[F] = ()
}
| wavesplatform/Waves | lang/shared/src/main/scala/com/wavesplatform/lang/v1/evaluator/Contextful.scala | Scala | mit | 2,920 |
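A tiny, hypothetical use of the pure helper above, lifting a constant into a context-independent value. CONST_LONG is assumed to be the evaluated long constant from the Terms module imported at the top of the file; the object name is illustrative.

import com.wavesplatform.lang.v1.compiler.Terms.CONST_LONG
import com.wavesplatform.lang.v1.evaluator.{Contextful, ContextfulVal}
object ContextfulValExample {
  // A value that ignores its context entirely, so NoContext is sufficient
  val answer: ContextfulVal[Contextful.NoContext] = ContextfulVal.pure(CONST_LONG(42L))
}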
package com.twitter.finagle.mysql
import com.twitter.finagle.mysql.transport.{MysqlBuf, Packet}
import com.twitter.io.Buf
import com.twitter.util.{Return, Try}
import scala.collection.immutable.IndexedSeq
sealed trait Result
/**
* A decoder for Results contained in a single packet.
*/
trait Decoder[T <: Result] extends (Packet => Try[T]) {
def apply(packet: Packet): Try[T] = Try(decode(packet))
def decode(packet: Packet): T
}
/**
* First result received from the server as part of the connection phase.
* [[http://dev.mysql.com/doc/internals/en/connection-phase-packets.html]]
*/
object HandshakeInit extends Decoder[HandshakeInit] {
def decode(packet: Packet) = {
val br = MysqlBuf.reader(packet.body)
val protocol = br.readByte()
val bytesVersion = br.readNullTerminatedBytes()
val threadId = br.readIntLE()
val salt1 = Buf.ByteArray.Owned.extract(br.readBytes(8))
br.skip(1) // 1 filler byte always 0x00
// the rest of the fields are optional and protocol version specific
val capLow = if (br.remaining >= 2) br.readUnsignedShortLE() else 0
require(protocol == 10 && (capLow & Capability.Protocol41) != 0,
"unsupported protocol version")
val charset = br.readUnsignedByte().toShort
val status = br.readShortLE().toShort
val capHigh = br.readUnsignedShortLE() << 16
val serverCap = Capability(capHigh, capLow)
// auth plugin data. Currently unused but we could verify
// that our secure connections respect the expected size.
br.skip(1)
// next 10 bytes are all reserved
br.readBytes(10)
val salt2 =
if (!serverCap.has(Capability.SecureConnection)) Array.empty[Byte]
else br.readNullTerminatedBytes()
HandshakeInit(
protocol,
new String(bytesVersion, Charset(charset)),
threadId,
Array.concat(salt1, salt2),
serverCap,
charset,
status
)
}
}
case class HandshakeInit(
protocol: Byte,
version: String,
threadId: Int,
salt: Array[Byte],
serverCap: Capability,
charset: Short,
status: Short
) extends Result
/**
* Represents the OK Packet received from the server. It is sent
* to indicate that a command has completed successfully.
* [[http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-OK_Packet]]
*/
object OK extends Decoder[OK] {
def decode(packet: Packet) = {
val br = MysqlBuf.reader(packet.body)
br.skip(1)
OK(
br.readVariableLong(),
br.readVariableLong(),
br.readUnsignedShortLE(),
br.readUnsignedShortLE(),
new String(br.take(br.remaining))
)
}
}
case class OK(
affectedRows: Long,
insertId: Long,
serverStatus: Int,
warningCount: Int,
message: String
) extends Result
/**
* Represents the Error Packet received from the server and the data sent along with it.
* [[http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-ERR_Packet]]
*/
object Error extends Decoder[Error] {
def decode(packet: Packet) = {
// start reading after flag byte
val br = MysqlBuf.reader(packet.body)
br.skip(1)
val code = br.readShortLE()
val state = new String(br.take(6))
val msg = new String(br.take(br.remaining))
Error(code, state, msg)
}
}
case class Error(code: Short, sqlState: String, message: String) extends Result
/**
* Represents an EOF result received from the server which
* contains any warnings and the server status.
* [[http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet]]
*/
object EOF extends Decoder[EOF] {
def decode(packet: Packet) = {
val br = MysqlBuf.reader(packet.body)
br.skip(1)
EOF(br.readShortLE(), ServerStatus(br.readShortLE()))
}
}
case class EOF(warnings: Short, serverStatus: ServerStatus) extends Result
/**
* Represents the column meta-data associated with a query.
* Sent during ResultSet transmission and as part of the
* meta-data associated with a Row.
* [[http://dev.mysql.com/doc/internals/en/com-query-response.html#packet-Protocol::ColumnDefinition41]]
*/
object Field extends Decoder[Field] {
def decode(packet: Packet): Field = {
val bw = MysqlBuf.reader(packet.body)
val bytesCatalog = bw.readLengthCodedBytes()
val bytesDb = bw.readLengthCodedBytes()
val bytesTable = bw.readLengthCodedBytes()
val bytesOrigTable = bw.readLengthCodedBytes()
val bytesName = bw.readLengthCodedBytes()
val bytesOrigName = bw.readLengthCodedBytes()
bw.readVariableLong() // length of the following fields (always 0x0c)
val charset = bw.readShortLE()
val jCharset = Charset(charset)
val catalog = new String(bytesCatalog, jCharset)
val db = new String(bytesDb, jCharset)
val table = new String(bytesTable, jCharset)
val origTable = new String(bytesOrigTable, jCharset)
val name = new String(bytesName, jCharset)
val origName = new String(bytesOrigName, jCharset)
val length = bw.readIntLE()
val fieldType = bw.readUnsignedByte()
val flags = bw.readShortLE()
val decimals = bw.readByte()
Field(
catalog,
db,
table,
origTable,
name,
origName,
charset,
length,
fieldType,
flags,
decimals
)
}
}
case class Field(
catalog: String,
db: String,
table: String,
origTable: String,
name: String,
origName: String,
charset: Short,
displayLength: Int,
fieldType: Short,
flags: Short,
decimals: Byte
) extends Result {
def id: String = if (name.isEmpty) origName else name
override val toString = "Field(%s)".format(id)
}
/**
* Meta data returned from the server in response to
* a prepared statement initialization request
* COM_STMT_PREPARE.
* [[http://dev.mysql.com/doc/internals/en/com-stmt-prepare-response.html#packet-COM_STMT_PREPARE_OK]]
*/
object PrepareOK extends Decoder[PrepareOK] {
def decode(header: Packet) = {
val br = MysqlBuf.reader(header.body)
br.skip(1)
val stmtId = br.readIntLE()
val numCols = br.readUnsignedShortLE()
val numParams = br.readUnsignedShortLE()
br.skip(1)
val warningCount = br.readUnsignedShortLE()
PrepareOK(stmtId, numCols, numParams, warningCount)
}
}
case class PrepareOK(
id: Int,
numOfCols: Int,
numOfParams: Int,
warningCount: Int,
columns: Seq[Field] = Nil,
params: Seq[Field] = Nil
) extends Result
/**
* Used internally to synthesize a response from
* the server when sending a prepared statement
* CloseRequest
*/
object CloseStatementOK extends OK(0,0,0,0, "Internal Close OK")
/**
* Resultset returned from the server containing field definitions and
* rows. The rows can be binary encoded (for prepared statements)
* or text encoded (for regular queries).
* [[http://dev.mysql.com/doc/internals/en/com-query-response.html#packet-ProtocolText::Resultset]]
* [[http://dev.mysql.com/doc/internals/en/binary-protocol-resultset.html]]
*/
object ResultSet {
def apply(isBinaryEncoded: Boolean)(
header: Packet,
fieldPackets: Seq[Packet],
rowPackets: Seq[Packet]
): Try[ResultSet] = Try(decode(isBinaryEncoded)(header, fieldPackets, rowPackets))
def decode(isBinaryEncoded: Boolean)(header: Packet, fieldPackets: Seq[Packet], rowPackets: Seq[Packet]): ResultSet = {
val fields = fieldPackets.map(Field.decode).toIndexedSeq
decodeRows(isBinaryEncoded, rowPackets, fields)
}
def decodeRows(isBinaryEncoded: Boolean, rowPackets: Seq[Packet], fields: IndexedSeq[Field]): ResultSet = {
// A name -> index map used to allow quick lookups for rows based on name.
val indexMap = fields.map(_.id).zipWithIndex.toMap
/**
* Rows can be encoded as Strings or Binary depending
* on if the ResultSet is created by a normal query or
* a prepared statement, respectively.
*/
val rows = rowPackets.map { p: Packet =>
if (!isBinaryEncoded)
new StringEncodedRow(p.body, fields, indexMap)
else
new BinaryEncodedRow(p.body, fields, indexMap)
}
ResultSet(fields, rows)
}
}
case class ResultSet(fields: Seq[Field], rows: Seq[Row]) extends Result {
override def toString = s"ResultSet(${fields.size}, ${rows.size})"
}
object FetchResult {
def apply(
rowPackets: Seq[Packet],
eofPacket: EOF
): Try[FetchResult] = {
Try {
val containsLastRow: Boolean = eofPacket.serverStatus.has(ServerStatus.LastRowSent)
FetchResult(rowPackets, containsLastRow)
}
}
def apply(
err: Error
): Try[FetchResult] = {
Return(FetchResult(Seq(), containsLastRow = true))
}
}
case class FetchResult(rowPackets: Seq[Packet], containsLastRow: Boolean) extends Result {
override def toString: String = s"FetchResult(rows=${rowPackets.size}, containsLastRow=$containsLastRow)"
}
| spockz/finagle | finagle-mysql/src/main/scala/com/twitter/finagle/mysql/Result.scala | Scala | apache-2.0 | 8,786 |
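As a small usage sketch of the Result hierarchy above, the hypothetical helper below pattern-matches over the decoded single-packet response variants; it is not part of finagle-mysql itself.

import com.twitter.finagle.mysql.{EOF, Error, OK, Result}
object ResultDescriber {
  // Summarize the common single-packet results defined above
  def describe(result: Result): String = result match {
    case OK(affectedRows, insertId, _, warningCount, _) =>
      s"ok: $affectedRows row(s) affected, insertId=$insertId, warnings=$warningCount"
    case Error(code, sqlState, message) =>
      s"error $code ($sqlState): $message"
    case EOF(warnings, status) =>
      s"eof: warnings=$warnings, serverStatus=$status"
    case other =>
      s"other result: $other"
  }
}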
package com.rocketfuel.sdbc.postgresql.jdbc
import java.time.format.{DateTimeFormatterBuilder, DateTimeFormatter}
package object implementation {
private[implementation] val offsetTimeFormatter: DateTimeFormatter = {
new DateTimeFormatterBuilder().
parseCaseInsensitive().
append(DateTimeFormatter.ISO_LOCAL_TIME).
optionalStart().
appendOffset("+HH:mm", "+00").
optionalEnd().
toFormatter
}
private[implementation] val offsetDateTimeFormatter: DateTimeFormatter = {
new DateTimeFormatterBuilder().
parseCaseInsensitive().
append(DateTimeFormatter.ISO_LOCAL_DATE).
appendLiteral(' ').
append(offsetTimeFormatter).
toFormatter
}
}
| wdacom/sdbc | postgresql/src/main/scala/com/rocketfuel/sdbc/postgresql/jdbc/implementation/package.scala | Scala | bsd-3-clause | 715 |
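The two formatters above are private to the implementation package, so as a sketch of what they accept, the snippet below rebuilds an equivalent formatter and parses a PostgreSQL-style timestamp that uses a space separator (the sample value and object name are illustrative).

import java.time.OffsetDateTime
import java.time.format.{DateTimeFormatter, DateTimeFormatterBuilder}
object PgTimestampParseExample extends App {
  // Same shape as offsetTimeFormatter above: ISO local time plus an optional offset
  val offsetTime: DateTimeFormatter =
    new DateTimeFormatterBuilder()
      .parseCaseInsensitive()
      .append(DateTimeFormatter.ISO_LOCAL_TIME)
      .optionalStart()
      .appendOffset("+HH:mm", "+00")
      .optionalEnd()
      .toFormatter
  // Same shape as offsetDateTimeFormatter above: ISO local date, a space, then the time
  val offsetDateTime: DateTimeFormatter =
    new DateTimeFormatterBuilder()
      .parseCaseInsensitive()
      .append(DateTimeFormatter.ISO_LOCAL_DATE)
      .appendLiteral(' ')
      .append(offsetTime)
      .toFormatter
  // PostgreSQL renders timestamps with a space separator, which ISO_OFFSET_DATE_TIME rejects
  println(OffsetDateTime.parse("2038-01-19 03:14:07+00", offsetDateTime))
}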
package com.twitter.finagle.http
import com.twitter.io.{Buf, Reader => BufReader, Writer => BufWriter}
import com.twitter.finagle.netty3.ChannelBufferBuf
import com.twitter.util.{Await, Duration}
import java.io.{InputStream, InputStreamReader, OutputStream, OutputStreamWriter, Reader, Writer}
import java.util.{Iterator => JIterator}
import java.nio.charset.Charset
import java.util.{Date, TimeZone}
import org.apache.commons.lang.StringUtils
import org.apache.commons.lang.time.FastDateFormat
import org.jboss.netty.buffer._
import org.jboss.netty.channel.ChannelFuture
import org.jboss.netty.handler.codec.http.{HttpMessage, HttpHeaders, HttpMethod,
HttpVersion, DefaultHttpChunk, HttpChunk}
import scala.collection.JavaConverters._
/**
* Rich HttpMessage
*
* Base class for Request and Response. There are both input and output
* methods, though only one set of methods should be used.
*/
abstract class Message extends HttpMessage {
private[this] val readerWriter = BufReader.writable()
/**
* A read-only handle to the internal stream of bytes, representing the
* message body. See [[com.twitter.util.Reader]] for more information.
**/
def reader: BufReader = readerWriter
/**
* A write-only handle to the internal stream of bytes, representing the
* message body. See [[com.twitter.util.Writer]] for more information.
**/
def writer: BufWriter = readerWriter
def isRequest: Boolean
def isResponse = !isRequest
def content: ChannelBuffer = getContent()
def content_=(content: ChannelBuffer) { setContent(content) }
def version: HttpVersion = getProtocolVersion()
def version_=(version: HttpVersion) { setProtocolVersion(version) }
lazy val headerMap: HeaderMap = new MessageHeaderMap(this)
// Java users: use Netty HttpHeaders interface for headers
/**
* Cookies. In a request, this uses the Cookie headers.
* In a response, it uses the Set-Cookie headers.
*/
lazy val cookies = new CookieMap(this)
// Java users: use the interface below for cookies
/** Get iterator over Cookies */
def getCookies(): JIterator[Cookie] = cookies.valuesIterator.asJava
/** Add a cookie */
def addCookie(cookie: Cookie) {
cookies += cookie
}
/** Remove a cookie */
def removeCookie(name: String) {
cookies -= name
}
/** Accept header */
def accept: Seq[String] =
Option(headers.get(HttpHeaders.Names.ACCEPT)) match {
case Some(s) => s.split(",").map(_.trim).filter(_.nonEmpty)
case None => Seq()
}
/** Set Accept header */
def accept_=(value: String) { headers.set(HttpHeaders.Names.ACCEPT, value) }
/** Set Accept header with list of values */
def accept_=(values: Iterable[String]) { accept = values.mkString(", ") }
/** Accept header media types (normalized, no parameters) */
def acceptMediaTypes: Seq[String] =
accept.map {
_.split(";", 2).headOption
.map(_.trim.toLowerCase) // media types are case-insensitive
.filter(_.nonEmpty) // skip blanks
}.flatten
/** Allow header */
def allow: Option[String] = Option(headers.get(HttpHeaders.Names.ALLOW))
/** Set Allow header */
def allow_=(value: String) { headers.set(HttpHeaders.Names.ALLOW, value) }
/** Set Allow header */
def allow_=(values: Iterable[HttpMethod]) { allow = values.mkString(",") }
/** Get Authorization header */
def authorization: Option[String] = Option(headers.get(HttpHeaders.Names.AUTHORIZATION))
/** Set Authorization header */
def authorization_=(value: String) { headers.set(HttpHeaders.Names.AUTHORIZATION, value) }
/** Get Cache-Control header */
def cacheControl: Option[String] = Option(headers.get(HttpHeaders.Names.CACHE_CONTROL))
/** Set Cache-Control header */
def cacheControl_=(value: String) { headers.set(HttpHeaders.Names.CACHE_CONTROL, value) }
/** Set Cache-Control header with a max-age (and must-revalidate). */
def cacheControl_=(maxAge: Duration) {
cacheControl = "max-age=" + maxAge.inSeconds.toString + ", must-revalidate"
}
/** Get charset from Content-Type header */
def charset: Option[String] = {
contentType.foreach { contentType =>
val parts = StringUtils.split(contentType, ';')
1.to(parts.length - 1) foreach { i =>
val part = parts(i).trim
if (part.startsWith("charset=")) {
val equalsIndex = part.indexOf('=')
val charset = part.substring(equalsIndex + 1)
return Some(charset)
}
}
}
None
}
/** Set charset in Content-Type header. This does not change the content. */
def charset_=(value: String) {
val contentType = this.contentType.getOrElse("")
val parts = StringUtils.split(contentType, ';')
if (parts.isEmpty) {
this.contentType = ";charset=" + value // malformed
return
}
val builder = new StringBuilder(parts(0))
if (!(parts.exists { _.trim.startsWith("charset=") })) {
// No charset parameter exist, add charset after media type
builder.append(";charset=")
builder.append(value)
// Copy other parameters
1.to(parts.length - 1) foreach { i =>
builder.append(";")
builder.append(parts(i))
}
} else {
// Replace charset= parameter(s)
1.to(parts.length - 1) foreach { i =>
val part = parts(i)
if (part.trim.startsWith("charset=")) {
builder.append(";charset=")
builder.append(value)
} else {
builder.append(";")
builder.append(part)
}
}
}
this.contentType = builder.toString
}
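// A quick worked example of the setter above (values are illustrative):
//   starting Content-Type: "text/html; charset=iso-8859-1; level=1"
//   msg.charset = "utf-8"
//   resulting Content-Type: "text/html;charset=utf-8; level=1"
// The existing charset parameter is replaced in place and other parameters are preserved.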
/** Get Content-Length header. Use length to get the length of actual content. */
def contentLength: Option[Long] =
Option(headers.get(HttpHeaders.Names.CONTENT_LENGTH)).map { _.toLong }
/** Set Content-Length header. Normally, this is automatically set by the
* Codec, but this method allows you to override that. */
def contentLength_=(value: Long) {
headers.set(HttpHeaders.Names.CONTENT_LENGTH, value.toString)
}
/** Get Content-Type header */
def contentType: Option[String] = Option(headers.get(HttpHeaders.Names.CONTENT_TYPE))
/** Set Content-Type header */
def contentType_=(value: String) { headers.set(HttpHeaders.Names.CONTENT_TYPE, value) }
/** Set Content-Type header by media-type and charset */
def setContentType(mediaType: String, charset: String = "utf-8") {
headers.set(HttpHeaders.Names.CONTENT_TYPE, mediaType + ";charset=" + charset)
}
/** Set Content-Type header to application/json;charset=utf-8 */
def setContentTypeJson() { headers.set(HttpHeaders.Names.CONTENT_TYPE, Message.ContentTypeJson) }
/** Get Date header */
def date: Option[String] = Option(headers.get(HttpHeaders.Names.DATE))
/** Set Date header */
def date_=(value: String) { headers.set(HttpHeaders.Names.DATE, value) }
/** Set Date header by Date */
def date_=(value: Date) { date = Message.httpDateFormat(value) }
/** Get Expires header */
def expires: Option[String] = Option(headers.get(HttpHeaders.Names.EXPIRES))
/** Set Expires header */
def expires_=(value: String) { headers.set(HttpHeaders.Names.EXPIRES, value) }
/** Set Expires header by Date */
def expires_=(value: Date) { expires = Message.httpDateFormat(value) }
/** Get Host header */
def host: Option[String] = Option(headers.get(HttpHeaders.Names.HOST))
/** Set Host header */
def host_=(value: String) { headers.set(HttpHeaders.Names.HOST, value) }
/** Get Last-Modified header */
def lastModified: Option[String] = Option(headers.get(HttpHeaders.Names.LAST_MODIFIED))
/** Set Last-Modified header */
def lastModified_=(value: String) { headers.set(HttpHeaders.Names.LAST_MODIFIED, value) }
/** Set Last-Modified header by Date */
def lastModified_=(value: Date) { lastModified = Message.httpDateFormat(value) }
/** Get Location header */
def location: Option[String] = Option(headers.get(HttpHeaders.Names.LOCATION))
/** Set Location header */
def location_=(value: String) { headers.set(HttpHeaders.Names.LOCATION, value) }
/** Get media-type from Content-Type header */
def mediaType: Option[String] =
contentType.flatMap { contentType =>
val beforeSemi =
contentType.indexOf(";") match {
case -1 => contentType
case n => contentType.substring(0, n)
}
val mediaType = beforeSemi.trim
if (mediaType.nonEmpty)
Some(mediaType.toLowerCase)
else
None
}
/**
* Set media-type in Content-Type header. Charset and parameter values are
* preserved, though may not be appropriate for the new media type.
*/
def mediaType_=(value: String) {
contentType match {
case Some(contentType) =>
val parts = StringUtils.split(contentType, ";", 2)
if (parts.length == 2) {
this.contentType = value + ";" + parts(1)
} else {
this.contentType = value
}
case None =>
this.contentType = value
}
}
/** Get Referer [sic] header */
def referer: Option[String] = Option(headers.get(HttpHeaders.Names.REFERER))
/** Set Referer [sic] header */
def referer_=(value: String) { headers.set(HttpHeaders.Names.REFERER, value) }
/** Get Retry-After header */
def retryAfter: Option[String] = Option(headers.get(HttpHeaders.Names.RETRY_AFTER))
/** Set Retry-After header */
def retryAfter_=(value: String) { headers.set(HttpHeaders.Names.RETRY_AFTER, value) }
/** Set Retry-After header by seconds */
def retryAfter_=(value: Long) { retryAfter = value.toString }
/** Get Server header */
def server: Option[String] = Option(headers.get(HttpHeaders.Names.SERVER))
/** Set Server header */
def server_=(value: String) { headers.set(HttpHeaders.Names.SERVER, value) }
/** Get User-Agent header */
def userAgent: Option[String] = Option(headers.get(HttpHeaders.Names.USER_AGENT))
/** Set User-Agent header */
def userAgent_=(value: String) { headers.set(HttpHeaders.Names.USER_AGENT, value) }
/** Get WWW-Authenticate header */
def wwwAuthenticate: Option[String] = Option(headers.get(HttpHeaders.Names.WWW_AUTHENTICATE))
/** Set WWW-Authenticate header */
def wwwAuthenticate_=(value: String) { headers.set(HttpHeaders.Names.WWW_AUTHENTICATE, value) }
/** Get X-Forwarded-For header */
def xForwardedFor: Option[String] = Option(headers.get("X-Forwarded-For"))
/** Set X-Forwarded-For header */
def xForwardedFor_=(value: String) { headers.set("X-Forwarded-For", value) }
/**
* Check if X-Requested-With contains XMLHttpRequest, usually signalling a
* request from a JavaScript AJAX library. Some servers treat these
* requests specially. For example, an endpoint might render JSON or XML
* instead of HTML if it's an XmlHttpRequest. (Tip: don't do this - it's gross.)
*/
def isXmlHttpRequest = {
Option(headers.get("X-Requested-With")) exists { _.toLowerCase.contains("xmlhttprequest") }
}
/** Get length of content. */
def length: Int = getContent.readableBytes
def getLength(): Int = length
/** Get the content as a string. */
def contentString: String = {
val encoding = try {
Charset.forName(charset getOrElse "UTF-8")
} catch {
case _: Throwable => Message.Utf8
}
getContent.toString(encoding)
}
def getContentString(): String = contentString
/** Set the content as a string. */
def contentString_=(value: String) {
if (value != "")
setContent(ChannelBuffers.wrappedBuffer(value.getBytes("UTF-8")))
else
setContent(ChannelBuffers.EMPTY_BUFFER)
}
def setContentString(value: String) { contentString = value }
/**
* Use content as InputStream. The underlying channel buffer's reader
* index is advanced. (Scala interface. Java users can use getInputStream().)
*/
def withInputStream[T](f: InputStream => T): T = {
val inputStream = getInputStream()
val result = f(inputStream) // throws
inputStream.close()
result
}
/**
* Get InputStream for content. Caller must close. (Java interface. Scala
* users should use withInputStream.)
*/
def getInputStream(): InputStream =
new ChannelBufferInputStream(getContent)
/** Use content as Reader. (Scala interface. Java users can use getReader().) */
def withReader[T](f: Reader => T): T = {
withInputStream { inputStream =>
val reader = new InputStreamReader(inputStream)
f(reader)
}
}
/** Get Reader for content. (Java interface. Scala users should use withReader.) */
def getReader(): Reader =
new InputStreamReader(getInputStream())
/** Append string to content. */
def write(string: String) {
write(string.getBytes("UTF-8"))
}
/** Append bytes to content. */
def write(bytes: Array[Byte]) {
getContent match {
case buffer: DynamicChannelBuffer =>
buffer.writeBytes(bytes)
case _ =>
val buffer = ChannelBuffers.wrappedBuffer(bytes)
write(buffer)
}
}
/** Append ChannelBuffer to content.
*
* If `isChunked` then multiple writes must be composed using `writer` and
* `flatMap` to have the appropriate backpressure semantics.
*
* Attempting to `write` after calling `close` will result in a thrown
* [[com.twitter.util.Reader.ReaderDiscarded]].
*/
@throws(classOf[BufReader.ReaderDiscarded])
@throws(classOf[IllegalStateException])
def write(buffer: ChannelBuffer) {
if (isChunked) writeChunk(buffer) else {
getContent match {
case ChannelBuffers.EMPTY_BUFFER =>
setContent(buffer)
case content =>
setContent(ChannelBuffers.wrappedBuffer(content, buffer))
}
}
}
/**
* Use content as OutputStream. Content is replaced with stream contents.
* (Java users can use this with a Function, or use Netty's ChannelBufferOutputStream
* and then call setContent() with the underlying buffer.)
*/
def withOutputStream[T](f: OutputStream => T): T = {
// Use buffer size of 1024. Netty default is 256, which seems too small.
// Netty doubles buffers on resize.
val outputStream = new ChannelBufferOutputStream(ChannelBuffers.dynamicBuffer(1024))
val result = f(outputStream) // throws
outputStream.close()
write(outputStream.buffer)
result
}
/** Use as a Writer. Content is replaced with writer contents. */
def withWriter[T](f: Writer => T): T = {
withOutputStream { outputStream =>
val writer = new OutputStreamWriter(outputStream, Message.Utf8)
val result = f(writer)
writer.close()
// withOutputStream will write()
result
}
}
/** Clear content (set to ""). */
def clearContent() {
setContent(ChannelBuffers.EMPTY_BUFFER)
}
/** End the response stream. */
def close() = writer.write(Buf.Eof)
private[this] def writeChunk(buf: ChannelBuffer) {
if (buf.readable) {
val future = writer.write(new ChannelBufferBuf(buf))
// Unwraps the future in the Return case, or throws exception in the Throw case.
if (future.isDefined) Await.result(future)
}
}
}
object Message {
private[http] val Utf8 = Charset.forName("UTF-8")
@deprecated("Use MediaType.Json", "6.1.5")
val MediaTypeJson = "application/json"
@deprecated("Use MediaType.Javascript", "6.1.5")
val MediaTypeJavascript = "application/javascript"
@deprecated("Use MediaType.WwwForm", "6.1.5")
val MediaTypeWwwForm = "application/x-www-form-urlencoded"
val CharsetUtf8 = "charset=utf-8"
val ContentTypeJson = MediaType.Json + ";" + CharsetUtf8
val ContentTypeJavascript = MediaType.Javascript + ";" + CharsetUtf8
val ContentTypeWwwFrom = MediaType.WwwForm + ";" + CharsetUtf8
private val HttpDateFormat = FastDateFormat.getInstance("EEE, dd MMM yyyy HH:mm:ss",
TimeZone.getTimeZone("GMT"))
def httpDateFormat(date: Date): String =
HttpDateFormat.format(date) + " GMT"
}
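// Editor's note: hedged usage sketch, not part of the original file. It exercises only the
// accessors defined above (setContentTypeJson, contentString_=, cacheControl_=, mediaType,
// charset, length); the Response() factory and the object/method names below are assumptions
// for illustration, not finagle API documentation.
object MessageUsageSketch {
  import com.twitter.finagle.http.Response
  import com.twitter.util.Duration

  def demo(): Unit = {
    val rep = Response()
    rep.setContentTypeJson()                    // Content-Type: application/json;charset=utf-8
    rep.contentString = """{"ok":true}"""       // replaces the content buffer
    rep.cacheControl = Duration.fromSeconds(30) // Cache-Control: max-age=30, must-revalidate
    println(rep.mediaType)                      // Some("application/json")
    println(rep.charset)                        // Some("utf-8")
    println(rep.length)                         // readable bytes in the content buffer
  }
}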
|
JustinTulloss/finagle
|
finagle-http/src/main/scala/com/twitter/finagle/http/Message.scala
|
Scala
|
apache-2.0
| 16,054 |
package scaladex.core.model
import org.scalatest.funspec.AsyncFunSpec
import org.scalatest.matchers.should.Matchers
class LanguageTests extends AsyncFunSpec with Matchers {
describe("Scala 3 versions") {
it("Scala 3 label") {
Scala.`3`.label shouldBe "3.x"
}
it("should not accept minor versions") {
Scala(MinorVersion(3, 0)).isValid shouldBe false
}
it("should not accept patch versions") {
Scala(PatchVersion(3, 0, 1)).isValid shouldBe false
}
}
}
|
scalacenter/scaladex
|
modules/core/shared/src/test/scala/scaladex/core/model/LanguageTests.scala
|
Scala
|
bsd-3-clause
| 500 |
/***********************************************************************
* Copyright (c) 2013-2015 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0 which
* accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import com.typesafe.scalalogging.slf4j.Logging
import com.vividsolutions.jts.geom.Polygon
import org.apache.accumulo.core.client.mock.MockInstance
import org.apache.accumulo.core.client.security.tokens.PasswordToken
import org.geotools.data.simple.SimpleFeatureStore
import org.geotools.data.{DataUtilities, Query}
import org.geotools.filter.text.ecql.ECQL
import org.joda.time.{DateTime, DateTimeZone, Interval}
import org.junit.runner.RunWith
import org.locationtech.geomesa.accumulo._
import org.locationtech.geomesa.accumulo.data.AccumuloDataStoreFactory
import org.locationtech.geomesa.accumulo.index.IndexSchema
import org.locationtech.geomesa.accumulo.iterators.TestData._
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.GenSeq
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class MultiIteratorTest extends Specification with Logging {
sequential
object IteratorTest {
def setupMockFeatureSource(entries: GenSeq[TestData.Entry], tableName: String = "test_table"): SimpleFeatureStore = {
val mockInstance = new MockInstance("dummy")
val c = mockInstance.getConnector("user", new PasswordToken("pass".getBytes))
// Remember we need to delete all 4 tables now
List(
tableName,
s"${tableName}_${TestData.featureType.getTypeName}_st_idx",
s"${tableName}_${TestData.featureType.getTypeName}_records",
s"${tableName}_${TestData.featureType.getTypeName}_attr_idx"
).foreach { t => if (c.tableOperations.exists(t)) c.tableOperations.delete(t) }
val dsf = new AccumuloDataStoreFactory
import org.locationtech.geomesa.accumulo.data.AccumuloDataStoreFactory.params._
val ds = dsf.createDataStore(Map(
zookeepersParam.key -> "dummy",
instanceIdParam.key -> "dummy",
userParam.key -> "user",
passwordParam.key -> "pass",
authsParam.key -> "S,USA",
tableNameParam.key -> tableName,
mockParam.key -> "true"))
ds.createSchema(TestData.featureType)
val fs = ds.getFeatureSource(TestData.featureName).asInstanceOf[SimpleFeatureStore]
val dataFeatures = entries.par.map(createSF)
val featureCollection = DataUtilities.collection(dataFeatures.toArray)
fs.addFeatures(featureCollection)
fs.getTransaction.commit()
fs
}
}
def getQuery(ecqlFilter: Option[String] = None,
dtFilter: Interval = null,
overrideGeometry: Boolean = false,
indexIterator: Boolean = false): Query = {
val polygon: Polygon =
  if (overrideGeometry) IndexSchema.everywhere
  else WKTUtils.read(TestData.wktQuery).asInstanceOf[Polygon]
val gf = s"INTERSECTS(geom, ${polygon.toText})"
val dt: Option[String] = Option(dtFilter).map(int =>
s"(dtg between '${int.getStart}' AND '${int.getEnd}')"
)
def red(f: String, og: Option[String]) = og match {
case Some(g) => s"$f AND $g"
case None => f
}
val tfString = red(red(gf, dt), ecqlFilter)
val tf = ECQL.toFilter(tfString)
if (indexIterator) {
// select a few attributes to trigger the IndexIterator
val outputAttributes = Array("geom", "dtg")
new Query(TestData.featureType.getTypeName, tf, outputAttributes)
} else {
new Query(TestData.featureType.getTypeName, tf)
}
}
"Mock Accumulo with fullData" should {
val fs = IteratorTest.setupMockFeatureSource(TestData.fullData, "mock_full_data")
val features = TestData.fullData.map(createSF)
"return the same result for our iterators" in {
val q = getQuery(None)
val indexOnlyQuery = getQuery(indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
"return a full results-set" in {
val filterString = "true = true"
val q = getQuery(Some(filterString))
val indexOnlyQuery = getQuery(Some(filterString), indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
// validate the total number of query-hits
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
"return a partial results-set" in {
val filterString = """(attr2 like '2nd___')"""
val fs = IteratorTest.setupMockFeatureSource(TestData.fullData, "mock_attr_filt")
val features = TestData.fullData.map(createSF)
val q = getQuery(Some(filterString))
val indexOnlyQuery = getQuery(Some(filterString), indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
// validate the total number of query-hits
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
}
"Mock Accumulo with a small table" should {
"cover corner cases" in {
val fs = IteratorTest.setupMockFeatureSource(TestData.shortListOfPoints, "mock_small_corner_cases")
val features = TestData.shortListOfPoints.map(createSF)
val q = getQuery(None)
val indexOnlyQuery = getQuery(None, indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
// validate the total number of query-hits
// Since we are playing with points, we can count **exactly** how many results we should
// get back. This is important to check corner cases.
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
}
"Realistic Mock Accumulo" should {
"handle edge intersection false positives" in {
val fs = IteratorTest.setupMockFeatureSource(TestData.shortListOfPoints ++ TestData.geohashHitActualNotHit, "mock_small")
val features = (TestData.shortListOfPoints ++ TestData.geohashHitActualNotHit).map(createSF)
val q = getQuery(None)
val indexOnlyQuery = getQuery(None, indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
// validate the total number of query-hits
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
}
"Large Mock Accumulo" should {
val fs = IteratorTest.setupMockFeatureSource(TestData.hugeData, "mock_huge")
val features = TestData.hugeData.map(createSF)
"return a partial results-set with a meaningful attribute-filter" in {
val filterString = "(not " + DEFAULT_DTG_PROPERTY_NAME +
" after 2010-08-08T23:59:59Z) and (not dtg_end_time before 2010-08-08T00:00:00Z)"
val q = getQuery(Some(filterString))
val indexOnlyQuery = getQuery(Some(filterString), indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
// validate the total number of query-hits
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
"return a filtered results-set with a meaningful time-range" in {
val filterString = "true = true"
val dtFilter = new Interval(
new DateTime(2010, 8, 8, 0, 0, 0, DateTimeZone.forID("UTC")),
new DateTime(2010, 8, 8, 23, 59, 59, DateTimeZone.forID("UTC"))
)
val fs = IteratorTest.setupMockFeatureSource(TestData.hugeData, "mock_huge_time")
val features = TestData.hugeData.map(createSF)
val q = getQuery(Some(filterString), dtFilter)
val indexOnlyQuery = getQuery(Some(filterString), dtFilter, indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
// validate the total number of query-hits
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
"return a filtered results-set with a degenerate time-range" in {
val filterString = "true = true"
val dtFilter = IndexSchema.everywhen
val q = getQuery(Some(filterString), dtFilter)
val indexOnlyQuery = getQuery(Some(filterString), dtFilter, indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
// validate the total number of query-hits
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
"return an unfiltered results-set with a global request" in {
val dtFilter = IndexSchema.everywhen
val q = getQuery(None, dtFilter, overrideGeometry = true)
val indexOnlyQuery = getQuery(None, dtFilter, overrideGeometry = true, indexIterator = true)
val filteredCount = features.count(q.getFilter.evaluate)
val stQueriedCount = fs.getFeatures(q).size
val indexOnlyCount = fs.getFeatures(indexOnlyQuery).size
logger.debug(s"Filter: ${q.getFilter} queryCount: $stQueriedCount filteredCount: $filteredCount indexOnlyCount: $indexOnlyCount")
// validate the total number of query-hits
indexOnlyCount mustEqual filteredCount
stQueriedCount mustEqual filteredCount
}
}
}
|
drackaer/geomesa
|
geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/iterators/MultiIteratorTest.scala
|
Scala
|
apache-2.0
| 11,570 |
package edu.osu.cse.groenkeb.logic.proof.rules
import edu.osu.cse.groenkeb.logic._
import edu.osu.cse.groenkeb.logic.proof._
trait Rule {
/**
* True if this rule can accept the given sentence as a major premise for inference, false otherwise.
* Rules that do not define a major premise should return false on all inputs.
*/
def major(sentence: Sentence): Boolean
/**
* True if this rule yields the given sentence as a conclusion, false otherwise.
*/
def yields(sentence: Sentence): Boolean
/**
* Returns the RuleParams necessary for inference given the conclusion ("goal") of the current ProofContext and the
* optional major premise of the rule. If this rule does not define a major premise, the supplied argument
* should be None. Implementations should return None for all conclusion/major patterns that are not defined
* by the Rule.
*/
def params(major: Option[Sentence] = None)(implicit context: ProofContext): Option[RuleParams]
/**
* Returns a Proof with the current goal of the given ProofContext as the conclusion and the arguments supplied
* in 'args' as the premises, or None if the given arguments do not satisfy this Rule's parameters for inference.
*/
def infer(args: RuleArgs)(implicit context: ProofContext): Option[Proof]
}
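// Editor's note: hedged sketch, not part of the original file. It shows the smallest possible
// implementation of the contract documented above: a rule that defines no major premise,
// yields nothing, and therefore answers false/None everywhere. The object name is illustrative.
object NoOpRule extends Rule {
  def major(sentence: Sentence): Boolean = false
  def yields(sentence: Sentence): Boolean = false
  def params(major: Option[Sentence] = None)(implicit context: ProofContext): Option[RuleParams] = None
  def infer(args: RuleArgs)(implicit context: ProofContext): Option[Proof] = None
}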
|
bgroenks96/AutoMoL
|
core/src/main/scala/edu/osu/cse/groenkeb/logic/proof/rules/Rule.scala
|
Scala
|
mit
| 1,303 |
package com._3tierlogic.KinesisManager
import ch.qos.logback.classic.LoggerContext
import ch.qos.logback.classic.joran.JoranConfigurator
import ch.qos.logback.core.joran.spi.JoranException
import ch.qos.logback.core.util.StatusPrinter
import org.slf4j.Logger
import org.slf4j.LoggerFactory
/** '''Provide Logging Handle to Root Class'''
*
* @author Eric Kolotyluk
*/
trait LogbackLogging {
// This statement is actually here to force the LogbackLogging object to instantiate first,
// as this causes the status of the logback environment to be logged before anything else
// is logged. Isn't side effect programming wonderful :-)
if (LogbackLogging.loggerContext == null) println("LogbackLogging.loggerContext == null")
val logger = LoggerFactory.getLogger(getClass);
}
/** '''Singleton Logging Configuration'''
*
* Log the current logging environment as if we were in debug mode. This is especially useful
* when troubleshooting, or reverse engineering code, and trying to understand the logging
* environment.
*
* @author Eric Kolotyluk
*
* @see [[http://logback.qos.ch/manual/configuration.html LogBack Configuration]]
*
*/
object LogbackLogging {
// assume SLF4J is bound to logback in the current environment
val loggerContext = LoggerFactory.getILoggerFactory().asInstanceOf[LoggerContext]
// print logback's internal status
StatusPrinter.print(loggerContext)
}
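// Editor's note: hedged usage sketch, not part of the original file. Mixing in the trait gives
// the class a ready `logger`, and the reference to the companion object forces logback's internal
// status to be printed once before the first log statement. The class name is illustrative.
class ExampleService extends LogbackLogging {
  def start(): Unit = logger.info("ExampleService starting")
}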
|
3tierlogic/kinesis-manager
|
kinesis-common/src/main/scala/com/_3tierlogic/KinesisManager/LogbackLogging.scala
|
Scala
|
apache-2.0
| 1,422 |
/*   ____    __    ____  ____  ____,,___     ____  __  __  ____
 *  (  _ \  /__\  (_   )(_  _)( ___)/ __)   (  _ \( )( )( _ \           Read
 *   )   / /(__)\  / /_  _)(_  )__) \__ \    )___/ )(__)(  ) _ <     README.txt
 *  (_)\_)(__)(__)(____)(____)(____)(___/   (__)  (______)(____/    LICENSE.txt
*/
package razie.diesel.engine
import api.dwix
import org.bson.types.ObjectId
import razie.diesel.Diesel
import razie.diesel.dom.RDOM.{P, ParmSource}
import razie.diesel.dom.RDomain
import razie.diesel.engine.nodes.EVal
import razie.diesel.expr.{ECtx, SimpleECtx}
import razie.diesel.model.DieselMsg
import razie.hosting.{RkReactors, Website}
import razie.tconf.{DSpec, DUsers}
import razie.wiki.{Config, Services}
import scala.collection.JavaConverters.propertiesAsScalaMapConverter
/** specific root context for an engine instance
*
* it can keep unique IDs and such, to help with tests ran continuously
*/
class DomEngECtx(val settings: DomEngineSettings, cur: List[P] = Nil, base: Option[ECtx] = None)
extends SimpleECtx(cur, base, None) {
var overwritten: Option[ECtx] = None
var persisted: Boolean = false
var engine: Option[DomEngine] = None
override def name = "DomEngCtx"
/** NOT for moving from engine to engine */
def withEngine(e: DomEngine) = {
this.engine = Some(e);
this
}
def withSpecs(s: List[DSpec]) = {
_specs = s ::: _specs
this
}
def withDomain(r: RDomain) = {
_domain = Some(r)
this
}
override def apply(name: String): String =
overwritten
.map(_.apply(name))
// todo not call ps to parse random payloads
// have a separate expression for body access or are we adding each in ctx ?
// .orElse(ps(name).map(_.currentStringValue))
.orElse(pu(name).map(_.currentStringValue))
.getOrElse(super.apply(name))
override def getp(name: String): Option[P] =
if(name.length > 0)
overwritten
.flatMap(_.getp(name))
// todo not call ps to parse random payloads
// have a separate expression for body access or are we adding each in ctx ?
// .orElse(ps(name))
.orElse(pu(name))
.orElse(super.getp(name))
else None
override def put(p: P) {
overwritten.map(_.put(p)).getOrElse(super.put(p))
}
override def putAll(p: List[P]) {
overwritten.map(_.putAll(p)).getOrElse(super.putAll(p))
}
override def root = overwritten.map(_.root).getOrElse(super.root)
override def exists(f: scala.Function1[P, scala.Boolean]): scala.Boolean =
overwritten
.map(_.exists(f))
// todo remove use of postedContent - what the heck, see ps() as well
// .orElse(settings.postedContent.map(_.exists(f)))
.getOrElse(super.exists(f))
/** set some of the unique values to help rerun tests */
def setu (p:P) : Boolean = p.name match {
case "diesel.settings.mockQuery" => true
case _ => false
}
// figure out the environment for this user
// todo once the engine starts, store the start value in settings - it can't change and we should store it, if it's not overwritten...
def dieselEnv(ctx:ECtx) = {
val settings = ctx.root.engine.map(_.settings)
val au = settings
.flatMap(_.userId)
.map(new ObjectId(_))
.flatMap(DUsers.impl.findUserById)
val ret = dwix.dieselEnvFor(settings.flatMap(_.realm).mkString, au)
ret
}
// user id if any
def dieselUser(ctx: ECtx) = {
val au = ctx.root.engine.map(_.settings).flatMap(_.userId)
au.mkString
}
// user id if any
def dieselUsername(ctx: ECtx) = {
val au = ctx.root.engine.map(_.settings).flatMap(_.userId)
val x = au.flatMap(id => DUsers.impl.findUserById(new ObjectId(id))).map(_.userName)
x.mkString
}
/** source from settings - only if there's some value... otherwise base won't cascade */
private def ps(name: String): Option[P] =
settings.postedContent.filter(_.body.length > 0).flatMap(_.getp(name))
/** used for instance when persisting a context - will overwrite the default */
def overwrite(ctx: ECtx): Unit =
if (this != ctx)
overwritten = Some(ctx)
/** reset this engine's values */
override def clear: Unit = {
this.attrs = Nil
this.overwritten.foreach(_.clear)
this.base.foreach(_.clear)
}
override def toString = this.getClass.getSimpleName + ":cur==" +
cur.mkString(",") + ":attrs==" + attrs.mkString(",") //+ base.map(_.toString).mkString
}
/** source for parms starting with "diesel"
*
* @param ctx is the root context of this engine
*/
class DieselParmSource (ctx:DomEngECtx) extends ParmSource {
def name = "diesel"
def remove(name: String): Option[P] = ???
def getp(name: String): Option[P] = name match {
case "env" =>
// first overrides, then settings and lastly current envList setting
None
.orElse(ctx.settings.env.map(P("diesel.env", _)))
.orElse(Some(P("diesel.env", ctx.dieselEnv(ctx))))
case "user" => Some(P("diesel.user", ctx.dieselUser(ctx)))
case "username" => Some(P("diesel.username", ctx.dieselUsername(ctx)))
case "isLocalhost" => Some(P.fromTypedValue("diesel.isLocalhost", razie.wiki.Services.config.isLocalhost))
// todo deprecated, remove - search in all realms
case "realm.props" | "props.realm" => {
val p = Website.getRealmProps(ctx.root.settings.realm.mkString)
Some(P.fromTypedValue("diesel.realm.props", p))
}
case "props" => {
next("diesel.props", Map(
"system" -> (n => {
val m = if (Config.isLocalhost) {
System.getProperties.asScala
} else {
throw new IllegalArgumentException("Error: No permission")
}
Left(P.fromSmartTypedValue("diesel.props.system", m))
}),
"realm" -> (n => Right(new DieselRealmParmSource(ctx)))
))
}
case "realm" => {
next("diesel.realm", Map(
"name" -> (n => Left(P.fromSmartTypedValue("diesel.realm.name", ctx.settings.realm.mkString))),
"local" -> {
val p = if (Services.config.isLocalhost)
P.fromSmartTypedValue("diesel.realm.local", RkReactors.forHost(Services.config.simulateHost).mkString)
else P.undefined("diesel.realm.local")
(n => Left(p))
},
"props" -> (n => Right(new DieselRealmParmSource(ctx)))
))
}
case "server" => Some(P.fromSmartTypedValue("diesel.server", Map(
"node" -> Config.node,
"host" -> java.net.InetAddress.getLocalHost.getCanonicalHostName,
"hostName" -> java.net.InetAddress.getLocalHost.getHostName,
"ip" -> java.net.InetAddress.getLocalHost.getHostAddress
)))
// case "engine" => {
// next("diesel.engine", Map(
// "description" -> (n => Left(
// P(DieselMsg.ENGINE.DIESEL_ENG_DESC, ctx.engine.map(_.description).mkString)
// ))
// ))
// }
case _ => None
}
def next(name: String, values: Map[String, String => Either[P, ParmSource]]) =
Some(P.fromTypedValue(name, new NextDParmSource(ctx, name, values)))
def put(p: P): Unit = p.name match {
// for "env" we could override locally only
case _ => throw new DieselException("Can't overwrite values in this context!")
}
def listAttrs: List[P] = Nil
}
/** source for parms kept in a given context (handles dieselScope writes) */
class DieselCtxParmSource(val name: String, ctx: ECtx, origCtx: ECtx) extends ParmSource {
def remove(name: String): Option[P] = ctx.remove(name)
def getp(name: String): Option[P] = ctx.getp(name)
def put(p: P): Unit = {
if ("dieselScope" == name) {
// remove any overload from all contexts until the scope
origCtx.allToScope.foreach(
_.remove(p.name)
)
// scope vars are set in the closest enclosing ScopeECtx or EngCtx
// the idea is to bypass the enclosing RuleScopeECtx
}
ctx.put(p)
}
def listAttrs: List[P] = ctx.listAttrs
}
/** source for parms static at realm level */
class DieselRealmParmSource(ctx: DomEngECtx) extends ParmSource {
val realm = ctx.root.settings.realm.mkString
def parms = {
Website.getRealmProps(realm)
}
def name = "diesel.realm.props"
def remove(name: String): Option[P] = ???
def getp(name: String): Option[P] = name match {
case _ => {
val p = parms
p.get(name)
}
}
def put(p: P): Unit = {
Website.putRealmProps(realm, p.name, p.calculatedP(ctx))
Website.forRealm(realm).map(_.put(p.name, p.calculatedValue(ctx)))
}
def listAttrs: List[P] = {
val p = parms
p.values.toList
}
}
/** todo hierarchical source for objects? */
class NextParmSource(ctx: DomEngECtx, pname: String, value: P) extends ParmSource {
def name = pname
def remove(name: String): Option[P] = ???
def getp(name: String): Option[P] = name match {
case `pname` => Some(value) // backticks: match against the field, not bind a fresh variable
case _ => None
}
def put(p: P): Unit = ???
def listAttrs: List[P] = ???
}
/** todo hierarchical source for objects? */
class NextDParmSource(ctx: ECtx, pname: String, values: Map[String, String => Either[P, ParmSource]]) extends
ParmSource {
def name = pname
def getp(name: String): Option[P] = values.get(name).flatMap(_.apply(name) match {
case Left(p) => Some(p)
case Right(ps) => Some(P.fromSmartTypedValue(pname + "." + name, ps))
})
def remove(name: String): Option[P] = ???
def put(p: P): Unit = ???
def listAttrs: List[P] = ???
}
|
razie/diesel-hydra
|
diesel/src/main/scala/razie/diesel/engine/DomEngECtx.scala
|
Scala
|
apache-2.0
| 9,472 |
package com.airbnb.common.ml.strategy.eval
import scala.util.Try
import org.apache.spark.rdd.RDD
case class BinaryMetrics(
posCount: Int,
negCount: Int,
posSugHigher: Int,
posSugLower: Int,
negSugHigher: Int,
negSugLower: Int,
increasePrecision: Double,
increaseRecall: Double,
decreasePrecision: Double,
decreaseRecall: Double,
trueRegret: Double,
trueRegretMedian: Double,
trueRegret75Percentile: Double,
falseRegret: Double,
trueIncreaseMagnitude: Double,
trueDecreaseMagnitude: Double,
falseDecreaseMagnitude: Double,
falseIncreaseMagnitude: Double,
trueDecreaseSum: Double,
trueIncreaseSum: Double,
falseDecreaseSum: Double,
falseIncreaseSum: Double
) {
def toTSVRow: String = {
Vector(
posCount, negCount, posSugHigher, posSugLower, negSugHigher, negSugLower, // raw counts
// precision-recall
increasePrecision, increaseRecall, decreasePrecision, decreaseRecall,
trueRegret, trueRegretMedian, trueRegret75Percentile, falseRegret,
trueIncreaseMagnitude,
trueDecreaseMagnitude,
falseDecreaseMagnitude,
falseIncreaseMagnitude // magnitude metrics
).mkString("\t")
}
override def toString: String = {
Vector(
// save to database
toTSVRow,
// 4 additional magnitude metrics
trueDecreaseSum, trueIncreaseSum, falseDecreaseSum, falseIncreaseSum,
// 2 loss metrics
falseIncreaseSum / negSugHigher,
trueDecreaseSum / posSugLower
).mkString("\t")
}
// For ease of printing with field names
def toArray: Array[(String, Any)] = {
this.getClass
.getDeclaredFields
.map(_.getName) // all field names
.zip(this.productIterator.to)
}
def +(that: BinaryMetrics): BinaryMetrics = {
BinaryMetrics(
this.posCount + that.posCount,
this.negCount + that.negCount,
this.posSugHigher + that.posSugHigher,
this.posSugLower + that.posSugLower,
this.negSugHigher + that.negSugHigher,
this.negSugLower + that.negSugLower,
// metrics can't be added
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
this.trueDecreaseSum + that.trueDecreaseSum,
this.trueIncreaseSum + that.trueIncreaseSum,
this.falseDecreaseSum + that.falseDecreaseSum,
this.falseIncreaseSum + that.falseIncreaseSum
)
}
def recompute: BinaryMetrics = {
val lowerCount = posSugLower + negSugLower
val higherCount = posSugHigher + negSugHigher
BinaryMetrics(
posCount = posCount,
negCount = negCount,
posSugHigher = posSugHigher,
posSugLower = posSugLower,
negSugHigher = negSugHigher,
negSugLower = negSugLower,
increasePrecision = BinaryMetrics.safeDiv(posSugHigher, higherCount),
increaseRecall = BinaryMetrics.safeDiv(posSugHigher, posCount),
decreasePrecision = BinaryMetrics.safeDiv(negSugLower, lowerCount),
decreaseRecall = BinaryMetrics.safeDiv(negSugLower, negCount),
trueRegret = BinaryMetrics.safeDiv(trueDecreaseSum, posCount),
trueRegretMedian = trueRegretMedian,
trueRegret75Percentile = trueRegret75Percentile,
falseRegret = BinaryMetrics.safeDiv(falseIncreaseSum, negCount),
trueIncreaseMagnitude = BinaryMetrics.safeDiv(trueIncreaseSum, posSugHigher),
trueDecreaseMagnitude = BinaryMetrics.safeDiv(trueDecreaseSum, posSugLower),
falseDecreaseMagnitude = BinaryMetrics.safeDiv(falseDecreaseSum, negSugLower),
falseIncreaseMagnitude = BinaryMetrics.safeDiv(falseIncreaseSum, negSugHigher),
trueDecreaseSum,
trueIncreaseSum,
falseDecreaseSum,
falseIncreaseSum
)
}
}
object BinaryMetrics {
final val metricNames: Seq[String] =
Vector(
"posCount", "negCount", "posSugHigher", "posSugLower", "negSugHigher", "negSugLower",
"increasePrecision", "increaseRecall", "decreasePrecision", "decreaseRecall",
"trueRegret", "trueRegretMedian", "trueRegret75Percentile", "falseRegret",
"trueIncreaseMagnitude",
"trueDecreaseMagnitude",
"falseDecreaseMagnitude",
"falseIncreaseMagnitude"
)
final val metricsHeader: String = metricNames.mkString("\t")
/**
* Compute our evaluation metrics for a set of prediction results.
*
* @param results prediction results to eval: (label, predictionLower) -> (count, sum)
* @param trueRegretMedian median true regret score
* @param trueRegret75Percentile 75th percentile true regret
* @return a populated BinaryMetrics instance
*/
def computeEvalMetricFromCounts(
results: Map[(Boolean, Boolean), (Int, Double)],
trueRegretMedian: Double,
trueRegret75Percentile: Double
): BinaryMetrics = {
val posSugHigher: Int = Try(results((true, false))._1).getOrElse(0)
val posSugLower: Int = Try(results((true, true))._1).getOrElse(0)
val negSugHigher: Int = Try(results((false, false))._1).getOrElse(0)
val negSugLower: Int = Try(results((false, true))._1).getOrElse(0)
val posCount: Int = posSugHigher + posSugLower
val negCount: Int = negSugHigher + negSugLower
val lowerCount: Int = posSugLower + negSugLower
val higherCount: Int = posSugHigher + negSugHigher
val trueDecreaseSum: Double = Try(results((true, true))._2).getOrElse(0.0)
val trueIncreaseSum: Double = Try(results((true, false))._2).getOrElse(0.0)
val falseDecreaseSum: Double = Try(results((false, true))._2).getOrElse(0.0)
val falseIncreaseSum: Double = Try(results((false, false))._2).getOrElse(0.0)
BinaryMetrics(
posCount = posCount,
negCount = negCount,
posSugHigher = posSugHigher,
posSugLower = posSugLower,
negSugHigher = negSugHigher,
negSugLower = negSugLower,
increasePrecision = safeDiv(posSugHigher, higherCount),
increaseRecall = safeDiv(posSugHigher, posCount),
decreasePrecision = safeDiv(negSugLower, lowerCount),
decreaseRecall = safeDiv(negSugLower, negCount),
trueRegret = safeDiv(trueDecreaseSum, posCount),
trueRegretMedian = trueRegretMedian,
trueRegret75Percentile = trueRegret75Percentile,
falseRegret = safeDiv(falseIncreaseSum, negCount),
trueIncreaseMagnitude = safeDiv(trueIncreaseSum, posSugHigher),
trueDecreaseMagnitude = safeDiv(trueDecreaseSum, posSugLower),
falseDecreaseMagnitude = safeDiv(falseDecreaseSum, negSugLower),
falseIncreaseMagnitude = safeDiv(falseIncreaseSum, negSugHigher),
trueDecreaseSum = trueDecreaseSum,
trueIncreaseSum = trueIncreaseSum,
falseDecreaseSum = falseDecreaseSum,
falseIncreaseSum = falseIncreaseSum
)
}
def combineEvalMetricFromRDD(data: RDD[BinaryMetrics]): BinaryMetrics = {
val metrics = data.reduce((a, b) => {
a + b
})
metrics.recompute
}
def safeDiv(numerator: Double, denominator: Double): Double = {
if (denominator == 0) {
0
} else {
numerator / denominator
}
}
}
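// Editor's note: hedged usage sketch, not part of the original file. The (Boolean, Boolean) key
// is (label, predictionLower) and the value is (count, sum), as documented on
// computeEvalMetricFromCounts above; the counts and percentile inputs are made-up numbers.
object BinaryMetricsSketch {
  def demo(): Unit = {
    val results = Map(
      (true, false)  -> (80, 40.0), // positive label, suggested higher
      (true, true)   -> (20, 5.0),  // positive label, suggested lower (true regret)
      (false, false) -> (30, 12.0), // negative label, suggested higher (false regret)
      (false, true)  -> (70, 28.0)  // negative label, suggested lower
    )
    val metrics = BinaryMetrics.computeEvalMetricFromCounts(
      results, trueRegretMedian = 0.2, trueRegret75Percentile = 0.4)
    println(BinaryMetrics.metricsHeader)
    println(metrics.toTSVRow)
  }
}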
|
TDDFT/aerosolve
|
airlearner/airlearner-strategy/src/main/scala/com/airbnb/common/ml/strategy/eval/BinaryMetrics.scala
|
Scala
|
apache-2.0
| 7,082 |
package com.atomist.rug.kind.json
import com.atomist.rug.kind.core.ProjectMutableView
import com.atomist.source.{EmptyArtifactSource, SimpleFileBasedArtifactSource, StringFileArtifact}
import com.atomist.tree.content.text.TextTreeNodeLifecycle
import com.atomist.tree.pathexpression.PathExpressionEngine
import com.atomist.tree.{ContainerTreeNode, MutableTreeNode}
import org.scalatest.{FlatSpec, Matchers}
class JsonMutableViewTest extends FlatSpec with Matchers {
import JsonParserTest._
import com.atomist.tree.pathexpression.PathExpressionParser._
val jsonParser = (new JsonType).parser
it should "parse and find node in root" in {
val f = StringFileArtifact("glossary.json", Simple)
val proj = SimpleFileBasedArtifactSource(f)
val pmv = new ProjectMutableView(EmptyArtifactSource(""), proj)
val fmv = pmv.findFile("glossary.json")
val cheatyPosNode = jsonParser.parse(f.content).get
val cheatyNode = TextTreeNodeLifecycle.makeReady("json", Seq(cheatyPosNode), fmv).head
val j = new JsonMutableView(f, pmv, cheatyNode)
j.nodeTags.contains("Json") should be (true)
assert(j.childrenNamed("glossary").size === 1)
}
it should "support path find" in {
val ee = new PathExpressionEngine
val expr = "/glossary/GlossDiv/title"
val f = StringFileArtifact("glossary.json", Simple)
val proj = SimpleFileBasedArtifactSource(f)
val pmv = new ProjectMutableView(EmptyArtifactSource(""), proj)
val fmv = pmv.findFile("glossary.json")
val cheatyPosNode = jsonParser.parse(f.content).get
val cheatyNode = TextTreeNodeLifecycle.makeReady("json", Seq(cheatyPosNode), fmv).head
val j = new JsonMutableView(f, pmv, cheatyNode)
val rtn = ee.evaluate(j, expr)
assert(rtn.right.get.size === 1)
assert(rtn.right.get.head.asInstanceOf[ContainerTreeNode].childrenNamed("STRING").head.value === "S")
}
it should "update path find" in {
val ee = new PathExpressionEngine
val expr = "/glossary/GlossDiv/GlossList/GlossEntry/GlossSee"
val f = StringFileArtifact("glossary.json", Simple)
val proj = SimpleFileBasedArtifactSource(f)
val pmv = new ProjectMutableView(EmptyArtifactSource(""), proj)
val fmv = pmv.findFile("glossary.json")
val cheatyPosNode = jsonParser.parse(f.content).get
val cheatyNode = TextTreeNodeLifecycle.makeReady("json", Seq(cheatyPosNode), fmv).head
val j = new JsonMutableView(f, pmv, cheatyNode)
val rtn = ee.evaluate(j, expr)
assert(rtn.right.get.size === 1)
val target = rtn.right.get.head.asInstanceOf[ContainerTreeNode].childrenNamed("STRING").head.asInstanceOf[MutableTreeNode]
assert(target.value === "markup")
target.update("XSLT")
assert(pmv.findFile("glossary.json").content === Simple.replace("\"markup", "\"XSLT"))
}
it should "find descendant in project" in {
val ee = new PathExpressionEngine
val expr = "/src/main/resources//Json()//GlossSee"
val f = StringFileArtifact("src/main/resources/glossary.json", Simple)
val proj = SimpleFileBasedArtifactSource(f)
val pmv = new ProjectMutableView(EmptyArtifactSource(""), proj)
val rtn = ee.evaluate(pmv, expr)
assert(rtn.right.get.size === 1)
val x = rtn.right.get.head.asInstanceOf[ContainerTreeNode]
assert(x.nodeName === "GlossSee")
val target = x.childrenNamed("STRING").head.asInstanceOf[MutableTreeNode]
assert(target.value === "markup")
target.update("XSLT")
//j.value should equal(Simple.replace("\"markup", "\"XSLT"))
}
}
|
atomist/rug
|
src/test/scala/com/atomist/rug/kind/json/JsonMutableViewTest.scala
|
Scala
|
gpl-3.0
| 3,517 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.clustering
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.annotation.Since
import org.apache.spark.internal.Logging
import org.apache.spark.ml.clustering.{KMeans => NewKMeans}
import org.apache.spark.ml.util.Instrumentation
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.linalg.BLAS.{axpy, scal}
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.Utils
import org.apache.spark.util.random.XORShiftRandom
/**
* K-means clustering with a k-means++ like initialization mode
* (the k-means|| algorithm by Bahmani et al).
*
* This is an iterative algorithm that will make multiple passes over the data, so any RDDs given
* to it should be cached by the user.
*/
@Since("0.8.0")
class KMeans private (
private var k: Int,
private var maxIterations: Int,
private var runs: Int,
private var initializationMode: String,
private var initializationSteps: Int,
private var epsilon: Double,
private var seed: Long) extends Serializable with Logging {
/**
* Constructs a KMeans instance with default parameters: {k: 2, maxIterations: 20, runs: 1,
* initializationMode: "k-means||", initializationSteps: 5, epsilon: 1e-4, seed: random}.
*/
@Since("0.8.0")
def this() = this(2, 20, 1, KMeans.K_MEANS_PARALLEL, 5, 1e-4, Utils.random.nextLong())
/**
* Number of clusters to create (k).
*/
@Since("1.4.0")
def getK: Int = k
/**
* Set the number of clusters to create (k). Default: 2.
*/
@Since("0.8.0")
def setK(k: Int): this.type = {
require(k > 0,
s"Number of clusters must be positive but got ${k}")
this.k = k
this
}
/**
* Maximum number of iterations allowed.
*/
@Since("1.4.0")
def getMaxIterations: Int = maxIterations
/**
* Set maximum number of iterations allowed. Default: 20.
*/
@Since("0.8.0")
def setMaxIterations(maxIterations: Int): this.type = {
require(maxIterations >= 0,
s"Maximum of iterations must be nonnegative but got ${maxIterations}")
this.maxIterations = maxIterations
this
}
/**
* The initialization algorithm. This can be either "random" or "k-means||".
*/
@Since("1.4.0")
def getInitializationMode: String = initializationMode
/**
* Set the initialization algorithm. This can be either "random" to choose random points as
* initial cluster centers, or "k-means||" to use a parallel variant of k-means++
* (Bahmani et al., Scalable K-Means++, VLDB 2012). Default: k-means||.
*/
@Since("0.8.0")
def setInitializationMode(initializationMode: String): this.type = {
KMeans.validateInitMode(initializationMode)
this.initializationMode = initializationMode
this
}
/**
* This function has no effect since Spark 2.0.0.
*/
@Since("1.4.0")
def getRuns: Int = {
logWarning("Getting number of runs has no effect since Spark 2.0.0.")
runs
}
/**
* This function has no effect since Spark 2.0.0.
*/
@Since("0.8.0")
def setRuns(runs: Int): this.type = {
logWarning("Setting number of runs has no effect since Spark 2.0.0.")
this
}
/**
* Number of steps for the k-means|| initialization mode
*/
@Since("1.4.0")
def getInitializationSteps: Int = initializationSteps
/**
* Set the number of steps for the k-means|| initialization mode. This is an advanced
* setting -- the default of 5 is almost always enough. Default: 5.
*/
@Since("0.8.0")
def setInitializationSteps(initializationSteps: Int): this.type = {
require(initializationSteps > 0,
s"Number of initialization steps must be positive but got ${initializationSteps}")
this.initializationSteps = initializationSteps
this
}
/**
* The distance threshold within which we consider centers to have converged.
*/
@Since("1.4.0")
def getEpsilon: Double = epsilon
/**
* Set the distance threshold within which we consider centers to have converged.
* If all centers move less than this Euclidean distance, we stop iterating one run.
*/
@Since("0.8.0")
def setEpsilon(epsilon: Double): this.type = {
require(epsilon >= 0,
s"Distance threshold must be nonnegative but got ${epsilon}")
this.epsilon = epsilon
this
}
/**
* The random seed for cluster initialization.
*/
@Since("1.4.0")
def getSeed: Long = seed
/**
* Set the random seed for cluster initialization.
*/
@Since("1.4.0")
def setSeed(seed: Long): this.type = {
this.seed = seed
this
}
// Initial cluster centers can be provided as a KMeansModel object rather than using the
// random or k-means|| initializationMode
private var initialModel: Option[KMeansModel] = None
/**
* Set the initial starting point, bypassing the random initialization or k-means||
* The condition model.k == this.k must be met, failure results
* in an IllegalArgumentException.
*/
@Since("1.4.0")
def setInitialModel(model: KMeansModel): this.type = {
require(model.k == k, "mismatched cluster count")
initialModel = Some(model)
this
}
/**
* Train a K-means model on the given set of points; `data` should be cached for high
* performance, because this is an iterative algorithm.
*/
@Since("0.8.0")
def run(data: RDD[Vector]): KMeansModel = {
run(data, None)
}
private[spark] def run(
data: RDD[Vector],
instr: Option[Instrumentation[NewKMeans]]): KMeansModel = {
if (data.getStorageLevel == StorageLevel.NONE) {
logWarning("The input data is not directly cached, which may hurt performance if its"
+ " parent RDDs are also uncached.")
}
// Compute squared norms and cache them.
val norms = data.map(Vectors.norm(_, 2.0))
norms.persist()
val zippedData = data.zip(norms).map { case (v, norm) =>
new VectorWithNorm(v, norm)
}
val model = runAlgorithm(zippedData, instr)
norms.unpersist()
// Warn at the end of the run as well, for increased visibility.
if (data.getStorageLevel == StorageLevel.NONE) {
logWarning("The input data was not directly cached, which may hurt performance if its"
+ " parent RDDs are also uncached.")
}
model
}
/**
* Implementation of K-Means algorithm.
*/
private def runAlgorithm(
data: RDD[VectorWithNorm],
instr: Option[Instrumentation[NewKMeans]]): KMeansModel = {
val sc = data.sparkContext
val initStartTime = System.nanoTime()
// Only one run is allowed when initialModel is given
val numRuns = if (initialModel.nonEmpty) {
if (runs > 1) logWarning("Ignoring runs; one run is allowed when initialModel is given.")
1
} else {
runs
}
val centers = initialModel match {
case Some(kMeansCenters) =>
Array(kMeansCenters.clusterCenters.map(s => new VectorWithNorm(s)))
case None =>
if (initializationMode == KMeans.RANDOM) {
initRandom(data)
} else {
initKMeansParallel(data)
}
}
val initTimeInSeconds = (System.nanoTime() - initStartTime) / 1e9
logInfo(s"Initialization with $initializationMode took " + "%.3f".format(initTimeInSeconds) +
" seconds.")
val active = Array.fill(numRuns)(true)
val costs = Array.fill(numRuns)(0.0)
var activeRuns = new ArrayBuffer[Int] ++ (0 until numRuns)
var iteration = 0
val iterationStartTime = System.nanoTime()
instr.map(_.logNumFeatures(centers(0)(0).vector.size))
// Execute iterations of Lloyd's algorithm until all runs have converged
while (iteration < maxIterations && !activeRuns.isEmpty) {
type WeightedPoint = (Vector, Long)
def mergeContribs(x: WeightedPoint, y: WeightedPoint): WeightedPoint = {
axpy(1.0, x._1, y._1)
(y._1, x._2 + y._2)
}
val activeCenters = activeRuns.map(r => centers(r)).toArray
val costAccums = activeRuns.map(_ => sc.accumulator(0.0))
val bcActiveCenters = sc.broadcast(activeCenters)
// Find the sum and count of points mapping to each center
val totalContribs = data.mapPartitions { points =>
val thisActiveCenters = bcActiveCenters.value
val runs = thisActiveCenters.length
val k = thisActiveCenters(0).length
val dims = thisActiveCenters(0)(0).vector.size
val sums = Array.fill(runs, k)(Vectors.zeros(dims))
val counts = Array.fill(runs, k)(0L)
points.foreach { point =>
(0 until runs).foreach { i =>
val (bestCenter, cost) = KMeans.findClosest(thisActiveCenters(i), point)
costAccums(i) += cost
val sum = sums(i)(bestCenter)
axpy(1.0, point.vector, sum)
counts(i)(bestCenter) += 1
}
}
val contribs = for (i <- 0 until runs; j <- 0 until k) yield {
((i, j), (sums(i)(j), counts(i)(j)))
}
contribs.iterator
}.reduceByKey(mergeContribs).collectAsMap()
bcActiveCenters.unpersist(blocking = false)
// Update the cluster centers and costs for each active run
for ((run, i) <- activeRuns.zipWithIndex) {
var changed = false
var j = 0
while (j < k) {
val (sum, count) = totalContribs((i, j))
if (count != 0) {
scal(1.0 / count, sum)
val newCenter = new VectorWithNorm(sum)
if (KMeans.fastSquaredDistance(newCenter, centers(run)(j)) > epsilon * epsilon) {
changed = true
}
centers(run)(j) = newCenter
}
j += 1
}
if (!changed) {
active(run) = false
logInfo("Run " + run + " finished in " + (iteration + 1) + " iterations")
}
costs(run) = costAccums(i).value
}
activeRuns = activeRuns.filter(active(_))
iteration += 1
}
val iterationTimeInSeconds = (System.nanoTime() - iterationStartTime) / 1e9
logInfo(s"Iterations took " + "%.3f".format(iterationTimeInSeconds) + " seconds.")
if (iteration == maxIterations) {
logInfo(s"KMeans reached the max number of iterations: $maxIterations.")
} else {
logInfo(s"KMeans converged in $iteration iterations.")
}
val (minCost, bestRun) = costs.zipWithIndex.min
logInfo(s"The cost for the best run is $minCost.")
new KMeansModel(centers(bestRun).map(_.vector))
}
/**
* Initialize `runs` sets of cluster centers at random.
*/
private def initRandom(data: RDD[VectorWithNorm])
: Array[Array[VectorWithNorm]] = {
// Sample all the cluster centers in one pass to avoid repeated scans
val sample = data.takeSample(true, runs * k, new XORShiftRandom(this.seed).nextInt()).toSeq
Array.tabulate(runs)(r => sample.slice(r * k, (r + 1) * k).map { v =>
new VectorWithNorm(Vectors.dense(v.vector.toArray), v.norm)
}.toArray)
}
/**
* Initialize `runs` sets of cluster centers using the k-means|| algorithm by Bahmani et al.
* (Bahmani et al., Scalable K-Means++, VLDB 2012). This is a variant of k-means++ that tries
* to find dissimilar cluster centers by starting with a random center and then doing
* passes where more centers are chosen with probability proportional to their squared distance
* to the current cluster set. It results in a provable approximation to an optimal clustering.
*
* The original paper can be found at http://theory.stanford.edu/~sergei/papers/vldb12-kmpar.pdf.
*/
private def initKMeansParallel(data: RDD[VectorWithNorm])
: Array[Array[VectorWithNorm]] = {
// Initialize empty centers and point costs.
val centers = Array.tabulate(runs)(r => ArrayBuffer.empty[VectorWithNorm])
var costs = data.map(_ => Array.fill(runs)(Double.PositiveInfinity))
// Initialize each run's first center to a random point.
val seed = new XORShiftRandom(this.seed).nextInt()
val sample = data.takeSample(true, runs, seed).toSeq
// Could be empty if data is empty; fail with a better message early:
require(sample.size >= runs, s"Required $runs samples but got ${sample.size} from $data")
val newCenters = Array.tabulate(runs)(r => ArrayBuffer(sample(r).toDense))
/** Merges new centers to centers. */
def mergeNewCenters(): Unit = {
var r = 0
while (r < runs) {
centers(r) ++= newCenters(r)
newCenters(r).clear()
r += 1
}
}
// On each step, sample 2 * k points on average for each run with probability proportional
// to their squared distance from that run's centers. Note that only distances between points
// and new centers are computed in each iteration.
var step = 0
while (step < initializationSteps) {
val bcNewCenters = data.context.broadcast(newCenters)
val preCosts = costs
costs = data.zip(preCosts).map { case (point, cost) =>
Array.tabulate(runs) { r =>
math.min(KMeans.pointCost(bcNewCenters.value(r), point), cost(r))
}
}.persist(StorageLevel.MEMORY_AND_DISK)
val sumCosts = costs
.aggregate(new Array[Double](runs))(
seqOp = (s, v) => {
// s += v
var r = 0
while (r < runs) {
s(r) += v(r)
r += 1
}
s
},
combOp = (s0, s1) => {
// s0 += s1
var r = 0
while (r < runs) {
s0(r) += s1(r)
r += 1
}
s0
}
)
bcNewCenters.unpersist(blocking = false)
preCosts.unpersist(blocking = false)
val chosen = data.zip(costs).mapPartitionsWithIndex { (index, pointsWithCosts) =>
val rand = new XORShiftRandom(seed ^ (step << 16) ^ index)
pointsWithCosts.flatMap { case (p, c) =>
val rs = (0 until runs).filter { r =>
rand.nextDouble() < 2.0 * c(r) * k / sumCosts(r)
}
if (rs.length > 0) Some((p, rs)) else None
}
}.collect()
mergeNewCenters()
chosen.foreach { case (p, rs) =>
rs.foreach(newCenters(_) += p.toDense)
}
step += 1
}
mergeNewCenters()
costs.unpersist(blocking = false)
// Finally, we might have a set of more than k candidate centers for each run; weigh each
// candidate by the number of points in the dataset mapping to it and run a local k-means++
// on the weighted centers to pick just k of them
val bcCenters = data.context.broadcast(centers)
val weightMap = data.flatMap { p =>
Iterator.tabulate(runs) { r =>
((r, KMeans.findClosest(bcCenters.value(r), p)._1), 1.0)
}
}.reduceByKey(_ + _).collectAsMap()
bcCenters.unpersist(blocking = false)
val finalCenters = (0 until runs).par.map { r =>
val myCenters = centers(r).toArray
val myWeights = (0 until myCenters.length).map(i => weightMap.getOrElse((r, i), 0.0)).toArray
LocalKMeans.kMeansPlusPlus(r, myCenters, myWeights, k, 30)
}
finalCenters.toArray
}
}
/**
* Top-level methods for calling K-means clustering.
*/
@Since("0.8.0")
object KMeans {
// Initialization mode names
@Since("0.8.0")
val RANDOM = "random"
@Since("0.8.0")
val K_MEANS_PARALLEL = "k-means||"
/**
* Trains a k-means model using the given set of parameters.
*
* @param data Training points as an `RDD` of `Vector` types.
* @param k Number of clusters to create.
* @param maxIterations Maximum number of iterations allowed.
* @param runs This param has no effect since Spark 2.0.0.
* @param initializationMode The initialization algorithm. This can either be "random" or
* "k-means||". (default: "k-means||")
* @param seed Random seed for cluster initialization. Default is to generate seed based
* on system time.
*/
@Since("1.3.0")
def train(
data: RDD[Vector],
k: Int,
maxIterations: Int,
runs: Int,
initializationMode: String,
seed: Long): KMeansModel = {
new KMeans().setK(k)
.setMaxIterations(maxIterations)
.setInitializationMode(initializationMode)
.setSeed(seed)
.run(data)
}
/**
* Trains a k-means model using the given set of parameters.
*
* @param data Training points as an `RDD` of `Vector` types.
* @param k Number of clusters to create.
* @param maxIterations Maximum number of iterations allowed.
* @param runs This param has no effect since Spark 2.0.0.
* @param initializationMode The initialization algorithm. This can either be "random" or
* "k-means||". (default: "k-means||")
*/
@Since("0.8.0")
def train(
data: RDD[Vector],
k: Int,
maxIterations: Int,
runs: Int,
initializationMode: String): KMeansModel = {
new KMeans().setK(k)
.setMaxIterations(maxIterations)
.setInitializationMode(initializationMode)
.run(data)
}
/**
* Trains a k-means model using specified parameters and the default values for unspecified.
*/
@Since("0.8.0")
def train(
data: RDD[Vector],
k: Int,
maxIterations: Int): KMeansModel = {
train(data, k, maxIterations, 1, K_MEANS_PARALLEL)
}
/**
* Trains a k-means model using specified parameters and the default values for unspecified.
*/
@Since("0.8.0")
def train(
data: RDD[Vector],
k: Int,
maxIterations: Int,
runs: Int): KMeansModel = {
train(data, k, maxIterations, runs, K_MEANS_PARALLEL)
}
/**
* Returns the index of the closest center to the given point, as well as the squared distance.
*/
private[mllib] def findClosest(
centers: TraversableOnce[VectorWithNorm],
point: VectorWithNorm): (Int, Double) = {
var bestDistance = Double.PositiveInfinity
var bestIndex = 0
var i = 0
centers.foreach { center =>
// Since `\|a - b\| \geq |\|a\| - \|b\||`, we can use this lower bound to avoid unnecessary
// distance computation.
var lowerBoundOfSqDist = center.norm - point.norm
lowerBoundOfSqDist = lowerBoundOfSqDist * lowerBoundOfSqDist
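      // Numeric illustration: if ||center|| = 5 and ||point|| = 3, then ||center - point||^2 >= (5 - 3)^2 = 4,
      // so when bestDistance is already below 4 the exact squared distance need not be computed.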
if (lowerBoundOfSqDist < bestDistance) {
val distance: Double = fastSquaredDistance(center, point)
if (distance < bestDistance) {
bestDistance = distance
bestIndex = i
}
}
i += 1
}
(bestIndex, bestDistance)
}
/**
* Returns the K-means cost of a given point against the given cluster centers.
*/
private[mllib] def pointCost(
centers: TraversableOnce[VectorWithNorm],
point: VectorWithNorm): Double =
findClosest(centers, point)._2
/**
* Returns the squared Euclidean distance between two vectors computed by
* [[org.apache.spark.mllib.util.MLUtils#fastSquaredDistance]].
*/
private[clustering] def fastSquaredDistance(
v1: VectorWithNorm,
v2: VectorWithNorm): Double = {
MLUtils.fastSquaredDistance(v1.vector, v1.norm, v2.vector, v2.norm)
}
private[spark] def validateInitMode(initMode: String): Boolean = {
initMode match {
case KMeans.RANDOM => true
case KMeans.K_MEANS_PARALLEL => true
case _ => false
}
}
}
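/**
 * A minimal usage sketch of the `train` overloads above, assuming an already-constructed
 * SparkContext and made-up data values; this hypothetical helper is not part of MLlib.
 */
private[clustering] object KMeansUsageSketch {
  def run(sc: org.apache.spark.SparkContext): Double = {
    // Two well-separated clusters, so k = 2 converges in a few iterations.
    val points = sc.parallelize(Seq(
      Vectors.dense(0.0, 0.0), Vectors.dense(0.1, 0.1),
      Vectors.dense(9.0, 9.0), Vectors.dense(9.1, 9.1)))
    val model = KMeans.train(points, k = 2, maxIterations = 20)
    // Sum of squared distances from each point to its closest center.
    model.computeCost(points)
  }
}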
/**
* A vector with its norm for fast distance computation.
*
* @see [[org.apache.spark.mllib.clustering.KMeans#fastSquaredDistance]]
*/
private[clustering]
class VectorWithNorm(val vector: Vector, val norm: Double) extends Serializable {
def this(vector: Vector) = this(vector, Vectors.norm(vector, 2.0))
def this(array: Array[Double]) = this(Vectors.dense(array))
/** Converts the vector to a dense vector. */
def toDense: VectorWithNorm = new VectorWithNorm(Vectors.dense(vector.toArray), norm)
}
|
xieguobin/Spark_2.0.0_cn1
|
mllib/clustering/KMeans.scala
|
Scala
|
apache-2.0
| 20,695 |
package scala.meta.tests
package prettyprinters
import java.io._
import java.nio.charset.Charset
import munit._
import scala.meta._
import compat.Platform.EOL
class PublicSuite extends FunSuite {
test("scala.meta.Dialect.toString") {
// covered below
}
test("scala.meta.Tree.toString (manual)") {
val tree = Term.ApplyInfix(Term.Name("foo"), Term.Name("+"), Nil, List(Term.Name("bar")))
assert(tree.toString == "foo + bar")
}
test("scala.meta.Tree.structure (manual)") {
val tree = Term.ApplyInfix(Term.Name("foo"), Term.Name("+"), Nil, List(Term.Name("bar")))
assert(
tree.structure == """Term.ApplyInfix(Term.Name("foo"), Term.Name("+"), Nil, List(Term.Name("bar")))"""
)
}
test("scala.meta.Tree.syntax") {
val tree = Term.ApplyInfix(Term.Name("foo"), Term.Name("+"), Nil, List(Term.Name("bar")))
assert(tree.syntax == "foo + bar")
}
test("scala.meta.Tree.toString (parsed)") {
val tree = "foo + bar // baz".parse[Term].get
assert(tree.toString == "foo + bar // baz")
}
test("scala.meta.Tree.structure (parsed)") {
val tree = "foo + bar // baz".parse[Term].get
assert(
tree.structure == """Term.ApplyInfix(Term.Name("foo"), Term.Name("+"), Nil, List(Term.Name("bar")))"""
)
}
test("scala.meta.Tree.syntax (parsed)") {
val tree = "foo + bar // baz".parse[Term].get
assert(tree.syntax == "foo + bar // baz")
}
test("scala.meta.Tree.toString (quasiquotes)") {
val tree = q"foo + bar // baz"
assert(tree.toString == "foo + bar")
}
test("scala.meta.Tree.structure (quasiquoted)") {
val tree = q"foo + bar // baz"
assert(
tree.structure == """Term.ApplyInfix(Term.Name("foo"), Term.Name("+"), Nil, List(Term.Name("bar")))"""
)
}
test("scala.meta.Tree.syntax (quasiquoted)") {
val tree = q"foo + bar // baz"
assert(tree.syntax == "foo + bar")
}
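  // The parsed and quasiquoted cases above differ only in where the tree comes from: parsing keeps the
  // original source (so "// baz" survives in .syntax and .toString), while quasiquotes build a tree with
  // no attached source. A combined sketch of that contrast, in the same style as the tests above and not
  // part of the published suite:
  test("scala.meta.Tree.syntax (parsed vs quasiquoted, sketch)") {
    val parsed = "foo + bar // baz".parse[Term].get
    val quoted = q"foo + bar // baz"
    assert(parsed.syntax == "foo + bar // baz")
    assert(quoted.syntax == "foo + bar")
  }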
test("scala.meta.classifiers.Classifiable.toString") {
// n/a
}
test("scala.meta.classifiers.Classifier.toString") {
// n/a
}
test("scala.meta.cli.Metac.toString") {
// n/a
}
test("scala.meta.cli.Metacp.toString") {
// n/a
}
test("scala.meta.cli.Metai.toString") {
// n/a
}
test("scala.meta.cli.Metap.toString") {
// n/a
}
test("scala.meta.cli.Reporter.toString") {
// n/a
}
test("scala.meta.common.Convert.toString") {
// n/a
}
test("scala.meta.common.Optional.toString") {
// n/a
}
test("scala.meta.dialects.Scala3.toString") {
assertNoDiff(scala.meta.dialects.Scala3.toString, "Scala3")
}
test("scala.meta.dialects.Dotty.toString") {
// NOTE(olafur): `Dotty` and `Scala3` are identical so it's expected that
// `toString` returns "Scala3" instead of "Dotty".
assertEquals(scala.meta.dialects.Dotty.toString, "Scala3")
}
test("scala.meta.dialects.Sbt0136.toString") {
assertNoDiff(scala.meta.dialects.Sbt0136.toString, "Sbt0136")
}
test("scala.meta.dialects.Sbt0137.toString") {
assertNoDiff(scala.meta.dialects.Sbt0137.toString, "Sbt0137")
}
test("scala.meta.dialects.Sbt.toString") {
assertNoDiff(scala.meta.dialects.Sbt.toString, "Sbt1")
}
test("scala.meta.dialects.Sbt1.toString") {
assertNoDiff(scala.meta.dialects.Sbt1.toString, "Sbt1")
}
test("scala.meta.dialects.Scala210.toString") {
assertNoDiff(scala.meta.dialects.Scala210.toString, "Scala210")
}
test("scala.meta.dialects.Scala211.toString") {
assertNoDiff(scala.meta.dialects.Scala211.toString, "Scala211")
}
test("scala.meta.dialects.Scala212.toString") {
assertNoDiff(scala.meta.dialects.Scala212.toString, "Scala212")
}
test("scala.meta.dialects.Scala212Source3.toString") {
assertNoDiff(scala.meta.dialects.Scala212Source3.toString, "Scala212Source3")
}
test("scala.meta.dialects.Scala213.toString") {
assertNoDiff(scala.meta.dialects.Scala213.toString, "Scala213")
}
test("scala.meta.dialects.Scala213Source3.toString") {
assertNoDiff(scala.meta.dialects.Scala213Source3.toString, "Scala213Source3")
}
test("scala.meta.dialects.Scala.toString") {
assertNoDiff(scala.meta.dialects.Scala.toString, "Scala213")
}
test("scala.meta.dialects.Typelevel211.toString") {
assertNoDiff(scala.meta.dialects.Typelevel211.toString, "Typelevel211")
}
test("scala.meta.dialects.Typelevel212.toString") {
assertNoDiff(scala.meta.dialects.Typelevel212.toString, "Typelevel212")
}
test("scala.meta.dialects.Paradise211.toString") {
assertNoDiff(scala.meta.dialects.Paradise211.toString, "Paradise211")
}
test("scala.meta.dialects.Paradise212.toString") {
assertNoDiff(scala.meta.dialects.Paradise212.toString, "Paradise212")
}
test("scala.meta.dialects.ParadiseTypelevel211.toString") {
assertNoDiff(scala.meta.dialects.ParadiseTypelevel211.toString, "ParadiseTypelevel211")
}
test("scala.meta.dialects.ParadiseTypelevel212.toString") {
assertNoDiff(scala.meta.dialects.ParadiseTypelevel212.toString, "ParadiseTypelevel212")
}
test("scala.meta.inputs.Input.toString") {
// covered below
}
test("scala.meta.dialects.AllowEverything.toString") {
// Satisfy surface suite.
}
test("scala.meta.Member.Case.toString") {
// Satisfy surface suite.
}
test("scala.meta.inputs.Input.None.toString") {
assert(Input.None.toString == "Input.None")
}
test("scala.meta.inputs.Input.File.toString") {
val path = RelativePath("hello.scala").toAbsolute
val syntax = path.syntax
val input1 = Input.File(path, Charset.forName("latin1"))
val input2 = Input.File(path, Charset.forName("UTF-8"))
input1 match { case _: Input.File => }
input2 match { case _: Input.File => }
assert(input1.toString == s"""Input.File(new File("$syntax"), Charset.forName("ISO-8859-1"))""")
assert(input2.toString == s"""Input.File(new File("$syntax"), Charset.forName("UTF-8"))""")
}
test("scala.meta.inputs.Input.Slice.toString") {
val input = Input.Slice(Input.String("foo"), 0, 2)
input match { case _: Input.Slice => }
assert(input.toString == """Input.Slice(Input.String("foo"), 0, 2)""")
}
test("scala.meta.inputs.Input.Stream.toString") {
val latin1 = Charset.forName("latin1")
val stream = new ByteArrayInputStream("Привет(мир!)".getBytes(latin1))
val input1 = Input.Stream(stream, latin1)
val input2 = Input.Stream(stream, Charset.forName("UTF-8"))
input1 match { case _: Input.Stream => }
input2 match { case _: Input.Stream => }
assert(input1.toString == """Input.Stream(<stream>, Charset.forName("ISO-8859-1"))""")
assert(input2.toString == """Input.Stream(<stream>, Charset.forName("UTF-8"))""")
}
test("scala.meta.inputs.Input.String.toString") {
val input = Input.String("foo")
input match { case _: Input.String => }
assert(input.toString == """Input.String("foo")""")
}
test("scala.meta.inputs.Input.VirtualFile.toString") {
val input = Input.VirtualFile("foo.scala", "foo")
input match { case _: Input.VirtualFile => }
assert(input.toString == s"""Input.VirtualFile("foo.scala", "foo")""")
}
test("scala.meta.inputs.Input.Ammonite.toString") {
val input = Input.Ammonite(Input.None)
input match { case _: Input.Ammonite => }
assert(input.toString == s"""Input.Ammonite(Input.None)""")
}
test("scala.meta.inputs.Position.toString") {
// covered below
}
test("scala.meta.inputs.Position.None.toString") {
assert(Position.None.toString == "Position.None")
}
test("scala.meta.inputs.Position.Range.toString") {
val Term.ApplyInfix(lhs, _, _, _) = "foo + bar".parse[Term].get
lhs.pos match { case _: Position.Range => ; case _ => }
assert(lhs.pos.toString == """[0..3) in Input.String("foo + bar")""")
}
test("scala.meta.io.AbsolutePath.toString") {
// NOTE: come up with a platform-independent test
}
test("scala.meta.io.Classpath.toString") {
// NOTE: come up with a platform-independent test
}
test("scala.meta.io.RelativePath.toString") {
// NOTE: come up with a platform-independent test
}
val untestedClasses = List(
"scala.meta.metap.Format.Proto",
"scala.meta.metac.Settings",
"scala.meta.metap.Settings",
"scala.meta.metacp.Result",
"scala.meta.metap.Format.Compact",
"scala.meta.tokens.Token.Indentation",
"scala.meta.metap.Format",
"scala.meta.metacp.Settings",
"scala.meta.metap.Format.Detailed",
"scala.meta.tokens.Token.Unquote",
"scala.meta.tokens.Token.LFLF",
"scala.meta.tokens.Token.Ellipsis"
)
untestedClasses.foreach { name =>
test(name + ".toString") {
// n/a
}
}
test("scala.meta.parsers.ParseException.toString") {
intercept[ParseException] {
try "foo + class".parse[Term].get
catch {
case ex: ParseException =>
assert(ex.toString == """
|<input>:1: error: end of file expected but class found
|foo + class
| ^
""".trim.stripMargin.split('\\n').mkString(EOL))
throw ex
}
}
}
test("scala.meta.parsers.Parsed.toString") {
// covered below
}
test("scala.meta.parsers.Parsed.Error.toString") {
val parsed = "foo + class".parse[Term]
parsed match { case _: Parsed.Error => ; case _ => }
assert(parsed.toString == """
|<input>:1: error: end of file expected but class found
|foo + class
| ^
""".trim.stripMargin.split('\\n').mkString(EOL))
}
test("scala.meta.parsers.Parsed.Success.toString") {
val parsed = "foo + bar".parse[Term]
parsed match { case _: Parsed.Success[_] => ; case _ => }
assert(parsed.toString == "foo + bar")
}
test("scala.meta.prettyprinters.Show.toString") {
// n/a
}
test("scala.meta.prettyprinters.Structure.toString") {
// n/a
}
test("scala.meta.prettyprinters.Syntax.toString") {
// n/a
}
test("scala.meta.quasiquotes.Lift.toString") {
// n/a
}
test("scala.meta.quasiquotes.Unlift.toString") {
// n/a
}
test("scala.meta.tokenizers.Tokenize.toString") {
// n/a
}
test("scala.meta.tokenizers.TokenizeException.toString") {
intercept[TokenizeException] {
try """"c""".tokenize.get
catch {
case ex: TokenizeException =>
assert(ex.toString == """
|<input>:1: error: unclosed string literal
|"c
|^
""".trim.stripMargin.split('\\n').mkString(EOL))
throw ex
}
}
}
test("scala.meta.tokenizers.Tokenized.Error.toString") {
val tokenized = """"c""".tokenize
tokenized match { case _: Tokenized.Error => ; case _ => }
assert(tokenized.toString == """
|<input>:1: error: unclosed string literal
|"c
|^
""".trim.stripMargin.split('\\n').mkString(EOL))
}
test("scala.meta.tokenizers.Tokenized.Success.toString") {
val tokenized = "foo + bar".tokenize
tokenized match { case _: Tokenized.Success => ; case _ => }
assert(tokenized.toString == "foo + bar")
}
test("scala.meta.tokens.Token.toString") {
val token = "foo + bar".tokenize.get(1)
assert(token.toString == "foo")
}
test("scala.meta.tokens.Token.structure") {
val token = "foo + bar".tokenize.get(1)
assert(token.structure == "foo [0..3)")
}
test("scala.meta.tokens.Token.syntax") {
val token = "foo + bar".tokenize.get(1)
assert(token.syntax == "foo")
}
test("scala.meta.tokens.Tokens.toString") {
val tokens = "foo + bar".tokenize.get
assert(tokens.toString == "foo + bar")
}
test("scala.meta.tokens.Tokens.structure") {
val tokens = "foo + bar".tokenize.get
assert(
tokens.structure == "Tokens(BOF [0..0), foo [0..3), [3..4), + [4..5), [5..6), bar [6..9), EOF [9..9))"
)
}
test("scala.meta.tokens.Tokens.syntax") {
val tokens = "foo + bar".tokenize.get
assert(tokens.syntax == "foo + bar")
}
test("scala.meta.tokens.Token.Interpolation.toString") {
// n/a
}
test("scala.meta.tokens.Token.Xml.toString") {
// n/a
}
test("scala.meta.transversers.SimpleTraverser.toString") {
// n/a
}
test("scala.meta.transversers.Transformer.toString") {
// n/a
}
test("scala.meta.transversers.Traverser.toString") {
// n/a
}
}
|
scalameta/scalameta
|
tests/shared/src/test/scala/scala/meta/tests/prettyprinters/PublicSuite.scala
|
Scala
|
bsd-3-clause
| 12,325 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.tree
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.apache.spark.SparkFunSuite
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.mllib.tree.configuration.Algo._
import org.apache.spark.mllib.tree.configuration.FeatureType._
import org.apache.spark.mllib.tree.configuration.{QuantileStrategy, Strategy}
import org.apache.spark.mllib.tree.impl.{BaggedPoint, DecisionTreeMetadata, TreePoint}
import org.apache.spark.mllib.tree.impurity.{Entropy, Gini, Variance}
import org.apache.spark.mllib.tree.model._
import org.apache.spark.mllib.util.MLlibTestSparkContext
import org.apache.spark.util.Utils
/**
 * A decision tree is a predictive model.
 * Classification and Regression Trees (CART) are commonly used for classification or regression
 * problems whose features carry categorical information.
 * Feature values are not standardized; tuning may require adjusting the number of iterations.
 */
class DecisionTreeSuite extends SparkFunSuite with MLlibTestSparkContext {
/////////////////////////////////////////////////////////////////////////////
  // Tests examining individual elements of training
/////////////////////////////////////////////////////////////////////////////
  // Binary classification with continuous features: split and bin calculation
  test("Binary classification with continuous features: split and bin calculation") {
/**
* (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]),
* (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]),
* (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]),
* (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]), (1.0,[0.0,1.0]),
*/
val arr = DecisionTreeSuite.generateOrderedLabeledPointsWithLabel1()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
    // Gini impurity: the expected error rate from randomly assigning one of the set's outcomes to a randomly chosen item of the set
val strategy = new Strategy(Classification, Gini, 3, 2, 100)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
    assert(splits.length === 2) // splits
    assert(bins.length === 2) // bins
assert(splits(0).length === 99)
assert(bins(0).length === 100)
}
  // Binary classification with binary (ordered) categorical features: split and bin calculation
test("Binary classification with binary (ordered) categorical features:" +
" split and bin calculation") {
//[(1.0,[0.0,1.0]), (0.0,[1.0, 0.0]), (1.0,[0.0,1.0]), (0.0,[1.0, 0.0])]
val arr = DecisionTreeSuite.generateCategoricalDataPoints()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
    // strategy
    val strategy = new Strategy(
      Classification, // classification
      Gini,
      maxDepth = 2, // maximum tree depth; a stopping condition that guards against overfitting
      numClasses = 2, // number of classes
      maxBins = 100, // maximum number of bins; for a continuous feature this is how many buckets its values are discretized into
      /**
       Declares which features are categorical and how many values each one takes. Feature indices are
       0-based, so 0 and 4 refer to the 1st and 5th features; Map(0 -> 2, 4 -> 10) means feature 0 has
       two values (0 and 1) and feature 4 has ten values {0, 1, 2, ..., 9}.
      **/
      categoricalFeaturesInfo = Map(0 -> 2, 1 -> 2))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
assert(splits.length === 2)
assert(bins.length === 2)
    // no bins or splits pre-computed for ordered categorical features
assert(splits(0).length === 0)
assert(bins(0).length === 0)
}
  // Binary classification with 3-ary (ordered) categorical features, with no samples for one category
test("Binary classification with 3-ary (ordered) categorical features," +
" with no samples for one category") {
val arr = DecisionTreeSuite.generateCategoricalDataPoints()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new Strategy(
Classification,
Gini,
      maxDepth = 2, // maximum tree depth; a stopping condition that guards against overfitting
      numClasses = 2, // number of classes
      maxBins = 100, // maximum number of bins; for a continuous feature this is how many buckets its values are discretized into
      // categoricalFeaturesInfo stores the categorical (discrete) features and the number of values each takes.
      // Map(n -> k) means feature n is categorical with K values, namely (0, 1, ..., K-1).
      // The map key is the feature's index in the input Vector; the value is the number of distinct categories.
      // Here features 0 and 1 are each declared categorical with 3 values.
      categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(bins.length === 2)
    // no bins or splits pre-computed for ordered categorical features
assert(splits(0).length === 0)
assert(bins(0).length === 0)
}
  // extract categories from a number for multiclass classification
test("extract categories from a number for multiclass classification") {
val l = DecisionTree.extractMultiClassCategories(13, 10)
assert(l.length === 3)
assert(List(3.0, 2.0, 0.0).toSeq === l.toSeq)
}
  // find splits for a continuous feature
test("find splits for a continuous feature") {
// find splits for normal case
{
val fakeMetadata = new DecisionTreeMetadata(1, 0, 0, 0,
Map(), Set(),
Array(6), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0, 0
)
val featureSamples = Array.fill(200000)(math.random)
val splits = DecisionTree.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
assert(splits.length === 5)
assert(fakeMetadata.numSplits(0) === 5)
assert(fakeMetadata.numBins(0) === 6)
      // check returned splits are distinct
assert(splits.distinct.length === splits.length)
}
    // find splits should not return identical splits
    // when there are not enough split candidates, reduce the number of splits in metadata
{
val fakeMetadata = new DecisionTreeMetadata(1, 0, 0, 0,
Map(), Set(),
Array(5), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0, 0
)
val featureSamples = Array(1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 3).map(_.toDouble)
val splits = DecisionTree.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
assert(splits.length === 3)
assert(fakeMetadata.numSplits(0) === 3)
assert(fakeMetadata.numBins(0) === 4)
      // check returned splits are distinct
assert(splits.distinct.length === splits.length)
}
    // find splits when most samples close to the minimum
{
val fakeMetadata = new DecisionTreeMetadata(1, 0, 0, 0,
Map(), Set(),
Array(3), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0, 0
)
val featureSamples = Array(2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 4, 5).map(_.toDouble)
val splits = DecisionTree.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
assert(splits.length === 2)
assert(fakeMetadata.numSplits(0) === 2)
assert(fakeMetadata.numBins(0) === 3)
assert(splits(0) === 2.0)
assert(splits(1) === 3.0)
}
    // find splits when most samples close to the maximum
{
val fakeMetadata = new DecisionTreeMetadata(1, 0, 0, 0,
Map(), Set(),
Array(3), Gini, QuantileStrategy.Sort,
0, 0, 0.0, 0, 0
)
val featureSamples = Array(0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2).map(_.toDouble)
val splits = DecisionTree.findSplitsForContinuousFeature(featureSamples, fakeMetadata, 0)
assert(splits.length === 1)
assert(fakeMetadata.numSplits(0) === 1)
assert(fakeMetadata.numBins(0) === 2)
assert(splits(0) === 1.0)
}
}
  // Multiclass classification with unordered categorical features: split and bin calculations
  test("Multiclass classification with unordered categorical features:" +
" split and bin calculations") {
val arr = DecisionTreeSuite.generateCategoricalDataPoints()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new Strategy(
Classification,
Gini,
      maxDepth = 2, // maximum tree depth; a stopping condition that guards against overfitting
      numClasses = 100, // number of classes
      maxBins = 100, // maximum number of bins; for a continuous feature this is how many buckets its values are discretized into
      /**
      Declares which features are categorical and the values each one takes.
      Map(0 -> 2, 4 -> 10) means feature 0 has two values (0 and 1) and feature 4 has ten values {0, 1, 2, ..., 9}.
      Feature indices are 0-based, so 0 and 4 refer to the 1st and 5th features. **/
      categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
    // the features are unordered
assert(metadata.isUnordered(featureIndex = 0))
assert(metadata.isUnordered(featureIndex = 1))
    // different strategies use different prediction methods
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(bins.length === 2)
assert(splits(0).length === 3)
assert(bins(0).length === 0)
// Expecting 2^2 - 1 = 3 bins/splits
/**
* splits(0)
* [
* Feature = 0, threshold = -1.7976931348623157E308, featureType = Categorical, categories = List(0.0),
* Feature = 0, threshold = -1.7976931348623157E308, featureType = Categorical, categories = List(1.0),
* Feature = 0, threshold = -1.7976931348623157E308, featureType = Categorical, categories = List(1.0, 0.0)
* ]
*/
assert(splits(0)(0).feature === 0)
    // In binary classification this would be the threshold in [0, 1]: predict 1 if the estimated probability of label 1 exceeds it, else 0
assert(splits(0)(0).threshold === Double.MinValue)
assert(splits(0)(0).featureType === Categorical)
assert(splits(0)(0).categories.length === 1)
assert(splits(0)(0).categories.contains(0.0))
/**
* splits(1)
* [
* Feature = 1, threshold = -1.7976931348623157E308, featureType = Categorical, categories = List(0.0),
* Feature = 1, threshold = -1.7976931348623157E308, featureType = Categorical, categories = List(1.0),
* Feature = 1, threshold = -1.7976931348623157E308, featureType = Categorical, categories = List(1.0, 0.0)
* ]
*/
assert(splits(1)(0).feature === 1)
    // In binary classification this would be the threshold in [0, 1]: predict 1 if the estimated probability of label 1 exceeds it, else 0
assert(splits(1)(0).threshold === Double.MinValue)
assert(splits(1)(0).featureType === Categorical)
assert(splits(1)(0).categories.length === 1)
assert(splits(1)(0).categories.contains(0.0))
assert(splits(0)(1).feature === 0)
assert(splits(0)(1).threshold === Double.MinValue)
assert(splits(0)(1).featureType === Categorical)
assert(splits(0)(1).categories.length === 1)
assert(splits(0)(1).categories.contains(1.0))
assert(splits(1)(1).feature === 1)
assert(splits(1)(1).threshold === Double.MinValue)
assert(splits(1)(1).featureType === Categorical)
assert(splits(1)(1).categories.length === 1)
assert(splits(1)(1).categories.contains(1.0))
assert(splits(0)(2).feature === 0)
assert(splits(0)(2).threshold === Double.MinValue)
assert(splits(0)(2).featureType === Categorical)
assert(splits(0)(2).categories.length === 2)
assert(splits(0)(2).categories.contains(0.0))
assert(splits(0)(2).categories.contains(1.0))
assert(splits(1)(2).feature === 1)
assert(splits(1)(2).threshold === Double.MinValue)
assert(splits(1)(2).featureType === Categorical)
assert(splits(1)(2).categories.length === 2)
assert(splits(1)(2).categories.contains(0.0))
assert(splits(1)(2).categories.contains(1.0))
}
  // Multiclass classification with ordered categorical features: split and bin calculations
test("Multiclass classification with ordered categorical features: split and bin calculations") {
val arr = DecisionTreeSuite.generateCategoricalDataPointsForMulticlassForOrderedFeatures()
assert(arr.length === 3000)
val rdd = sc.parallelize(arr)
val strategy = new Strategy(
Classification,
Gini,
      maxDepth = 2, // maximum tree depth; a stopping condition that guards against overfitting
      numClasses = 100, // number of classes
      maxBins = 100, // maximum number of bins; for a continuous feature this is how many buckets its values are discretized into
      /**
      Declares which features are categorical and the values each one takes.
      Map(0 -> 2, 4 -> 10) means feature 0 has two values (0 and 1) and feature 4 has ten values {0, 1, 2, ..., 9}.
      Feature indices are 0-based, so 0 and 4 refer to the 1st and 5th features. **/
      categoricalFeaturesInfo = Map(0 -> 10, 1 -> 10))
    // 2^(10-1) - 1 > 100, so categorical features will be ordered
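    // Concretely: each 10-category feature would need 2^(10-1) - 1 = 511 candidate subset splits to be
    // treated as unordered, which exceeds maxBins = 100, so both features fall back to the ordered treatment.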
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(bins.length === 2)
    // no bins or splits pre-computed for ordered categorical features
assert(splits(0).length === 0)
assert(bins(0).length === 0)
}
  // Avoid aggregation on the last level
test("Avoid aggregation on the last level") {
val arr = Array(
      // A LabeledPoint is a local vector, dense or sparse, associated with a label
LabeledPoint(0.0, Vectors.dense(1.0, 0.0, 0.0)),
LabeledPoint(1.0, Vectors.dense(0.0, 1.0, 1.0)),
LabeledPoint(0.0, Vectors.dense(2.0, 0.0, 0.0)),
LabeledPoint(1.0, Vectors.dense(0.0, 2.0, 1.0)))
val input = sc.parallelize(arr)
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 1,
      // categoricalFeaturesInfo declares which features are categorical and how many values each takes,
      // given as a map whose key is the feature index and whose value is the number of categories
      // numClasses is the number of classes
numClasses = 2, categoricalFeaturesInfo = Map(0 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(input, strategy)
val (splits, bins) = DecisionTree.findSplitsBins(input, metadata)
val treeInput = TreePoint.convertToTreeRDD(input, bins, metadata)
val baggedInput = BaggedPoint.convertToBaggedRDD(treeInput, 1.0, 1, false)
val topNode = Node.emptyNode(nodeIndex = 1)
assert(topNode.predict.predict === Double.MinValue)
assert(topNode.impurity === -1.0)
assert(topNode.isLeaf === false)
val nodesForGroup = Map((0, Array(topNode)))
val treeToNodeToIndexInfo = Map((0, Map(
(topNode.id, new RandomForest.NodeIndexInfo(0, None))
)))
val nodeQueue = new mutable.Queue[(Int, Node)]()
DecisionTree.findBestSplits(baggedInput, metadata, Array(topNode),
nodesForGroup, treeToNodeToIndexInfo, splits, bins, nodeQueue)
    // don't enqueue leaf nodes into node queue
assert(nodeQueue.isEmpty)
    // set impurity and predict for topNode
assert(topNode.predict.predict !== Double.MinValue)
assert(topNode.impurity !== -1.0)
// set impurity and predict for child nodes
assert(topNode.leftNode.get.predict.predict === 0.0)
assert(topNode.rightNode.get.predict.predict === 1.0)
    assert(topNode.leftNode.get.impurity === 0.0) // impurity
    assert(topNode.rightNode.get.impurity === 0.0) // impurity
}
  // Avoid aggregation if impurity is 0.0
test("Avoid aggregation if impurity is 0.0") {
val arr = Array(
      // A LabeledPoint is a local vector, dense or sparse, associated with a label
LabeledPoint(0.0, Vectors.dense(1.0, 0.0, 0.0)),
LabeledPoint(1.0, Vectors.dense(0.0, 1.0, 1.0)),
LabeledPoint(0.0, Vectors.dense(2.0, 0.0, 0.0)),
LabeledPoint(1.0, Vectors.dense(0.0, 2.0, 1.0)))
val input = sc.parallelize(arr)
    // numClasses is the number of classes
    // categoricalFeaturesInfo declares which features are categorical and how many values each takes,
    // given as a map whose key is the feature index and whose value is the number of categories
    // maxDepth is the maximum tree depth, a stopping condition that guards against overfitting
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 5,
numClasses = 2, categoricalFeaturesInfo = Map(0 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(input, strategy)
val (splits, bins) = DecisionTree.findSplitsBins(input, metadata)
val treeInput = TreePoint.convertToTreeRDD(input, bins, metadata)
val baggedInput = BaggedPoint.convertToBaggedRDD(treeInput, 1.0, 1, false)
val topNode = Node.emptyNode(nodeIndex = 1)
assert(topNode.predict.predict === Double.MinValue)
assert(topNode.impurity === -1.0)
assert(topNode.isLeaf === false)
val nodesForGroup = Map((0, Array(topNode)))
val treeToNodeToIndexInfo = Map((0, Map(
(topNode.id, new RandomForest.NodeIndexInfo(0, None))
)))
val nodeQueue = new mutable.Queue[(Int, Node)]()
DecisionTree.findBestSplits(baggedInput, metadata, Array(topNode),
nodesForGroup, treeToNodeToIndexInfo, splits, bins, nodeQueue)
    // don't enqueue a node into node queue if its impurity is 0.0
assert(nodeQueue.isEmpty)
    // set impurity and predict for topNode
assert(topNode.predict.predict !== Double.MinValue)
assert(topNode.impurity !== -1.0)
    // set impurity and predict for child nodes
assert(topNode.leftNode.get.predict.predict === 0.0)
assert(topNode.rightNode.get.predict.predict === 1.0)
assert(topNode.leftNode.get.impurity === 0.0)
assert(topNode.rightNode.get.impurity === 0.0)
}
  // Second level node building with vs. without groups
test("Second level node building with vs. without groups") {
val arr = DecisionTreeSuite.generateOrderedLabeledPoints()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new Strategy(Classification, Entropy, 3, 2, 100)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(splits(0).length === 99)
assert(bins.length === 2)
assert(bins(0).length === 100)
    // Train a 1-node model
    // Entropy measures the disorder of a set
    // maxDepth is the maximum tree depth, a stopping condition that guards against overfitting
val strategyOneNode = new Strategy(Classification, Entropy, maxDepth = 1,
      // numClasses is the number of classes
      // maxBins is the maximum number of bins; for a continuous feature this is how many buckets its values are discretized into
numClasses = 2, maxBins = 100)
val modelOneNode = DecisionTree.train(rdd, strategyOneNode)
val rootNode1 = modelOneNode.topNode.deepCopy()
val rootNode2 = modelOneNode.topNode.deepCopy()
assert(rootNode1.leftNode.nonEmpty)
assert(rootNode1.rightNode.nonEmpty)
val treeInput = TreePoint.convertToTreeRDD(rdd, bins, metadata)
val baggedInput = BaggedPoint.convertToBaggedRDD(treeInput, 1.0, 1, false)
    // Single group second level tree construction.
val nodesForGroup = Map((0, Array(rootNode1.leftNode.get, rootNode1.rightNode.get)))
val treeToNodeToIndexInfo = Map((0, Map(
(rootNode1.leftNode.get.id, new RandomForest.NodeIndexInfo(0, None)),
(rootNode1.rightNode.get.id, new RandomForest.NodeIndexInfo(1, None)))))
val nodeQueue = new mutable.Queue[(Int, Node)]()
DecisionTree.findBestSplits(baggedInput, metadata, Array(rootNode1),
nodesForGroup, treeToNodeToIndexInfo, splits, bins, nodeQueue)
val children1 = new Array[Node](2)
children1(0) = rootNode1.leftNode.get
children1(1) = rootNode1.rightNode.get
    // Train one second-level node at a time.
val nodesForGroupA = Map((0, Array(rootNode2.leftNode.get)))
val treeToNodeToIndexInfoA = Map((0, Map(
(rootNode2.leftNode.get.id, new RandomForest.NodeIndexInfo(0, None)))))
nodeQueue.clear()
DecisionTree.findBestSplits(baggedInput, metadata, Array(rootNode2),
nodesForGroupA, treeToNodeToIndexInfoA, splits, bins, nodeQueue)
val nodesForGroupB = Map((0, Array(rootNode2.rightNode.get)))
val treeToNodeToIndexInfoB = Map((0, Map(
(rootNode2.rightNode.get.id, new RandomForest.NodeIndexInfo(0, None)))))
nodeQueue.clear()
DecisionTree.findBestSplits(baggedInput, metadata, Array(rootNode2),
nodesForGroupB, treeToNodeToIndexInfoB, splits, bins, nodeQueue)
val children2 = new Array[Node](2)
children2(0) = rootNode2.leftNode.get
children2(1) = rootNode2.rightNode.get
    // Verify whether the splits obtained using single group and multiple group level
    // construction strategies are the same.
for (i <- 0 until 2) {
assert(children1(i).stats.nonEmpty && children1(i).stats.get.gain > 0)
assert(children2(i).stats.nonEmpty && children2(i).stats.get.gain > 0)
assert(children1(i).split === children2(i).split)
assert(children1(i).stats.nonEmpty && children2(i).stats.nonEmpty)
val stats1 = children1(i).stats.get
val stats2 = children2(i).stats.get
assert(stats1.gain === stats2.gain)
assert(stats1.impurity === stats2.impurity)
assert(stats1.leftImpurity === stats2.leftImpurity)
assert(stats1.rightImpurity === stats2.rightImpurity)
assert(children1(i).predict.predict === children2(i).predict.predict)
}
}
/////////////////////////////////////////////////////////////////////////////
  // Tests calling train()
/////////////////////////////////////////////////////////////////////////////
  // Binary classification stump with ordered categorical features
test("Binary classification stump with ordered categorical features") {
val arr = DecisionTreeSuite.generateCategoricalDataPoints()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new Strategy(
Classification,
Gini,
      numClasses = 2, // number of classes
      maxDepth = 2, // maximum tree depth; a stopping condition that guards against overfitting
      // maximum number of bins; for a continuous feature this is how many buckets its values are discretized into
      maxBins = 100,
      /**
      Declares which features are categorical and the values each one takes. Feature indices are 0-based,
      so 0 and 4 refer to the 1st and 5th features; Map(0 -> 2, 4 -> 10) means feature 0 has two values
      (0 and 1) and feature 4 has ten values {0, 1, 2, ..., 9}.
      **/
      categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(bins.length === 2)
    // no bins or splits pre-computed for ordered categorical features
assert(splits(0).length === 0)
assert(bins(0).length === 0)
val rootNode = DecisionTree.train(rdd, strategy).topNode
val split = rootNode.split.get
    // categories
assert(split.categories === List(1.0))
    // feature type
assert(split.featureType === Categorical)
assert(split.threshold === Double.MinValue)
val stats = rootNode.stats.get
assert(stats.gain > 0)
assert(rootNode.predict.predict === 1)
assert(stats.impurity > 0.2)
}
  // Regression stump with 3-ary (ordered) categorical features
test("Regression stump with 3-ary (ordered) categorical features") {
val arr = DecisionTreeSuite.generateCategoricalDataPoints()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new Strategy(
Regression,
Variance,
      maxDepth = 2, // maximum tree depth; a stopping condition that guards against overfitting
      // maximum number of bins; for a continuous feature this is how many buckets its values are discretized into
      maxBins = 100,
categoricalFeaturesInfo = Map(0 -> 3, 1-> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val rootNode = DecisionTree.train(rdd, strategy).topNode
val split = rootNode.split.get
assert(split.categories.length === 1)
assert(split.categories.contains(1.0))
assert(split.featureType === Categorical)
assert(split.threshold === Double.MinValue)
val stats = rootNode.stats.get
assert(stats.gain > 0)
assert(rootNode.predict.predict === 0.6)
assert(stats.impurity > 0.2)
}
  // Regression stump with binary (ordered) categorical features
test("Regression stump with binary (ordered) categorical features") {
val arr = DecisionTreeSuite.generateCategoricalDataPoints()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
val strategy = new Strategy(
Regression,
Variance,
      maxDepth = 2, // maximum tree depth; a stopping condition that guards against overfitting
      maxBins = 100, // maximum number of bins used to discretize continuous features
categoricalFeaturesInfo = Map(0 -> 2, 1-> 2))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val model = DecisionTree.train(rdd, strategy)
DecisionTreeSuite.validateRegressor(model, arr, 0.0)
assert(model.numNodes === 3)
assert(model.depth === 1)
}
  // Binary classification stump with fixed label 0 for Gini
test("Binary classification stump with fixed label 0 for Gini") {
val arr = DecisionTreeSuite.generateOrderedLabeledPointsWithLabel0()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
    // maxBins is the maximum number of bins; for a continuous feature this is how many buckets its values are discretized into
    // maxDepth is the maximum tree depth, a stopping condition that guards against overfitting
val strategy = new Strategy(Classification, Gini, maxDepth = 3,
numClasses = 2, maxBins = 100)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(splits(0).length === 99)
assert(bins.length === 2)
assert(bins(0).length === 100)
val rootNode = DecisionTree.train(rdd, strategy).topNode
val stats = rootNode.stats.get
assert(stats.gain === 0)
assert(stats.leftImpurity === 0)
assert(stats.rightImpurity === 0)
}
  // Binary classification stump with fixed label 1 for Gini
test("Binary classification stump with fixed label 1 for Gini") {
val arr = DecisionTreeSuite.generateOrderedLabeledPointsWithLabel1()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
    // maxBins is the maximum number of bins used to discretize continuous features
    // maxDepth is the maximum tree depth, a stopping condition that guards against overfitting
val strategy = new Strategy(Classification, Gini, maxDepth = 3,
numClasses = 2, maxBins = 100)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(splits(0).length === 99)
assert(bins.length === 2)
assert(bins(0).length === 100)
val rootNode = DecisionTree.train(rdd, strategy).topNode
val stats = rootNode.stats.get
assert(stats.gain === 0)
assert(stats.leftImpurity === 0)
assert(stats.rightImpurity === 0)
assert(rootNode.predict.predict === 1)
}
  // Binary classification stump with fixed label 0 for Entropy
test("Binary classification stump with fixed label 0 for Entropy") {
val arr = DecisionTreeSuite.generateOrderedLabeledPointsWithLabel0()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
    // numClasses is the number of classes
    // maxDepth is the maximum tree depth, a stopping condition that guards against overfitting
val strategy = new Strategy(Classification, Entropy, maxDepth = 3,
numClasses = 2, maxBins = 100)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(splits(0).length === 99)
assert(bins.length === 2)
assert(bins(0).length === 100)
val rootNode = DecisionTree.train(rdd, strategy).topNode
val stats = rootNode.stats.get
assert(stats.gain === 0)
assert(stats.leftImpurity === 0)
assert(stats.rightImpurity === 0)
assert(rootNode.predict.predict === 0)
}
  // Binary classification stump with fixed label 1 for Entropy
test("Binary classification stump with fixed label 1 for Entropy") {
val arr = DecisionTreeSuite.generateOrderedLabeledPointsWithLabel1()
assert(arr.length === 1000)
val rdd = sc.parallelize(arr)
    // maxDepth is the maximum tree depth, a stopping condition that guards against overfitting
val strategy = new Strategy(Classification, Entropy, maxDepth = 3,
numClasses = 2, maxBins = 100)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val (splits, bins) = DecisionTree.findSplitsBins(rdd, metadata)
assert(splits.length === 2)
assert(splits(0).length === 99)
assert(bins.length === 2)
assert(bins(0).length === 100)
val rootNode = DecisionTree.train(rdd, strategy).topNode
val stats = rootNode.stats.get
assert(stats.gain === 0)
assert(stats.leftImpurity === 0)
assert(stats.rightImpurity === 0)
assert(rootNode.predict.predict === 1)
}
  // Multiclass classification stump with 3-ary (unordered) categorical features
test("Multiclass classification stump with 3-ary (unordered) categorical features") {
val arr = DecisionTreeSuite.generateCategoricalDataPointsForMulticlass()
val rdd = sc.parallelize(arr)
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4,
      // categoricalFeaturesInfo stores the categorical (discrete) features and the number of values each takes.
      // For example Map(n -> k) means feature n is categorical with K values, namely (0, 1, ..., K-1).
numClasses = 3, categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(strategy.isMulticlassClassification)
assert(metadata.isUnordered(featureIndex = 0))
assert(metadata.isUnordered(featureIndex = 1))
val rootNode = DecisionTree.train(rdd, strategy).topNode
val split = rootNode.split.get
assert(split.feature === 0)
assert(split.categories.length === 1)
assert(split.categories.contains(1))
assert(split.featureType === Categorical)
}
  // Binary classification stump with 1 continuous feature, to check for off-by-1 error
test("Binary classification stump with 1 continuous feature, to check off-by-1 error") {
val arr = Array(
      // A LabeledPoint is a local vector, dense or sparse, associated with a label
LabeledPoint(0.0, Vectors.dense(0.0)),
LabeledPoint(1.0, Vectors.dense(1.0)),
LabeledPoint(1.0, Vectors.dense(2.0)),
LabeledPoint(1.0, Vectors.dense(3.0)))
val rdd = sc.parallelize(arr)
    // maxDepth is the maximum tree depth, a stopping condition that guards against overfitting
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4,
numClasses = 2)
val model = DecisionTree.train(rdd, strategy)
DecisionTreeSuite.validateClassifier(model, arr, 1.0)
assert(model.numNodes === 3)
assert(model.depth === 1)
}
  // Binary classification stump with 2 continuous features
test("Binary classification stump with 2 continuous features") {
val arr = Array(
LabeledPoint(0.0, Vectors.sparse(2, Seq((0, 0.0)))),
LabeledPoint(1.0, Vectors.sparse(2, Seq((1, 1.0)))),
LabeledPoint(0.0, Vectors.sparse(2, Seq((0, 0.0)))),
LabeledPoint(1.0, Vectors.sparse(2, Seq((1, 2.0)))))
val rdd = sc.parallelize(arr)
    /**
     1. The training dataset.
     2. The number of target classes, i.e. how many outcomes are possible.
     3. A map whose keys are Vector indices and whose values describe the categorical feature at that index;
        an empty map means all features are numeric (used here for convenience, not something to rely on in practice).
     4. The impurity measure, gini or entropy; impurity measures how good a rule is: a good rule splits the
        data into two comparable parts, a bad one does the opposite.
     5. The maximum depth of the tree; deeper trees are more prone to overfitting.
     6. The maximum number of bins, i.e. decision rules evaluated per level; more bins can be more accurate
        but cost more time, and the number must be at least the largest number of categories among the
        categorical features.
     */
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4,
numClasses = 2)
val model = DecisionTree.train(rdd, strategy)
DecisionTreeSuite.validateClassifier(model, arr, 1.0)
assert(model.numNodes === 3)
assert(model.depth === 1)
assert(model.topNode.split.get.feature === 1)
}
  // Multiclass classification stump with unordered categorical features, with just enough bins
test("Multiclass classification stump with unordered categorical features," +
" with just enough bins") {
    val maxBins = 2 * (math.pow(2, 3 - 1).toInt - 1) // just enough bins to allow unordered features
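    // Arithmetic behind the value: a 3-category feature treated as unordered has 2^(3-1) - 1 = 3 candidate
    // subset splits, and each split needs two bins, so 2 * 3 = 6 bins is the minimum that still allows the
    // unordered treatment.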
val arr = DecisionTreeSuite.generateCategoricalDataPointsForMulticlass()
val rdd = sc.parallelize(arr)
    // maximum tree depth (>= 0)
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4,
      // numClasses is the number of classes
      // maxBins is the maximum number of bins used to discretize continuous features
numClasses = 3, maxBins = maxBins,
      /**
      Declares which features are categorical and the values each one takes. Feature indices are 0-based,
      so 0 and 4 refer to the 1st and 5th features; Map(0 -> 2, 4 -> 10) means feature 0 has two values
      (0 and 1) and feature 4 has ten values {0, 1, 2, ..., 9}.
      **/
categoricalFeaturesInfo = Map(0 -> 3, 1 -> 3))
assert(strategy.isMulticlassClassification)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(metadata.isUnordered(featureIndex = 0))
assert(metadata.isUnordered(featureIndex = 1))
val model = DecisionTree.train(rdd, strategy)
DecisionTreeSuite.validateClassifier(model, arr, 1.0)
assert(model.numNodes === 3)
assert(model.depth === 1)
val rootNode = model.topNode
val split = rootNode.split.get
assert(split.feature === 0)
assert(split.categories.length === 1)
assert(split.categories.contains(1))
assert(split.featureType === Categorical)
val gain = rootNode.stats.get
assert(gain.leftImpurity === 0)
assert(gain.rightImpurity === 0)
}
  // Multiclass classification stump with continuous features
test("Multiclass classification stump with continuous features") {
val arr = DecisionTreeSuite.generateContinuousDataPointsForMulticlass()
val rdd = sc.parallelize(arr)
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4,
      // numClasses is the number of classes
numClasses = 3, maxBins = 100)
assert(strategy.isMulticlassClassification)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
val model = DecisionTree.train(rdd, strategy)
DecisionTreeSuite.validateClassifier(model, arr, 0.9)
val rootNode = model.topNode
val split = rootNode.split.get
assert(split.feature === 1)
assert(split.featureType === Continuous)
    // In binary classification this would be the threshold in [0, 1]: predict 1 if the estimated probability of label 1 exceeds it, else 0
assert(split.threshold > 1980)
assert(split.threshold < 2020)
}
  // Multiclass classification stump with continuous + unordered categorical features
test("Multiclass classification stump with continuous + unordered categorical features") {
val arr = DecisionTreeSuite.generateContinuousDataPointsForMulticlass()
    val rdd = sc.parallelize(arr) // maxDepth below is the maximum tree depth, a stopping condition against overfitting
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4,
numClasses = 3, maxBins = 100, categoricalFeaturesInfo = Map(0 -> 3))
assert(strategy.isMulticlassClassification)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(metadata.isUnordered(featureIndex = 0))
val model = DecisionTree.train(rdd, strategy)
DecisionTreeSuite.validateClassifier(model, arr, 0.9)
val rootNode = model.topNode
val split = rootNode.split.get
assert(split.feature === 1)
assert(split.featureType === Continuous)
    // In binary classification this would be the threshold in [0, 1]: predict 1 if the estimated probability of label 1 exceeds it, else 0
assert(split.threshold > 1980)
assert(split.threshold < 2020)
}
  // Multiclass classification stump with 10-ary (ordered) categorical features
test("Multiclass classification stump with 10-ary (ordered) categorical features") {
val arr = DecisionTreeSuite.generateCategoricalDataPointsForMulticlassForOrderedFeatures()
    val rdd = sc.parallelize(arr) // maxDepth below is the maximum tree depth, a stopping condition against overfitting
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4,
numClasses = 3, maxBins = 100,
categoricalFeaturesInfo = Map(0 -> 10, 1 -> 10))
assert(strategy.isMulticlassClassification)
val metadata = DecisionTreeMetadata.buildMetadata(rdd, strategy)
assert(!metadata.isUnordered(featureIndex = 0))
assert(!metadata.isUnordered(featureIndex = 1))
val rootNode = DecisionTree.train(rdd, strategy).topNode
val split = rootNode.split.get
assert(split.feature === 0)
assert(split.categories.length === 1)
assert(split.categories.contains(1.0))
assert(split.featureType === Categorical)
}
  // Multiclass classification tree with 10-ary (ordered) categorical features, with just enough bins
test("Multiclass classification tree with 10-ary (ordered) categorical features," +
" with just enough bins") {
val arr = DecisionTreeSuite.generateCategoricalDataPointsForMulticlassForOrderedFeatures()
val rdd = sc.parallelize(arr)
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 4,
      // numClasses is the number of classes; maxBins is the maximum number of bins used to discretize continuous features
numClasses = 3, maxBins = 10,
categoricalFeaturesInfo = Map(0 -> 10, 1 -> 10))
assert(strategy.isMulticlassClassification)
val model = DecisionTree.train(rdd, strategy)
DecisionTreeSuite.validateClassifier(model, arr, 0.6)
}
  // split must satisfy min instances per node requirements
test("split must satisfy min instances per node requirements") {
val arr = Array(
      // A LabeledPoint is a local vector, dense or sparse, associated with a label
LabeledPoint(0.0, Vectors.sparse(2, Seq((0, 0.0)))),
LabeledPoint(1.0, Vectors.sparse(2, Seq((1, 1.0)))),
LabeledPoint(0.0, Vectors.sparse(2, Seq((0, 1.0)))))
val rdd = sc.parallelize(arr)
val strategy = new Strategy(algo = Classification, impurity = Gini,
      // minInstancesPerNode is the minimum number of instances each child must contain after a split; otherwise the split is not made
maxDepth = 2, numClasses = 2, minInstancesPerNode = 2)
val model = DecisionTree.train(rdd, strategy)
assert(model.topNode.isLeaf)
assert(model.topNode.predict.predict == 0.0)
val predicts = rdd.map(p => model.predict(p.features)).collect()
predicts.foreach { predict =>
assert(predict == 0.0)
}
    // test when no valid split can be found
val rootNode = model.topNode
val gain = rootNode.stats.get
assert(gain == InformationGainStats.invalidInformationGainStats)
}
  // do not choose a split that does not satisfy min instances per node requirements
test("do not choose split that does not satisfy min instance per node requirements") {
    // if a split does not satisfy min instances per node requirements,
    // this split is invalid, even though the information gain of split is large.
val arr = Array(
LabeledPoint(0.0, Vectors.dense(0.0, 1.0)),
LabeledPoint(1.0, Vectors.dense(1.0, 1.0)),
LabeledPoint(0.0, Vectors.dense(0.0, 0.0)),
LabeledPoint(0.0, Vectors.dense(0.0, 0.0)))
val rdd = sc.parallelize(arr)
    /**
     1. The training dataset.
     2. The number of target classes, i.e. how many outcomes are possible.
     3. A map whose keys are Vector indices and whose values describe the categorical feature at that index;
        an empty map means all features are numeric (used here for convenience, not something to rely on in practice).
     4. The impurity measure, gini or entropy; impurity measures how good a rule is: a good rule splits the
        data into two comparable parts, a bad one does the opposite.
     5. The maximum depth of the tree; deeper trees are more prone to overfitting.
     6. The maximum number of bins, i.e. decision rules evaluated per level; more bins can be more accurate
        but cost more time, and the number must be at least the largest number of categories among the
        categorical features.
     **/
val strategy = new Strategy(algo = Classification, impurity = Gini,
maxBins = 2, maxDepth = 2, categoricalFeaturesInfo = Map(0 -> 2, 1-> 2),
      numClasses = 2, minInstancesPerNode = 2) // minimum number of instances each child must contain after a split; otherwise the split is not made
val rootNode = DecisionTree.train(rdd, strategy).topNode
val split = rootNode.split.get
val gain = rootNode.stats.get
assert(split.feature == 1)
assert(gain != InformationGainStats.invalidInformationGainStats)
}
  // split must satisfy min info gain requirements
test("split must satisfy min info gain requirements") {
val arr = Array(
      // A LabeledPoint is a local vector, dense or sparse, associated with a label
LabeledPoint(0.0, Vectors.sparse(2, Seq((0, 0.0)))),
LabeledPoint(1.0, Vectors.sparse(2, Seq((1, 1.0)))),
LabeledPoint(0.0, Vectors.sparse(2, Seq((0, 1.0)))))
    val input = sc.parallelize(arr) // maxDepth below is the maximum tree depth, a stopping condition against overfitting
val strategy = new Strategy(algo = Classification, impurity = Gini, maxDepth = 2,
numClasses = 2, minInfoGain = 1.0)
val model = DecisionTree.train(input, strategy)
assert(model.topNode.isLeaf)
assert(model.topNode.predict.predict == 0.0)
val predicts = input.map(p => model.predict(p.features)).collect()
predicts.foreach { predict =>
assert(predict == 0.0)
}
    // test when no valid split can be found
val rootNode = model.topNode
val gain = rootNode.stats.get
assert(gain == InformationGainStats.invalidInformationGainStats)
}
/////////////////////////////////////////////////////////////////////////////
  // Tests of model save/load
/////////////////////////////////////////////////////////////////////////////
test("Node.subtreeIterator") {//子树迭代器
val model = DecisionTreeSuite.createModel(Classification)
val nodeIds = model.topNode.subtreeIterator.map(_.id).toArray.sorted
assert(nodeIds === DecisionTreeSuite.createdModelNodeIds)
}
test("model save/load") {//模型保存/加载
val tempDir = Utils.createTempDir()
val path = tempDir.toURI.toString
Array(Classification, Regression).foreach { algo =>
val model = DecisionTreeSuite.createModel(algo)
// Save model, load it back, and compare.
try {
model.save(sc, path)
val sameModel = DecisionTreeModel.load(sc, path)
DecisionTreeSuite.checkEqual(model, sameModel)
} finally {
Utils.deleteRecursively(tempDir)
}
}
}
}
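// A condensed sketch of the flow the tests above exercise (assuming a SparkContext `sc`, e.g. the one
// provided by MLlibTestSparkContext; values are illustrative only):
//
//   val points = DecisionTreeSuite.generateCategoricalDataPoints()
//   val rdd = sc.parallelize(points)
//   val strategy = new Strategy(Classification, Gini, maxDepth = 2, numClasses = 2,
//     maxBins = 100, categoricalFeaturesInfo = Map(0 -> 2, 1 -> 2))
//   val model = DecisionTree.train(rdd, strategy)
//   val accuracy = points.count(p => model.predict(p.features) == p.label).toDouble / points.length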
object DecisionTreeSuite extends SparkFunSuite {
  /**
   * Validate a classifier.
   */
def validateClassifier(
model: DecisionTreeModel,
input: Seq[LabeledPoint],
requiredAccuracy: Double) {
val predictions = input.map(x => model.predict(x.features))
val numOffPredictions = predictions.zip(input).count { case (prediction, expected) =>
prediction != expected.label
}
val accuracy = (input.length - numOffPredictions).toDouble / input.length
assert(accuracy >= requiredAccuracy,
s"validateClassifier calculated accuracy $accuracy but required $requiredAccuracy.")
}
  /**
   * Validate a regressor.
   */
def validateRegressor(
model: DecisionTreeModel,
input: Seq[LabeledPoint],
requiredMSE: Double) {
val predictions = input.map(x => model.predict(x.features))
val squaredError = predictions.zip(input).map { case (prediction, expected) =>
val err = prediction - expected.label
err * err
}.sum
val mse = squaredError / input.length
assert(mse <= requiredMSE, s"validateRegressor calculated MSE $mse but required $requiredMSE.")
}
  /**
   * Generate ordered labeled points.
   */
def generateOrderedLabeledPointsWithLabel0(): Array[LabeledPoint] = {
val arr = new Array[LabeledPoint](1000)
for (i <- 0 until 1000) {
val lp = new LabeledPoint(0.0, Vectors.dense(i.toDouble, 1000.0 - i))
arr(i) = lp
}
arr
}
def generateOrderedLabeledPointsWithLabel1(): Array[LabeledPoint] = {
val arr = new Array[LabeledPoint](1000)
for (i <- 0 until 1000) {
val lp = new LabeledPoint(1.0, Vectors.dense(i.toDouble, 999.0 - i))
arr(i) = lp
}
arr
}
def generateOrderedLabeledPoints(): Array[LabeledPoint] = {
val arr = new Array[LabeledPoint](1000)
for (i <- 0 until 1000) {
val label = if (i < 100) {
0.0
} else if (i < 500) {
1.0
} else if (i < 900) {
0.0
} else {
1.0
}
arr(i) = new LabeledPoint(label, Vectors.dense(i.toDouble, 1000.0 - i))
}
arr
}
/**
   * Generate binary classification data.
+-----+---------+
|label| features|
+-----+---------+
| 1.0|[0.0,1.0]|
| 1.0|[0.0,1.0]|
| 0.0|[1.0,0.0]|
| 0.0|[1.0,0.0]|
+-----+---------+*/
def generateCategoricalDataPoints(): Array[LabeledPoint] = {
val arr = new Array[LabeledPoint](1000)
for (i <- 0 until 1000) {
if (i < 600) {
        // A LabeledPoint is a local vector, dense or sparse, associated with a label
arr(i) = new LabeledPoint(1.0, Vectors.dense(0.0, 1.0))
} else {
arr(i) = new LabeledPoint(0.0, Vectors.dense(1.0, 0.0))
}
}
//println(">>>>"+arr.toList)
arr
}
def generateCategoricalDataPointsAsJavaList(): java.util.List[LabeledPoint] = {
generateCategoricalDataPoints().toList.asJava
}
/**
   * Generate multiclass classification data.
* [(1.0,[0.0,1.0]), (2.0,[0.0,1.0]), (1.0,[0.0,1.0]), (2.0,[0.0,1.0]), (1.0,[0.0,1.0])]
*/
def generateCategoricalDataPointsForMulticlass(): Array[LabeledPoint] = {
val arr = new Array[LabeledPoint](3000)
for (i <- 0 until 3000) {
if (i < 1000) {
        // A LabeledPoint is a local vector, dense or sparse, associated with a label
arr(i) = new LabeledPoint(2.0, Vectors.dense(2.0, 2.0))
} else if (i < 2000) {
arr(i) = new LabeledPoint(1.0, Vectors.dense(1.0, 2.0))
} else {
arr(i) = new LabeledPoint(2.0, Vectors.dense(2.0, 2.0))
}
}
arr
}
/**
   * Generate multiclass continuous data.
* [(1.0,[0.0,1.0]), (2.0,[0.0,1.0]), (1.0,[0.0,1.0]), (2.0,[0.0,1.0]), (1.0,[0.0,1.0])]
*/
def generateContinuousDataPointsForMulticlass(): Array[LabeledPoint] = {
val arr = new Array[LabeledPoint](3000)
for (i <- 0 until 3000) {
if (i < 2000) {
arr(i) = new LabeledPoint(2.0, Vectors.dense(2.0, i))
} else {
arr(i) = new LabeledPoint(1.0, Vectors.dense(2.0, i))
}
}
arr
}
def generateCategoricalDataPointsForMulticlassForOrderedFeatures():
Array[LabeledPoint] = {
val arr = new Array[LabeledPoint](3000)
for (i <- 0 until 3000) {
if (i < 1000) {
        // A LabeledPoint is a local vector, dense or sparse, associated with a label
arr(i) = new LabeledPoint(2.0, Vectors.dense(2.0, 2.0))
} else if (i < 2000) {
arr(i) = new LabeledPoint(1.0, Vectors.dense(1.0, 2.0))
} else {
arr(i) = new LabeledPoint(1.0, Vectors.dense(2.0, 2.0))
}
}
arr
}
/**
   * Create a leaf node with the given node ID.
* */
private def createLeafNode(id: Int): Node = {
Node(nodeIndex = id, new Predict(0.0, 1.0), impurity = 0.5, isLeaf = true)
}
/**
* Create an internal node with the given node ID and feature type.
   * Note: This does NOT set the child nodes.
*/
private def createInternalNode(id: Int, featureType: FeatureType): Node = {
val node = Node(nodeIndex = id, new Predict(0.0, 1.0), impurity = 0.5, isLeaf = false)
featureType match {
case Continuous =>
node.split = Some(new Split(feature = 0, threshold = 0.5, Continuous,
categories = List.empty[Double]))
case Categorical =>
node.split = Some(new Split(feature = 1, threshold = 0.0, Categorical,
categories = List(0.0, 1.0)))
}
    // TODO: The information gain stats should be consistent with info in children: SPARK-7131
node.stats = Some(new InformationGainStats(gain = 0.1, impurity = 0.2,
leftImpurity = 0.3, rightImpurity = 0.4, new Predict(1.0, 0.4), new Predict(0.0, 0.6)))
node
}
/**
   * Create a tree model.
   * This is deterministic and contains a variety of node and feature types.
* TODO: Update to be a correct tree (with matching probabilities, impurities, etc.): SPARK-7131
*/
private[spark] def createModel(algo: Algo): DecisionTreeModel = {
val topNode = createInternalNode(id = 1, Continuous)
val (node2, node3) = (createLeafNode(id = 2), createInternalNode(id = 3, Categorical))
val (node6, node7) = (createLeafNode(id = 6), createLeafNode(id = 7))
topNode.leftNode = Some(node2)
topNode.rightNode = Some(node3)
node3.leftNode = Some(node6)
node3.rightNode = Some(node7)
new DecisionTreeModel(topNode, algo)
}
/**
   * Sorted Node IDs matching the model returned by [[createModel()]]
* */
private val createdModelNodeIds = Array(1, 2, 3, 6, 7)
/**
   * Check if the two trees are exactly the same.
   * Note: I hesitate to override Node.equals since it could cause problems if users
   * make mistakes such as creating loops of Nodes.
   * If the trees are not equal, this prints the two trees and throws an exception.
*/
private[mllib] def checkEqual(a: DecisionTreeModel, b: DecisionTreeModel): Unit = {
try {
assert(a.algo === b.algo)
checkEqual(a.topNode, b.topNode)
} catch {
case ex: Exception =>
throw new AssertionError("checkEqual failed since the two trees were not identical.\n" +
"TREE A:\n" + a.toDebugString + "\n" +
"TREE B:\n" + b.toDebugString + "\n", ex)
}
}
/**
   * Return true iff the two nodes and their descendants are exactly the same.
* Note: I hesitate to override Node.equals since it could cause problems if users
* make mistakes such as creating loops of Nodes.
*/
private def checkEqual(a: Node, b: Node): Unit = {
assert(a.id === b.id)
assert(a.predict === b.predict)
assert(a.impurity === b.impurity)
assert(a.isLeaf === b.isLeaf)
assert(a.split === b.split)
(a.stats, b.stats) match {
      // TODO: Check other fields besides the information gain.
case (Some(aStats), Some(bStats)) => assert(aStats.gain === bStats.gain)
case (None, None) =>
case _ => throw new AssertionError(
s"Only one instance has stats defined. (a.stats: ${a.stats}, b.stats: ${b.stats})")
}
(a.leftNode, b.leftNode) match {
case (Some(aNode), Some(bNode)) => checkEqual(aNode, bNode)
case (None, None) =>
case _ => throw new AssertionError("Only one instance has leftNode defined. " +
s"(a.leftNode: ${a.leftNode}, b.leftNode: ${b.leftNode})")
}
(a.rightNode, b.rightNode) match {
case (Some(aNode: Node), Some(bNode: Node)) => checkEqual(aNode, bNode)
case (None, None) =>
case _ => throw new AssertionError("Only one instance has rightNode defined. " +
s"(a.rightNode: ${a.rightNode}, b.rightNode: ${b.rightNode})")
}
}
}
|
tophua/spark1.52
|
mllib/src/test/scala/org/apache/spark/mllib/tree/DecisionTreeSuite.scala
|
Scala
|
apache-2.0
| 56,139 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.models
import org.bdgenomics.adam.util.ADAMFunSuite
import org.bdgenomics.formats.avro.Feature
class CoverageSuite extends ADAMFunSuite {
sparkTest("Convert to coverage from valid Feature") {
// create a valid feature
val featureToConvert =
Feature.newBuilder()
.setContigName("chr1")
.setStart(1)
.setEnd(2)
.setScore(100)
.build()
val coverageAfterConversion = Coverage(featureToConvert)
// all fields should match between the two
assert(coverageAfterConversion.start == featureToConvert.start)
assert(coverageAfterConversion.end == featureToConvert.end)
assert(coverageAfterConversion.contigName == featureToConvert.contigName)
assert(coverageAfterConversion.count == featureToConvert.score)
}
sparkTest("Convert to coverage from Feature with null/empty contigName fails with correct error") {
// feature with empty contigname is not valid when converting to Coverage
val featureWithEmptyContigName =
Feature.newBuilder()
.setContigName("")
.setStart(1)
.setEnd(2)
.setScore(100)
.build()
val caughtWithEmptyContigName =
intercept[IllegalArgumentException](Coverage(featureWithEmptyContigName))
assert(caughtWithEmptyContigName.getMessage == "requirement failed: Features must have Contig name to convert to Coverage")
// feature without contigname is not valid when converting to Coverage
val featureWithNullContigName =
Feature.newBuilder()
.setStart(1)
.setEnd(2)
.setScore(100)
.build()
val caughtWithNullContigName =
intercept[IllegalArgumentException](Coverage(featureWithNullContigName))
assert(caughtWithNullContigName.getMessage == "requirement failed: Features must have Contig name to convert to Coverage")
}
sparkTest("Convert to coverage from Feature with no start/end position fails with correct error") {
// feature without start position is invalid when converting to Coverage
val featureWithoutStartPosition =
Feature.newBuilder()
.setContigName("chr1")
.setEnd(2)
.setScore(100)
.build()
val caughtWithoutStartPosition =
intercept[IllegalArgumentException](Coverage(featureWithoutStartPosition))
assert(caughtWithoutStartPosition.getMessage == "requirement failed: Features must have valid position data to convert to Coverage")
// feature without end position is invalid when converting to Coverage
val featureWithoutEndPosition =
Feature.newBuilder()
.setContigName("chr1")
.setStart(1)
.setScore(100)
.build()
val caughtWithoutEndPosition =
intercept[IllegalArgumentException](Coverage(featureWithoutEndPosition))
assert(caughtWithoutEndPosition.getMessage == "requirement failed: Features must have valid position data to convert to Coverage")
}
sparkTest("Convert to coverage from Feature with no score fails with correct error") {
// feature without score is invalid when converting to Coverage
val featureWithoutScore =
Feature.newBuilder()
.setContigName("chr1")
.setStart(1)
.setEnd(2)
.build()
val caughtWithoutScore =
intercept[IllegalArgumentException](Coverage(featureWithoutScore))
assert(caughtWithoutScore.getMessage == "requirement failed: Features must have valid score to convert to Coverage")
}
}
|
laserson/adam
|
adam-core/src/test/scala/org/bdgenomics/adam/models/CoverageSuite.scala
|
Scala
|
apache-2.0
| 4,280 |
/* Copyright 2015 White Label Personal Clouds Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.welcomer.signpost.spray
import scala.concurrent.Await
import scala.concurrent.Future
import scala.concurrent.duration._
import akka.actor.ActorSystem
import oauth.signpost.AbstractOAuthProvider
import spray.client.pipelining._
import spray.http._
class SprayOAuthProvider(
requestTokenEndpointUrl: String,
accessTokenEndpointUrl: String,
authorizationWebsiteUrl: String)(implicit system: ActorSystem, timeout: Duration)
extends AbstractOAuthProvider(requestTokenEndpointUrl, accessTokenEndpointUrl, authorizationWebsiteUrl) {
import system.dispatcher // execution context for futures
private val pipeline: HttpRequest => Future[HttpResponse] = sendReceive
def createRequest(endpointUrl: String): oauth.signpost.http.HttpRequest = {
new HttpRequestAdapter(Post(endpointUrl))
}
def sendRequest(request: oauth.signpost.http.HttpRequest): oauth.signpost.http.HttpResponse = {
val sprayRequest = request.unwrap().asInstanceOf[spray.http.HttpRequest]
val response = pipeline(sprayRequest)
    new HttpResponseAdapter(Await.result(response, timeout))
}
}
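// Illustrative usage sketch (not part of the original file): wiring the provider together with a
// signpost consumer. The endpoint URLs, keys and timeout below are assumptions for this example,
// not values taken from the real project.
object SprayOAuthProviderExample {
  import oauth.signpost.basic.DefaultOAuthConsumer

  def authorizationUrl(implicit system: ActorSystem): String = {
    implicit val timeout: Duration = 10.seconds
    val provider = new SprayOAuthProvider(
      "https://provider.example/oauth/request_token",
      "https://provider.example/oauth/access_token",
      "https://provider.example/oauth/authorize")
    val consumer = new DefaultOAuthConsumer("consumer-key", "consumer-secret")
    // Fetches a request token via the Spray pipeline and returns the URL the user visits to authorise it.
    provider.retrieveRequestToken(consumer, "https://client.example/callback")
  }
}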
|
welcomer/framework
|
welcomer-signpost/src/main/scala/me/welcomer/signpost/spray/SprayOAuthProvider.scala
|
Scala
|
apache-2.0
| 1,727 |
/*
* Copyright 2019 Scanamo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scanamo.ops
import akka.NotUsed
import akka.stream.Materializer
import akka.stream.alpakka.dynamodb.scaladsl.DynamoDb
import akka.stream.alpakka.dynamodb.{ DynamoDbOp, DynamoDbPaginatedOp }
import akka.stream.scaladsl.Source
import cats.syntax.either._
import cats.~>
import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient
import software.amazon.awssdk.services.dynamodb.model.{ Delete => _, Get => _, Put => _, Update => _, _ }
import java.util.concurrent.CompletionException
private[scanamo] class AlpakkaInterpreter(implicit client: DynamoDbAsyncClient, mat: Materializer)
extends (ScanamoOpsA ~> AlpakkaInterpreter.Alpakka) {
private[this] val unwrap: PartialFunction[Throwable, Throwable] = { case error: CompletionException =>
error.getCause
}
final private def run[In <: DynamoDbRequest, Out <: DynamoDbResponse](
op: In
)(implicit operation: DynamoDbOp[In, Out]): AlpakkaInterpreter.Alpakka[Out] =
Source.fromFuture(DynamoDb.single(op)).mapError(unwrap)
final private def runPaginated[In <: DynamoDbRequest, Out <: DynamoDbResponse](
op: In
)(implicit operation: DynamoDbPaginatedOp[In, Out, _]): AlpakkaInterpreter.Alpakka[Out] =
DynamoDb.source(op).mapError(unwrap)
def apply[A](ops: ScanamoOpsA[A]) =
ops match {
case Put(req) => run[PutItemRequest, PutItemResponse](JavaRequests.put(req))
case Get(req) => run[GetItemRequest, GetItemResponse](req)
case Delete(req) => run[DeleteItemRequest, DeleteItemResponse](JavaRequests.delete(req))
case Scan(req) => runPaginated[ScanRequest, ScanResponse](JavaRequests.scan(req))
case Query(req) => runPaginated[QueryRequest, QueryResponse](JavaRequests.query(req))
case Update(req) => run[UpdateItemRequest, UpdateItemResponse](JavaRequests.update(req))
case BatchWrite(req) => run[BatchWriteItemRequest, BatchWriteItemResponse](req)
case BatchGet(req) => run[BatchGetItemRequest, BatchGetItemResponse](req)
case ConditionalDelete(req) =>
run(JavaRequests.delete(req))
.map(Either.right[ConditionalCheckFailedException, DeleteItemResponse])
.recover { case e: ConditionalCheckFailedException =>
Either.left(e)
}
case ConditionalPut(req) =>
run(JavaRequests.put(req))
.map(Either.right[ConditionalCheckFailedException, PutItemResponse])
.recover { case e: ConditionalCheckFailedException =>
Either.left(e)
}
case ConditionalUpdate(req) =>
run(JavaRequests.update(req))
.map(Either.right[ConditionalCheckFailedException, UpdateItemResponse])
.recover { case e: ConditionalCheckFailedException =>
Either.left(e)
}
case TransactWriteAll(req) =>
run[TransactWriteItemsRequest, TransactWriteItemsResponse](JavaRequests.transactItems(req))
}
}
object AlpakkaInterpreter {
type Alpakka[A] = Source[A, NotUsed]
}
|
scanamo/scanamo
|
alpakka/src/main/scala/org/scanamo/ops/AlpakkaInterpreter.scala
|
Scala
|
apache-2.0
| 3,594 |
package com.danielasfregola.twitter4s.http.clients.rest.lists.parameters
import com.danielasfregola.twitter4s.http.marshalling.Parameters
private[twitter4s] final case class SubscriptionsParameters(user_id: Option[Long],
screen_name: Option[String],
count: Int,
cursor: Long)
extends Parameters
|
DanielaSfregola/twitter4s
|
src/main/scala/com/danielasfregola/twitter4s/http/clients/rest/lists/parameters/SubscriptionsParameters.scala
|
Scala
|
apache-2.0
| 481 |
package de.kaufhof.pillar.config
import com.typesafe.config.{Config, ConfigFactory}
import org.scalatest.{BeforeAndAfter, FunSpec, Matchers}
/**
* Tests for Connection Configuration.
*/
class ConnectionConfigurationTest extends FunSpec with BeforeAndAfter with Matchers {
val TRUST_STORE = "javax.net.ssl.trustStore"
val TRUST_STORE_PASSWORD = "javax.net.ssl.trustStorePassword"
val TRUST_STORE_TYPE = "javax.net.ssl.trustStoreType"
val KEY_STORE = "javax.net.ssl.keyStore"
val KEY_STORE_PASSWORD = "javax.net.ssl.keyStorePassword"
val KEY_STORE_TYPE = "javax.net.ssl.keyStoreType"
before {
val propertiesToReset = List(
TRUST_STORE, TRUST_STORE_PASSWORD, TRUST_STORE_TYPE,
KEY_STORE, KEY_STORE_PASSWORD, KEY_STORE_TYPE)
propertiesToReset.foreach(System.getProperties.remove(_))
}
describe("#initialize") {
it("should allow authentication to be set") {
val config: Config = ConfigFactory.load("authConfig.conf")
val configuration: ConnectionConfiguration = new ConnectionConfiguration("faker", "development",
config)
configuration.auth === Some(PlaintextAuth("cassandra", "secret"))
}
it("should show defaults for useSsl") {
val config: Config = ConfigFactory.load()
val configuration: ConnectionConfiguration = new ConnectionConfiguration("faker", "development", config)
configuration.useSsl === false
configuration.sslConfig === None
}
it("should set ssl keystore system properties when ssl is configured correctly") {
val config: Config = ConfigFactory.load("sslKeystoreConfig.conf")
val configuration: ConnectionConfiguration = new ConnectionConfiguration("faker", "ssl_with_just_keystore",
config)
configuration.useSsl === true
configuration.sslConfig match {
case Some(sslConfig) => sslConfig.setAsSystemProperties()
System.getProperty(KEY_STORE) === "keystore.jks"
System.getProperty(KEY_STORE_PASSWORD) === "secret"
System.getProperty(KEY_STORE_TYPE) === "JCEKS"
System.getProperty(TRUST_STORE) === null
System.getProperty(TRUST_STORE_PASSWORD) === null
System.getProperty(TRUST_STORE_TYPE) === null
case None => fail("ssl should be configured")
}
}
it("should set ssl truststore system properties when ssl is configured correctly") {
val config: Config = ConfigFactory.load("sslKeystoreConfig.conf")
val configuration: ConnectionConfiguration = new ConnectionConfiguration("faker", "ssl_with_just_truststore_and_no_type",
config)
configuration.useSsl === true
configuration.sslConfig match {
case Some(sslConfig) => sslConfig.setAsSystemProperties()
System.getProperty(KEY_STORE) === null
System.getProperty(KEY_STORE_PASSWORD) === null
System.getProperty(KEY_STORE_TYPE) === null
System.getProperty(TRUST_STORE) === "truststore.jks"
System.getProperty(TRUST_STORE_PASSWORD) === "secret"
System.getProperty(TRUST_STORE_TYPE) === "JKS"
case None => fail("ssl should be configured")
}
}
it("should set ssl keystore and truststore system properties when ssl is configured correctly") {
val config: Config = ConfigFactory.load("sslKeystoreConfig.conf")
val configuration: ConnectionConfiguration = new ConnectionConfiguration("faker",
"ssl_with_keystore_and_truststore_and_no_keystore_type",
config)
configuration.useSsl === true
configuration.sslConfig match {
case Some(sslConfig) => sslConfig.setAsSystemProperties()
System.getProperty(KEY_STORE) === "keystore.jks"
System.getProperty(KEY_STORE_PASSWORD) === "secret"
System.getProperty(KEY_STORE_TYPE) === "JKS"
System.getProperty(TRUST_STORE) === "truststore.jks"
System.getProperty(TRUST_STORE_PASSWORD) === "secret"
System.getProperty(TRUST_STORE_TYPE) === "JCEKS"
case None => fail("ssl should be configured")
}
}
it("should allow ssl usage with system properties directly, meaning keystore and truststore will be set from " +
"outside") {
val config: Config = ConfigFactory.load("sslKeystoreConfig.conf")
val configuration: ConnectionConfiguration = new ConnectionConfiguration("faker", "no_ssl_but_wanted_is_also_valid",
config)
configuration.useSsl === true
configuration.sslConfig === None
}
it("should allow a single cassandra seed address to be read") {
val config: Config = ConfigFactory.load("authConfig.conf")
val configuration: ConnectionConfiguration = new ConnectionConfiguration("faker", "test", config)
configuration.seedAddress should equal(List("127.0.0.1"))
}
it("should allow multiple cassandra seed addresses to be read") {
val config: Config = ConfigFactory.load("authConfig.conf")
val configuration: ConnectionConfiguration = new ConnectionConfiguration("faker", "multiple_seed_addresses_test",
config)
configuration.seedAddress should equal(List("127.0.0.1", "127.0.0.2", "127.0.0.3"))
}
}
}
|
Galeria-Kaufhof/pillar
|
src/test/scala/de/kaufhof/pillar/config/ConnectionConfigurationTest.scala
|
Scala
|
mit
| 5,176 |
package com.atomist.rug.test.gherkin
import com.atomist.graph.GraphNode
import com.atomist.project.archive.Rugs
import com.atomist.project.common.InvalidParametersException
import com.atomist.rug.runtime.js.interop.{NashornUtils, jsSafeCommittingProxy}
import com.typesafe.scalalogging.LazyLogging
import gherkin.ast.{ScenarioDefinition, Step}
import jdk.nashorn.api.scripting.ScriptObjectMirror
import scala.collection.JavaConverters._
/**
* Superclass for all features, regardless of what they act on
*/
abstract class AbstractExecutableFeature[W <: ScenarioWorld](
val definition: FeatureDefinition,
val definitions: Definitions,
val rugs: Option[Rugs],
listeners: Seq[GherkinExecutionListener],
config: GherkinRunnerConfig)
extends LazyLogging {
def execute(): FeatureResult = {
FeatureResult(definition.feature,
definition.feature.getChildren.asScala
.map(executeScenario)
)
}
/**
* Create a world for overall context
* This creates a default world.
*/
protected def createWorldForScenario(): ScenarioWorld
private def executeScenario(scenario: ScenarioDefinition): ScenarioResult = {
listeners.foreach(_.scenarioStarting(scenario))
val world = createWorldForScenario()
val assertionResults: Seq[AssertionResult] =
scenario.getSteps.asScala.flatMap(step => {
listeners.foreach(_.stepStarting(step))
val result = step.getKeyword match {
case "Given " if !world.aborted =>
runGiven(world, step)
case "When " if !world.aborted =>
runWhen(world, step)
case "Then " if step.getText == "the scenario aborted" =>
Some(AssertionResult(step.getText, Result(world.aborted, "Scenario aborted")))
case "Then " if !world.aborted =>
Some(runThen(world, step))
case "Then " if world.aborted =>
Some(AssertionResult(step.getText,
Failed(s"Scenario aborted: Could not evaluate: [${world.abortMessage}]")))
case _ =>
None
}
listeners.foreach(_.stepCompleted(step))
result
})
val sr = ScenarioResult(scenario, assertionResults, "")
listeners.foreach(_.scenarioCompleted(scenario, sr))
sr
}
private def runThen(world: ScenarioWorld, step: Step): AssertionResult = {
val somo = definitions.thenFor(step.getText)
logger.debug(s"Then for [${step.getText}]=$somo")
somo match {
case Some(stepMatch) =>
val r = callFunction(stepMatch, world)
r match {
case Right(b: java.lang.Boolean) =>
AssertionResult(step.getText, Result(b, stepMatch.jsVar.toString))
case Right(rsom: ScriptObjectMirror) =>
val result = NashornUtils.stringProperty(rsom, "result", "false") == "true"
AssertionResult(step.getText, Result(result, NashornUtils.stringProperty(rsom, "message", "Detailed information unavailable")))
case Right(res) if ScriptObjectMirror.isUndefined(res) =>
// Returning void (which will be undefined) is truthy
          // This enables use of frameworks such as chai
AssertionResult(step.getText, Result(f = true, stepMatch.jsVar.toString))
case Right(wtf) =>
throw new IllegalArgumentException(s"Unexpected result from Then '${step.getText}': $wtf")
case Left(t) =>
AssertionResult(step.getText, Failed(t.getMessage))
}
case None =>
AssertionResult(step.getText, NotYetImplemented("Then " + step.getText))
}
}
private def runWhen(world: ScenarioWorld, step: Step): Option[AssertionResult] = {
val somo = definitions.whenFor(step.getText)
logger.debug(s"When for [${step.getText}]=$somo")
somo match {
case Some(som) =>
callFunction(som, world) match {
case Left(ipe: InvalidParametersException) =>
listeners.foreach(_.stepFailed(step, ipe))
world.logInvalidParameters(ipe)
None
case Left(e: Exception) if e.getCause != null && e.getCause.isInstanceOf[InvalidParametersException] =>
// Can sometimes get this wrapped
listeners.foreach(_.stepFailed(step, e.getCause.asInstanceOf[InvalidParametersException]))
world.logInvalidParameters(e.getCause.asInstanceOf[InvalidParametersException])
None
case Left(t) =>
listeners.foreach(_.stepFailed(step, t))
logger.error(t.getMessage, t)
world.abort(t.getMessage)
None
case _ =>
None
}
case None =>
Some(AssertionResult(step.getText, NotYetImplemented("When " + step.getText)))
}
}
private def runGiven(world: ScenarioWorld, step: Step): Option[AssertionResult] = {
val somo = definitions.givenFor(step.getText)
logger.debug(s"Given for [${step.getText}]=$somo")
somo match {
case Some(som) =>
callFunction(som, world) match {
case Left(t) =>
world.abort(t.getMessage)
listeners.foreach(_.stepFailed(step, t))
logger.error(t.getMessage, t)
None
case _ =>
None
}
case None =>
Some(AssertionResult(step.getText, NotYetImplemented("Given " + step.getText)))
}
}
// Call a ScriptObjectMirror function with appropriate error handling
private def callFunction(sm: StepMatch, world: ScenarioWorld): Either[Throwable, Object] = {
import scala.util.control.Exception._
val target = world.target match {
case gn: GraphNode => new jsSafeCommittingProxy(gn, world.typeRegistry)
case t => t
}
// Only include the target if it's different from the world.
val fixedParams: Seq[AnyRef] = target match {
case `world` => Seq(world)
case _ => Seq(target, world)
}
val args = fixedParams ++ sm.args
allCatch.either(sm.jsVar.call("apply", args:_*))
}
}
|
atomist/rug
|
src/main/scala/com/atomist/rug/test/gherkin/AbstractExecutableFeature.scala
|
Scala
|
gpl-3.0
| 6,291 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package connectors
import com.google.inject.ImplementedBy
import config.{MicroserviceAppConfig, WSHttpT}
import connectors.ExchangeObjects._
import model.Exceptions.ConnectorException
import model.OnlineTestCommands._
import play.api.Logging
import play.api.http.Status._
import play.api.libs.json.Json
import uk.gov.hmrc.http.HttpReads.Implicits._
import uk.gov.hmrc.http.{HeaderCarrier, HttpResponse}
import javax.inject.{Inject, Singleton}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
@ImplementedBy(classOf[OnlineTestsGatewayClientImpl])
trait OnlineTestsGatewayClient extends Logging {
val http: WSHttpT
val url: String
val root = "fset-online-tests-gateway"
// Blank out header carriers for calls to LPG. Passing on someone's true-client-ip header will cause them to be reassessed
// for whitelisting in the LPG as well (even though they've gone from front -> back -> LPG), which leads to undesirable behaviour.
implicit def blankedHeaderCarrier = HeaderCarrier()
def psiRegisterApplicant(request: RegisterCandidateRequest): Future[AssessmentOrderAcknowledgement] = {
logger.debug(s"$root psi registerApplicant POST request, body=${Json.toJson(request).toString}")
http.POST[RegisterCandidateRequest, HttpResponse](url = s"$url/$root/faststream/psi-register", request).map { response =>
if (response.status == OK) {
logger.debug(s"$root psiRegisterApplicant response - ${response.json.toString}")
response.json.as[AssessmentOrderAcknowledgement]
} else {
throw new ConnectorException(s"There was a general problem connecting to Online Tests Gateway. HTTP response was $response")
}
}
}
def psiCancelTest(request: CancelCandidateTestRequest): Future[AssessmentCancelAcknowledgementResponse] = {
logger.debug(s"cancelTest - $request")
http.GET[HttpResponse](url = s"$url/$root/faststream/psi-cancel-assessment/${request.orderId}").map { response =>
if (response.status == OK) {
logger.debug(s"psiCancelAssessment response - ${response.json.toString}")
response.json.as[AssessmentCancelAcknowledgementResponse]
} else {
throw new ConnectorException(s"There was a general problem connecting to Online Tests Gateway. HTTP response was $response")
}
}
}
def downloadPsiTestResults(reportId: Int): Future[PsiTestResult] = {
logger.debug(s"$root downloadPsiTestResults GET request - $url/$root/faststream/psi-results/$reportId")
http.GET[HttpResponse](s"$url/$root/faststream/psi-results/$reportId").map { response =>
if (response.status == OK) {
logger.debug(s"$root downloadPsiTestResults response - ${response.json.toString}")
response.json.as[PsiTestResult]
} else {
throw new ConnectorException(s"There was a general problem connecting to Online Tests Gateway. HTTP response was $response")
}
}
}
}
@Singleton
class OnlineTestsGatewayClientImpl @Inject() (val http: WSHttpT, appConfig: MicroserviceAppConfig) extends OnlineTestsGatewayClient {
val url: String = appConfig.onlineTestsGatewayConfig.url
}
|
hmrc/fset-faststream
|
app/connectors/OnlineTestsGatewayClient.scala
|
Scala
|
apache-2.0
| 3,754 |
package org.niohiki.wishartmontecarlo.largesteigenvalue
import org.niohiki.wishartmontecarlo.integrator.Bins
import org.niohiki.wishartmontecarlo.integrator.Bin
import org.niohiki.wishartmontecarlo.integrator.BinnerConfiguration
import org.niohiki.wishartmontecarlo.integrator.Integrand
import org.niohiki.wishartmontecarlo.integrator.Domains
import org.niohiki.wishartmontecarlo.systems.Wishart
import java.io.PrintWriter
import java.io.File
object Main {
def generateFile(beta: Double, t: Double) {
val zeta = 1
val N = 10
println("Starting beta=" + beta + " t=" + t)
val tim = System.nanoTime
implicit val conf = BinnerConfiguration(0, 15, 10, 100000000, true)
val wishart = new Wishart(zeta = 1, sl = 1)
val maxeig = wishart.largestEigenvalue(beta = beta, t = t)
val bins = maxeig(N)
val output = new PrintWriter(new File("le_t" + t + "beta" + beta + ".csv"))
bins.bins.foreach {
case (value, point) => output.println(point + "," + value)
}
output.close
println("Done in " + ((System.nanoTime - tim) * 1e-9))
}
def main(args: Array[String]) {
for (beta <- List(0.5, 1.0, 1.1, 1.5); t <- List(0.5, 1.0, 1.5)) {
generateFile(beta, t)
}
}
}
|
niohiki/wishart-monte-carlo
|
src/org/niohiki/wishartmontecarlo/largesteigenvalue/Main.scala
|
Scala
|
gpl-3.0
| 1,223 |
package commands
/**
* Created by alex on 07/05/15.
*/
trait FilenameSanitiser {
def sanitise(filename: String): String = {
    filename.toLowerCase.filter(ch => Character.isLetterOrDigit(ch) || Character.isWhitespace(ch)).replaceAll("""\s+""", "_")
}
}
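// Illustrative usage sketch (not part of the original file): a throwaway mixin showing the
// expected normalisation with the whitespace regex above. The sample filename is an assumption.
object FilenameSanitiserExample extends App with FilenameSanitiser {
  // Punctuation is dropped, letters and digits are lower-cased, and whitespace runs become underscores.
  println(sanitise("My Song (Live) - Take 2.flac")) // prints: my_song_live_take_2flac
}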
|
unclealex72/ripper
|
src/main/scala/commands/FilenameSanitiser.scala
|
Scala
|
mit
| 262 |
/*
Copyright (c) 2012, The Children's Hospital of Philadelphia All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package edu.chop.cbmi.dataExpress.dsl.statements
import edu.chop.cbmi.dataExpress.dsl.stores.{FileStore, SqlDb, Store}
import edu.chop.cbmi.dataExpress.dsl.exceptions.UnsupportedStoreType
import edu.chop.cbmi.dataExpress.dataModels.{DataRow, DataTable}
import edu.chop.cbmi.dataExpress.dsl.From
/**
* Created by IntelliJ IDEA.
* User: masinoa
* Date: 1/13/12
* Time: 12:28 PM
* To change this template use File | Settings | File Templates.
*/
abstract class GetFrom{
def from(source : Store) : DataTable[_]
}
class GetFromQuery(query : String) extends GetFrom{
private var _bind_vars : Seq[Option[Any]] = Seq.empty[Option[Any]]
def from(source:Store) = source match{
case s:SqlDb => DataTable(s.backend, query, _bind_vars)
case _ => throw UnsupportedStoreType(source, "GetFromQuery.from")
}
def using_bind_vars(bind_var : Any*) = {
_bind_vars = bind_var.toSeq map {Some(_)}
this
}
}
class GetFromTable(table_name : String) extends GetFrom{
def from(source : Store) = From(source).get_table(table_name).data_table
}
class GetSelect {
def query(q : String) : GetFromQuery = new GetFromQuery(q)
def table(table_name : String) : GetFromTable = new GetFromTable(table_name)
def from(source : Store) = From(source).get_values.data_table
}
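/**
 * Illustrative sketch (not part of the original file): how these statements chain together. The
 * query text and bind value are assumptions; in the real DSL a GetSelect is normally reached
 * through the top-level entry points rather than instantiated directly.
 */
object GetExample {
  def adults(db: SqlDb): DataTable[_] =
    new GetSelect()
      .query("SELECT * FROM people WHERE age >= ?")
      .using_bind_vars(18)
      .from(db)
}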
|
chop-dbhi/dataexpress
|
src/main/scala/edu/chop/cbmi/dataExpress/dsl/statements/Get.scala
|
Scala
|
bsd-2-clause
| 2,615 |
package ark.optaplanner.deception
import scala.collection.JavaConverters
import scala.collection.JavaConverters._
import scala.collection.JavaConversions
import scala.collection.JavaConversions._
import org.scalatest.Finders
import org.scalatest.FunSpec
import org.optaplanner.core.api.solver.SolverFactory
import ark.Trap
import ark.Trap._
import ark.TrapType._
import ark.TrapEffect._
class ComboSpec extends FunSpec {
describe("A Solved Combo") {
val res = "comboSolverConfig.xml"
val traps = Trap.values
.filterNot { _.explodes }
//.filterNot { _.rolls }
//.filterNot { _.isProjectile }
//.filterNot { _.kind == Ceiling }
//.filterNot { _.effects.contains(MoveToWall) }
.filter { _.movesVictim }
val hitNum = 7
lazy val solved = {
val solver = SolverFactory.createFromXmlResource[Combo](res, getClass.getClassLoader).buildSolver()
val unsolvedCombo = new ComboGenerator().createCombo(traps, hitNum)
solver.solve(unsolvedCombo)
}
it("exists") {
println
println(s"${solved}")
println(s"Score: ${solved.score}")
println(s"Scores: ${Util.toArkCombo(solved).scores}")
}
it(s"has max ${hitNum} hits") {
assert(solved.getHitList.size <= hitNum)
}
it("has correct scores") {
val combo = ark.Combo(Seq(ArrowSlit, MagnifyingGlass, ArrowSlit, Washbin, Aldebaran, CakeintheFace, LightningSpear).map(ark.Hit(_)))
assert(combo.ark === 1439)
assert(combo.elaborate === 15365)
assert(combo.sadistic === 672)
assert(combo.humiliating === 3450)
}
}
}
|
lucaster/ark-calc
|
src/test/scala/ark/optaplanner/deception/ComboSpec.scala
|
Scala
|
mit
| 1,606 |
package scodec
package codecs
import java.io.ByteArrayInputStream
import java.security.KeyPairGenerator
import java.security.cert.{ Certificate, CertificateFactory, X509Certificate }
import java.util.Date
import org.bouncycastle.asn1.x500.X500Name
import org.bouncycastle.cert._
import org.bouncycastle.cert.jcajce._
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder
class CertificateCodecTest extends CodecSuite {
private val keyPair = {
val keyGen = KeyPairGenerator.getInstance("RSA")
keyGen.initialize(1024)
keyGen.generateKeyPair
}
val aCert: X509Certificate = {
val issuer = new X500Name("CN=Test")
val serialNum = BigInt(1).bigInteger
val notBefore = new Date(System.currentTimeMillis - 1000000)
val notAfter = new Date()
val subject = issuer
val bldr = new JcaX509v3CertificateBuilder(issuer, serialNum, notBefore, notAfter, subject, keyPair.getPublic)
val signer = new JcaContentSignerBuilder("SHA1withRSA").build(keyPair.getPrivate)
val holder = bldr.build(signer)
CertificateFactory.getInstance("X.509").generateCertificate(new ByteArrayInputStream(holder.getEncoded)).asInstanceOf[X509Certificate]
}
"the x509Certificate codec" should {
"roundtrip" in {
roundtrip(x509Certificate, aCert)
}
}
}
|
danielwegener/scodec
|
src/test/scala/scodec/codecs/CertificateCodecTest.scala
|
Scala
|
bsd-3-clause
| 1,299 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.fs.storage.common
import com.typesafe.config.{ConfigFactory, ConfigRenderOptions}
import org.junit.runner.RunWith
import org.locationtech.geomesa.fs.storage.common.partitions.{CompositeScheme, DateTimeScheme, Z2Scheme}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import org.specs2.specification.AllExpectations
@RunWith(classOf[JUnitRunner])
class PartitionSchemeConfTest extends Specification with AllExpectations {
sequential
"PartitionScheme" should {
"load from conf" >> {
val conf =
"""
| {
| scheme = "datetime,z2"
| options = {
| datetime-format = "yyyy/DDD/HH"
| step-unit = HOURS
| step = 1
| dtg-attribute = dtg
| geom-attribute = geom
| z2-resolution = 10
| leaf-storage = true
| }
| }
""".stripMargin
val sft = SimpleFeatureTypes.createType("test", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
val scheme = PartitionScheme(sft, ConfigFactory.parseString(conf))
scheme must not(beNull)
scheme must beAnInstanceOf[CompositeScheme]
}
"load, serialize, deserialize" >> {
val sft = SimpleFeatureTypes.createType("test", "name:String,age:Int,dtg:Date,*geom:Point:srid=4326")
val scheme = PartitionScheme.apply(sft, "daily,z2-2bit")
scheme must beAnInstanceOf[CompositeScheme]
val schemeStr = PartitionScheme.toConfig(scheme).root().render(ConfigRenderOptions.concise)
val scheme2 = PartitionScheme.apply(sft, ConfigFactory.parseString(schemeStr))
scheme2 must beAnInstanceOf[CompositeScheme]
}
"load dtg, geom, step, and leaf defaults" >> {
val conf =
"""
| {
| scheme = "datetime,z2"
| options = {
| datetime-format = "yyyy/DDD/HH"
| step-unit = HOURS
| z2-resolution = 10
| }
| }
""".stripMargin
val sft = SimpleFeatureTypes.createType("test", "name:String,age:Int,foo:Date,*bar:Point:srid=4326")
val scheme = PartitionScheme(sft, ConfigFactory.parseString(conf))
scheme must not(beNull)
scheme must beAnInstanceOf[CompositeScheme]
scheme.isLeafStorage must beTrue
val opts = scheme.getOptions
opts.get(Z2Scheme.Config.GeomAttribute) mustEqual "bar"
opts.get(DateTimeScheme.Config.DtgAttribute) mustEqual "foo"
opts.get(DateTimeScheme.Config.StepOpt).toInt mustEqual 1
opts.get(Z2Scheme.Config.LeafStorage).toBoolean must beTrue
}
}
}
|
jahhulbert-ccri/geomesa
|
geomesa-fs/geomesa-fs-storage/geomesa-fs-storage-common/src/test/scala/org/locationtech/geomesa/fs/storage/common/PartitionSchemeConfTest.scala
|
Scala
|
apache-2.0
| 3,208 |
package io.udash.generator.plugins.sbt
import java.io.File
import io.udash.generator.exceptions.FileCreationError
import io.udash.generator.plugins._
import io.udash.generator.plugins.utils.{FrontendPaths, UtilPaths}
import io.udash.generator.utils._
import io.udash.generator.{FrontendOnlyProject, GeneratorPlugin, GeneratorSettings, StandardProject}
/**
* Prepares SBT modules configuration.
*/
object SBTModulesPlugin extends GeneratorPlugin with SBTProjectFiles with FrontendPaths with UtilPaths {
override val dependencies = Seq(SBTBootstrapPlugin)
override def run(settings: GeneratorSettings): GeneratorSettings = {
settings.projectType match {
case FrontendOnlyProject =>
generateFrontendOnlyProject(settings)
case StandardProject(backend, shared, frontend) =>
generateStandardProject(
settings.rootDirectory.subFile(backend),
settings.rootDirectory.subFile(shared),
settings.rootDirectory.subFile(frontend), settings)
}
settings
}
private def scalajsWorkbenchSettings(settings: GeneratorSettings) =
if (settings.shouldEnableJsWorkbench)
s""".settings(workbenchSettings:_*)
| .settings(
| bootSnippet := "${settings.rootPackage.mkPackage()}.Init().main();",
| updatedJS := {
| var files: List[String] = Nil
| ((crossTarget in Compile).value / StaticFilesDir ** "*.js").get.foreach {
| (x: File) =>
| streams.value.log.info("workbench: Checking " + x.getName)
| FileFunction.cached(streams.value.cacheDirectory / x.getName, FilesInfo.lastModified, FilesInfo.lastModified) {
| (f: Set[File]) =>
| val fsPath = f.head.getAbsolutePath.drop(new File("").getAbsolutePath.length)
| files = "http://localhost:12345" + fsPath :: files
| f
| }(Set(x))
| }
| files
| },
| //// use either refreshBrowsers OR updateBrowsers
| // refreshBrowsers <<= refreshBrowsers triggeredBy (compileStatics in Compile)
| updateBrowsers <<= updateBrowsers triggeredBy (compileStatics in Compile)
| )
|""".stripMargin
else ""
/**
* Creates modules dirs:<br/>
* * src/main/assets<br/>
* * src/main/assets/fonts<br/>
* * src/main/assets/images<br/>
* * src/main/assets/index.dev.html<br/>
* * src/main/assets/index.prod.html<br/>
* * src/main/scala/{rootPackage}<br/>
* * src/test/scala/{rootPackage}<br/>
* and appends `build.sbt` modules config with dependencies config in `project/Dependencies.scala`.
*/
private def generateFrontendOnlyProject(settings: GeneratorSettings): Unit = {
createModulesDirs(Seq(settings.rootDirectory), settings)
createFrontendExtraDirs(settings.rootDirectory, settings, Option.empty)
requireFilesExist(Seq(buildSbt(settings), projectDir(settings), udashBuildScala(settings), dependenciesScala(settings)))
generateFrontendTasks(udashBuildScala(settings), indexDevHtml(settings.rootDirectory), indexProdHtml(settings.rootDirectory))
val frontendModuleName = wrapValName(settings.projectName)
val depsName = wrapValName("deps")
val depsJSName = wrapValName("depsJS")
appendFile(buildSbt(settings))(
s"""val $frontendModuleName = project.in(file(".")).enablePlugins(ScalaJSPlugin)
| .settings(
| libraryDependencies ++= $depsName.value,
| jsDependencies ++= $depsJSName.value,
| persistLauncher in Compile := true,
|
| compile <<= (compile in Compile).dependsOn(compileStatics),
| compileStatics := {
| IO.copyDirectory(sourceDirectory.value / "main/assets/fonts", crossTarget.value / StaticFilesDir / WebContent / "assets/fonts")
| IO.copyDirectory(sourceDirectory.value / "main/assets/images", crossTarget.value / StaticFilesDir / WebContent / "assets/images")
| compileStaticsForRelease.value
| (crossTarget.value / StaticFilesDir).***.get
| },
|
| artifactPath in(Compile, fastOptJS) :=
| (crossTarget in(Compile, fastOptJS)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendImplFastJs}",
| artifactPath in(Compile, fullOptJS) :=
| (crossTarget in(Compile, fullOptJS)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendImplJs}",
| artifactPath in(Compile, packageJSDependencies) :=
| (crossTarget in(Compile, packageJSDependencies)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendDepsFastJs}",
| artifactPath in(Compile, packageMinifiedJSDependencies) :=
| (crossTarget in(Compile, packageMinifiedJSDependencies)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendDepsJs}",
| artifactPath in(Compile, packageScalaJSLauncher) :=
| (crossTarget in(Compile, packageScalaJSLauncher)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendInitJs}"$FrontendSettingsPlaceholder
| )${scalajsWorkbenchSettings(settings)}
| $FrontendModulePlaceholder
|
|""".stripMargin)
appendOnPlaceholder(dependenciesScala(settings))(DependenciesPlaceholder,
s"""
| $DependenciesVariablesPlaceholder
|
| val $depsName = Def.setting(Seq[ModuleID]($DependenciesFrontendPlaceholder
| ))
|
| val $depsJSName = Def.setting(Seq[org.scalajs.sbtplugin.JSModuleID]($DependenciesFrontendJSPlaceholder
| ))
|""".stripMargin
)
}
/**
* Creates modules dirs:<br/>
* * {module}/src/main/scala/{rootPackage}<br/>
* * {module}/src/test/scala/{rootPackage}<br/>
* extra in frontend:<br/>
* * {module}/src/main/assets<br/>
* * {module}/src/main/assets/fonts<br/>
* * {module}/src/main/assets/images<br/>
* * {module}/src/main/assets/index.dev.html<br/>
* * {module}/src/main/assets/index.prod.html<br/>
* and appends `build.sbt` modules config with dependencies config in `project/Dependencies.scala`.
*/
private def generateStandardProject(backend: File, shared: File, frontend: File, settings: GeneratorSettings): Unit = {
createModulesDirs(Seq(backend, shared, frontend), settings)
createFrontendExtraDirs(frontend, settings, Option(frontend.getName))
requireFilesExist(Seq(buildSbt(settings), projectDir(settings), udashBuildScala(settings), dependenciesScala(settings)))
generateFrontendTasks(udashBuildScala(settings), indexDevHtml(frontend), indexProdHtml(frontend))
val rootModuleName = wrapValName(settings.projectName)
val backendModuleName = wrapValName(backend.getName)
val frontendModuleName = wrapValName(frontend.getName)
val sharedModuleName = wrapValName(shared.getName)
val sharedJSModuleName = wrapValName(shared.getName + "JS")
val sharedJVMModuleName = wrapValName(shared.getName + "JVM")
val crossDepsName = wrapValName("crossDeps")
val backendDepsName = wrapValName("backendDeps")
val frontendDepsName = wrapValName("frontendDeps")
val frontendJSDepsName = wrapValName("frontendJSDeps")
appendFile(buildSbt(settings))(
s"""def crossLibs(configuration: Configuration) =
| libraryDependencies ++= $crossDepsName.value.map(_ % configuration)
|
|lazy val $rootModuleName = project.in(file("."))
| .aggregate($sharedJSModuleName, $sharedJVMModuleName, $frontendModuleName, $backendModuleName)
| .dependsOn($backendModuleName)
| .settings(
| publishArtifact := false$RootSettingsPlaceholder
| )$RootModulePlaceholder
|
|lazy val $sharedModuleName = crossProject.crossType(CrossType.Pure).in(file("${shared.getName}"))
| .settings(
| crossLibs(Provided)$SharedSettingsPlaceholder
| )$SharedModulePlaceholder
|
|lazy val $sharedJVMModuleName = $sharedModuleName.jvm$SharedJVMModulePlaceholder
|lazy val $sharedJSModuleName = $sharedModuleName.js$SharedJSModulePlaceholder
|
|lazy val $backendModuleName = project.in(file("${backend.getName}"))
| .dependsOn($sharedJVMModuleName)
| .settings(
| libraryDependencies ++= $backendDepsName.value,
| crossLibs(Compile)$BackendSettingsPlaceholder
| )$BackendModulePlaceholder
|
|lazy val $frontendModuleName = project.in(file("${frontend.getName}")).enablePlugins(ScalaJSPlugin)
| .dependsOn($sharedJSModuleName)
| .settings(
| libraryDependencies ++= $frontendDepsName.value,
| crossLibs(Compile),
| jsDependencies ++= $frontendJSDepsName.value,
| persistLauncher in Compile := true,
|
| compile <<= (compile in Compile),
| compileStatics := {
| IO.copyDirectory(sourceDirectory.value / "main/assets/fonts", crossTarget.value / StaticFilesDir / WebContent / "assets/fonts")
| IO.copyDirectory(sourceDirectory.value / "main/assets/images", crossTarget.value / StaticFilesDir / WebContent / "assets/images")
| compileStaticsForRelease.value
| (crossTarget.value / StaticFilesDir).***.get
| },
| compileStatics <<= compileStatics.dependsOn(compile in Compile),
|
| artifactPath in(Compile, fastOptJS) :=
| (crossTarget in(Compile, fastOptJS)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendImplFastJs}",
| artifactPath in(Compile, fullOptJS) :=
| (crossTarget in(Compile, fullOptJS)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendImplJs}",
| artifactPath in(Compile, packageJSDependencies) :=
| (crossTarget in(Compile, packageJSDependencies)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendDepsFastJs}",
| artifactPath in(Compile, packageMinifiedJSDependencies) :=
| (crossTarget in(Compile, packageMinifiedJSDependencies)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendDepsJs}",
| artifactPath in(Compile, packageScalaJSLauncher) :=
| (crossTarget in(Compile, packageScalaJSLauncher)).value / StaticFilesDir / WebContent / "scripts" / "${settings.frontendInitJs}"$FrontendSettingsPlaceholder
| )${scalajsWorkbenchSettings(settings)}
| $FrontendModulePlaceholder
|
|""".stripMargin)
appendOnPlaceholder(dependenciesScala(settings))(DependenciesPlaceholder,
s"""
| $DependenciesVariablesPlaceholder
|
| val $crossDepsName = Def.setting(Seq[ModuleID]($DependenciesCrossPlaceholder
| ))
|
| val $frontendDepsName = Def.setting(Seq[ModuleID]($DependenciesFrontendPlaceholder
| ))
|
| val $frontendJSDepsName = Def.setting(Seq[org.scalajs.sbtplugin.JSModuleID]($DependenciesFrontendJSPlaceholder
| ))
|
| val $backendDepsName = Def.setting(Seq[ModuleID]($DependenciesBackendPlaceholder
| ))
|""".stripMargin
)
}
private def createModulesDirs(modules: Seq[File], settings: GeneratorSettings): Unit = {
modules.foreach((modulePath: File) => {
val module = modulePath
if (modulePath != settings.rootDirectory && !module.mkdir()) throw FileCreationError(module.toString)
createDirs(Seq(rootPackageInSrc(module, settings), rootPackageInTestSrc(module, settings)))
})
}
private def createFrontendExtraDirs(frontend: File, settings: GeneratorSettings, frontendModuleName: Option[String]): Unit = {
createDirs(Seq(images(frontend), fonts(frontend)))
val indexDev: File = indexDevHtml(frontend)
val indexProd: File = indexProdHtml(frontend)
val frontendDirectoryName = frontendModuleName match {
case None => ""
case Some(name) => name + "/"
}
val scripts =
if (settings.shouldEnableJsWorkbench)
s"""
| <script src="http://localhost:12345/${frontendDirectoryName}target/UdashStatic/WebContent/scripts/${settings.frontendDepsFastJs}"></script>
| <script src="http://localhost:12345/${frontendDirectoryName}target/UdashStatic/WebContent/scripts/${settings.frontendImplFastJs}"></script>
| <script src="http://localhost:12345/${frontendDirectoryName}target/UdashStatic/WebContent/scripts/${settings.frontendInitJs}"></script>
| <script src="http://localhost:12345/workbench.js"></script>
""".stripMargin
else
s"""
| <script src="scripts/${settings.frontendDepsFastJs}"></script>
| <script src="scripts/${settings.frontendImplFastJs}"></script>
| <script src="scripts/${settings.frontendInitJs}"></script>
""".stripMargin
createFiles(Seq(indexDev, indexProd), requireNotExists = true)
writeFile(indexDev)(
s"""<!DOCTYPE html>
|<html>
|<head lang="en">
| <meta charset="UTF-8">
| <title>${settings.projectName} - development</title>
|
| $scripts
|
| $HTMLHeadPlaceholder
|</head>
|<body>
| <div id="${settings.htmlRootId}"></div>
|</body>
|</html>
|""".stripMargin)
writeFile(indexProd)(
s"""<!DOCTYPE html>
|<html>
|<head lang="en">
| <meta charset="UTF-8">
| <title>${settings.projectName}</title>
|
| <script src="scripts/${settings.frontendDepsJs}"></script>
| <script src="scripts/${settings.frontendImplJs}"></script>
| <script src="scripts/${settings.frontendInitJs}"></script>
| $HTMLHeadPlaceholder
|</head>
|<body>
| <div id="${settings.htmlRootId}"></div>
|</body>
|</html>
|""".stripMargin)
}
private def generateFrontendTasks(udashBuildScala: File, indexDevHtml: File, indexProdHtml: File): Unit = {
appendOnPlaceholder(udashBuildScala)(UdashBuildPlaceholder,
s"""
| val StaticFilesDir = "UdashStatic"
| val WebContent = "WebContent"
|
| def copyIndex(file: File, to: File) = {
| val newFile = Path(to.toPath.toString + "/index.html")
| IO.copyFile(file, newFile.asFile)
| }
|
| val compileStatics = taskKey[Seq[File]]("Frontend static files manager.")
|
| val compileStaticsForRelease = Def.taskDyn {
| val outDir = crossTarget.value / StaticFilesDir / WebContent
| if (!isSnapshot.value) {
| Def.task {
| val indexFile = sourceDirectory.value / "main/assets/${indexProdHtml.getName}"
| copyIndex(indexFile, outDir)
| (fullOptJS in Compile).value
| (packageMinifiedJSDependencies in Compile).value
| (packageScalaJSLauncher in Compile).value
| }
| } else {
| Def.task {
| val indexFile = sourceDirectory.value / "main/assets/${indexDevHtml.getName}"
| copyIndex(indexFile, outDir)
| (fastOptJS in Compile).value
| (packageJSDependencies in Compile).value
| (packageScalaJSLauncher in Compile).value
| }
| }
| }
|""".stripMargin)
}
  //TODO: wrap only when it's necessary
private def wrapValName(name: String): String =
if (name.contains("-")) s"`$name`"
else name
}
|
tstangenberg/udash-generator
|
core/src/main/scala/io/udash/generator/plugins/sbt/SBTModulesPlugin.scala
|
Scala
|
apache-2.0
| 16,095 |
package fr.inria.spirals.actress.runtime.protocol
import akka.actor.ActorRef
sealed trait Message
case class GetCapabilities() extends Message
case class Capabilities(services: Seq[(String, ActorRef)]) extends Message
case class Register(name: String, ref: ActorRef) extends Message
case class GetAttribute(id: String, name: String) extends Message
case class GetAttributes() extends Message
case class Attributes(attributes: Iterable[String]) extends Message
case class AttributeValue(id: String, name: String, value: Any) extends Message
case class UnknownAttribute(id: String, name: String) extends Message
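// Illustrative sketch (not part of the original file): how an actor might dispatch on this
// protocol. The actor and the sample attribute are assumptions made for this example.
import akka.actor.Actor

class ExampleModelActor extends Actor {
  private val attributes = Map[String, Any]("cpuLoad" -> 0.42)

  def receive: Actor.Receive = {
    case GetAttributes() =>
      sender() ! Attributes(attributes.keys)
    case GetAttribute(id, name) =>
      attributes.get(name) match {
        case Some(value) => sender() ! AttributeValue(id, name, value)
        case None        => sender() ! UnknownAttribute(id, name)
      }
  }
}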
|
fikovnik/actress-mrt
|
src/main/scala/fr/inria/spirals/actress/runtime/protocol/Messages.scala
|
Scala
|
apache-2.0
| 616 |
package uk.gov.gds.common.audit
import uk.gov.gds.common.mongo.UnauthenticatedMongoDatabaseManagerForTests
object TestAuditEventRepository extends AuditEventRepositoryBase {
protected val collection = UnauthenticatedMongoDatabaseManagerForTests("auditEvents")
def testAudit(event: AuditEvent) = audit(event)
}
|
alphagov/gds-scala-common
|
mongo-utils/src/test/scala/uk/gov/gds/common/audit/TestAuditEventRepository.scala
|
Scala
|
mit
| 317 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package test
import play.api.test._
object RouterSpec extends PlaySpecification {
"document the router" in new WithApplication() {
val someRoute = implicitApp.injector
.instanceOf[play.api.routing.Router]
.documentation
.find(r => r._1 == "GET" && r._2.startsWith("/public/"))
someRoute must beSome[(String, String, String)]
val route = someRoute.get
route._2 must_== "/public/$file<.+>"
route._3 must startWith("""_root_.controllers.Assets.versioned(path:String = "/public", file:_root_.controllers.Assets.Asset)""")
}
"The assets reverse route support" should {
"fingerprint assets" in new WithApplication() {
router.controllers.routes.Assets.versioned("css/main.css").url must_== "/public/css/abcd1234-main.css"
}
"selected the minified version" in new WithApplication() {
router.controllers.routes.Assets.versioned("css/minmain.css").url must_== "/public/css/abcd1234-minmain-min.css"
}
"work for non fingerprinted assets" in new WithApplication() {
router.controllers.routes.Assets.versioned("css/nonfingerprinted.css").url must_== "/public/css/nonfingerprinted.css"
}
"selected the minified non fingerprinted version" in new WithApplication() {
router.controllers.routes.Assets
.versioned("css/nonfingerprinted-minmain.css")
.url must_== "/public/css/nonfingerprinted-minmain-min.css"
}
}
}
|
playframework/playframework
|
dev-mode/sbt-plugin/src/sbt-test/play-sbt-plugin/routes-compiler-namespace-reverse-router/tests/RouterSpec.scala
|
Scala
|
apache-2.0
| 1,487 |
package org.coalesce.coalescebot.command.executors.request
import org.coalesce.coalescebot.command.{BotCommand, CommandContext}
class RoleRequest extends BotCommand {
val availableRoles: Set[Long] = Set (388145098199924736L,
388145098199924736L,
388145098199924736L,
388145098199924736L)
override val name: String = "rolerequest"
override val aliases: Set[String] = Set("request", "rr")
override val desc: String = "Request a role to the server staff"
override def execute(commandContext: CommandContext): Unit = {
}
}
|
Project-Coalesce/CoalesceBot
|
src/main/scala/org/coalesce/coalescebot/command/executors/request/RoleRequest.scala
|
Scala
|
mit
| 550 |
package com.dazito.scala.dakkabase
import akka.actor.{Status, Actor}
import akka.actor.Actor.Receive
/**
* Created by daz on 21/02/2016.
*/
class ScalaPongActor extends Actor {
override def receive: Receive = {
case "Ping" => sender() ! "Pong"
case _ => sender() ! Status.Failure(new Exception("Unknown message"))
}
}
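// Illustrative usage sketch (not part of the original file): asking the actor and awaiting the
// reply. The system name and timeout are assumptions; system.terminate() assumes Akka 2.4+.
object ScalaPongActorExample extends App {
  import akka.actor.{ActorSystem, Props}
  import akka.pattern.ask
  import akka.util.Timeout
  import scala.concurrent.Await
  import scala.concurrent.duration._

  implicit val timeout: Timeout = Timeout(2.seconds)
  val system = ActorSystem("pong-example")
  val pongActor = system.actorOf(Props[ScalaPongActor])

  // "Ping" is answered with "Pong"; any other message completes the future with a failure.
  val reply = Await.result(pongActor ? "Ping", 2.seconds)
  println(reply) // Pong
  system.terminate()
}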
|
dazito/LearningAkkaScalaServer
|
akkademy-db/src/main/scala/com/dazito/scala/dakkabase/ScalaPongActor.scala
|
Scala
|
mit
| 346 |
package csvside
import cats.data.Validated
trait CellReader[+A] extends (String => Validated[String, A]) {
def map[B](func: A => B): CellReader[B] =
CellReader[B](cell => this(cell).map(func))
}
object CellReader {
def apply[A](func: String => Validated[String, A]): CellReader[A] =
new CellReader[A] {
def apply(csv: String): Validated[String, A] =
func(csv)
}
}
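// Illustrative usage sketch (not part of the original file): an Int reader built from the
// primitives above. The error message wording is an assumption made for this example.
object CellReaderExample {
  import scala.util.{Failure, Success, Try}

  val intReader: CellReader[Int] =
    CellReader { cell =>
      Try(cell.trim.toInt) match {
        case Success(n) => Validated.valid(n)
        case Failure(_) => Validated.invalid(s"'$cell' is not a valid integer")
      }
    }

  // map reuses the parsing logic for derived readers, e.g. doubling every parsed value.
  val doubledReader: CellReader[Int] = intReader.map(_ * 2)
}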
|
davegurnell/csvside
|
src/main/scala/csvside/CellReader.scala
|
Scala
|
apache-2.0
| 397 |
/**
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package noop.model
import collection.jcl.Buffer;
/**
* @author [email protected] (Alex Eagle)
* @author [email protected] (Jeremie Lenfant-Engelmann)
*/
class MethodInvocationExpression(val left: Expression, val name: String, val arguments: Seq[Expression])
extends Expression {
override def accept(visitor: Visitor) = {
left.accept(visitor);
visitor.enter(this);
arguments.foreach(arg => {
arg.accept(visitor);
visitor.afterArgumentVisit(this);
});
visitor.visit(this);
}
}
|
SeaUrchinBot/noop
|
core/src/main/scala/noop/model/MethodInvocationExpression.scala
|
Scala
|
apache-2.0
| 1,119 |
package slamdata.engine.api
import unfiltered.request._
import unfiltered.response._
import slamdata.engine._
import slamdata.engine.fs._
import slamdata.engine.config._
import scalaz.concurrent._
object Server {
def run(port: Int, fs: FSTable[Backend]): Task[Unit] = Task.delay {
unfiltered.netty.Server.http(port).chunked(1048576).plan(new FileSystemApi(fs).api).run()
}
def main(args: Array[String]) {
val serve = for {
config <- args.headOption.map(Config.fromFile _).getOrElse(Task.now(Config.DefaultConfig))
mounted <- Mounter.mount(config)
_ <- run(config.server.port.getOrElse(8080), mounted)
} yield ()
// Move the server off of the main thread so unfiltered will think it's running
// under SBT and listen for keystrokes to stop.
new Thread(new Runnable { def run = serve.run }).start()
}
}
|
mossprescott/quasar
|
src/main/scala/slamdata/engine/api/server.scala
|
Scala
|
agpl-3.0
| 868 |
package mesosphere.marathon.state
import org.apache.mesos.{ Protos => MesosProtos }
import mesosphere.marathon.{ Protos, MarathonSpec }
import mesosphere.marathon.api.JsonTestHelper
import mesosphere.marathon.state.DiscoveryInfo.Port
import org.scalatest.Matchers
import play.api.libs.json.{ JsPath, JsError, Json }
import scala.collection.JavaConverters._
class DiscoveryInfoTest extends MarathonSpec with Matchers {
import mesosphere.marathon.api.v2.json.Formats._
class Fixture {
lazy val emptyDiscoveryInfo = DiscoveryInfo()
lazy val discoveryInfoWithPort = DiscoveryInfo(
ports = Seq(Port(name = "http", number = 80, protocol = "tcp", labels = Map("VIP_0" -> "192.168.0.1:80")))
)
lazy val discoveryInfoWithTwoPorts = DiscoveryInfo(
ports = Seq(
Port(name = "dns", number = 53, protocol = "udp"),
Port(name = "http", number = 80, protocol = "tcp")
)
)
lazy val discoveryInfoWithTwoPorts2 = DiscoveryInfo(
ports = Seq(
Port(name = "dnsudp", number = 53, protocol = "udp"),
Port(name = "dnstcp", number = 53, protocol = "tcp")
)
)
}
def fixture(): Fixture = new Fixture
test("ToProto default DiscoveryInfo") {
val f = fixture()
val proto = f.emptyDiscoveryInfo.toProto
proto should be(Protos.DiscoveryInfo.getDefaultInstance)
}
test("ToProto with one port") {
val f = fixture()
val proto = f.discoveryInfoWithPort.toProto
val portProto =
MesosProtos.Port.newBuilder()
.setName("http")
.setNumber(80)
.setProtocol("tcp")
.setLabels(
MesosProtos.Labels.newBuilder.addLabels(
MesosProtos.Label.newBuilder
.setKey("VIP_0")
.setValue("192.168.0.1:80")))
.build()
proto.getPortsList.asScala.head should equal(portProto)
}
test("ConstructFromProto with default proto") {
val f = fixture()
val defaultProto = Protos.DiscoveryInfo.newBuilder.build
val result = DiscoveryInfo.fromProto(defaultProto)
result should equal(f.emptyDiscoveryInfo)
}
test("ConstructFromProto with port") {
val f = fixture()
val portProto =
MesosProtos.Port.newBuilder()
.setName("http")
.setNumber(80)
.setProtocol("tcp")
.setLabels(
MesosProtos.Labels.newBuilder.addLabels(
MesosProtos.Label.newBuilder
.setKey("VIP_0")
.setValue("192.168.0.1:80")))
.build()
val protoWithPort = Protos.DiscoveryInfo.newBuilder
.addAllPorts(Seq(portProto).asJava)
.build
val result = DiscoveryInfo.fromProto(protoWithPort)
result should equal(f.discoveryInfoWithPort)
}
test("JSON Serialization round-trip emptyDiscoveryInfo") {
val f = fixture()
JsonTestHelper.assertSerializationRoundtripWorks(f.emptyDiscoveryInfo)
}
test("JSON Serialization round-trip discoveryInfoWithPort") {
val f = fixture()
JsonTestHelper.assertSerializationRoundtripWorks(f.discoveryInfoWithPort)
}
private[this] def fromJson(json: String): DiscoveryInfo = {
Json.fromJson[DiscoveryInfo](Json.parse(json)).get
}
test("Read empty discovery info") {
val json =
"""
{
"ports": []
}
"""
val readResult = fromJson(json)
val f = fixture()
assert(readResult == f.emptyDiscoveryInfo)
}
test("Read discovery info with one port") {
val json =
"""
{
"ports": [
{ "name": "http", "number": 80, "protocol": "tcp", "labels": { "VIP_0": "192.168.0.1:80" } }
]
}
"""
val readResult = fromJson(json)
val f = fixture()
assert(readResult == f.discoveryInfoWithPort)
}
test("Read discovery info with two ports") {
val json =
"""
{
"ports": [
{ "name": "dns", "number": 53, "protocol": "udp" },
{ "name": "http", "number": 80, "protocol": "tcp" }
]
}
"""
val readResult = fromJson(json)
val f = fixture()
assert(readResult == f.discoveryInfoWithTwoPorts)
}
test("Read discovery info with two ports with the same port number") {
val json =
"""
{
"ports": [
{ "name": "dnsudp", "number": 53, "protocol": "udp" },
{ "name": "dnstcp", "number": 53, "protocol": "tcp" }
]
}
"""
val readResult = fromJson(json)
val f = fixture()
assert(readResult == f.discoveryInfoWithTwoPorts2)
}
test("Read discovery info with two ports with duplicate port/number") {
val json =
"""
{
"ports": [
{ "name": "dns1", "number": 53, "protocol": "udp" },
{ "name": "dns2", "number": 53, "protocol": "udp" }
]
}
"""
val readResult = Json.fromJson[DiscoveryInfo](Json.parse(json))
readResult should be(JsError(
JsPath() \\ "ports",
"There may be only one port with a particular port number/protocol combination.")
)
}
test("Read discovery info with two ports with duplicate name") {
val json =
"""
{
"ports": [
{ "name": "dns1", "number": 53, "protocol": "udp" },
{ "name": "dns1", "number": 53, "protocol": "tcp" }
]
}
"""
val readResult = Json.fromJson[DiscoveryInfo](Json.parse(json))
readResult should be(JsError(
JsPath() \\ "ports",
"Port names are not unique.")
)
}
test("Read discovery info with a port with an invalid protocol") {
val json =
"""
{
"ports": [
{ "name": "http", "number": 80, "protocol": "foo" }
]
}
"""
val readResult = Json.fromJson[DiscoveryInfo](Json.parse(json))
readResult should be(JsError(
(JsPath() \\ "ports")(0) \\ "protocol",
"Invalid protocol. Only 'udp' or 'tcp' are allowed.")
)
}
test("Read discovery info with a port with an invalid name") {
val json =
"""
{
"ports": [
{ "name": "???", "number": 80, "protocol": "tcp" }
]
}
"""
val readResult = Json.fromJson[DiscoveryInfo](Json.parse(json))
readResult should be(JsError(
(JsPath() \\ "ports")(0) \\ "name",
s"Port name must fully match regular expression ${PortAssignment.PortNamePattern}")
)
}
}
|
yp-engineering/marathon
|
src/test/scala/mesosphere/marathon/state/DiscoveryInfoTest.scala
|
Scala
|
apache-2.0
| 6,349 |
package com.neilconcepts.battlespace.domain
import com.neilconcepts.battlespace.domain.Board.{ Point, BattleSpaceBoard }
import com.neilconcepts.battlespace.domain.Messages.GameStateError
import com.neilconcepts.battlespace.domain.bst.GameState
/**
* GameActions ::
* All the game actions for game objects
 * Each action has a three-phase lifecycle
* - birth : The object when assigned to the game board
* - life : Any special actions while on the board
 * - death : What happens when destroyed by a missile
*/
object GameActions {
sealed trait GameAction
private def getRandomBoardSpot: Int = {
val r = scala.util.Random
r.nextInt(Board.maxDimensions)
}
object Takes extends GameAction {
def apply(size: Int): Option[GameAction] = {
//Determine x,y,z interactions
None
}
}
object Explode extends GameAction {
def apply(size: Int): Option[GameAction] = {
      //Fire missiles at x,y,z for a random range
None
}
}
object Illuminate extends GameAction {
def apply(size: Int): Option[GameAction] = {
//make visible x,y,z to both players
None
}
}
object RandomTransport extends GameAction {
def apply(size: Int): Option[GameAction] = {
None
}
}
object Attack extends GameAction {
//def apply(point: Point): Either[GameState, GameStateError] = {
// //this is temp
// None
//}
}
}
|
bneil/battlespace
|
src/main/scala/com/neilconcepts/battlespace/domain/GameActions.scala
|
Scala
|
mit
| 1,407 |
/*
* Copyright 2010 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.fusesource.fabric.webui.patching
import javax.ws.rs._
@Path("/patches/profiles")
class ProfileUpgradeResource extends BaseUpgradeResource {
@GET
override def get = not_found
@GET
@Path("{version_id}/{profile_id}")
def possible_upgrades(
@PathParam("version_id") version_id: String,
@PathParam("profile_id") profile_id: String) = {
val version = get_version(version_id)
val profile = get_profile(profile_id, version)
patch_service.getPossibleUpgrades(profile)
}
@POST
@Path("profile")
def apply_upgrades(
@PathParam("version_id") version_id: String,
@PathParam("profile_id") profile_id: String,
dto: ApplyUpgradesDTO) = {
val version = get_version(version_id)
val profile = get_profile(profile_id, version)
patch_service.applyUpgrades(profile, dto.upgrades)
}
}
|
gnodet/fuse
|
fmc/fmc-rest/src/main/scala/org/fusesource/fabric/webui/patching/ProfileUpgradeResource.scala
|
Scala
|
apache-2.0
| 1,573 |
////////////////////////////////////////////////////////////////////////////////
// //
// OpenSolid is a generic library for the representation and manipulation //
// of geometric objects such as points, curves, surfaces, and volumes. //
// //
// Copyright 2007-2015 by Ian Mackenzie //
// [email protected] //
// //
// This Source Code Form is subject to the terms of the Mozilla Public //
// License, v. 2.0. If a copy of the MPL was not distributed with this file, //
// you can obtain one at http://mozilla.org/MPL/2.0/. //
// //
////////////////////////////////////////////////////////////////////////////////
package org.opensolid.core
import scala.util.Random
import org.opensolid.core.DoubleGenerators._
import org.opensolid.core.Vector3dGenerators._
import org.scalacheck._
trait Direction3dGenerators {
private[this] val randomDirection3d: Gen[Direction3d] = {
val vectorGenerator =
vectorWithin(VectorBounds3d(Interval(-1.0, 1.0), Interval(-1.0, 1.0), Interval(-1.0, 1.0)))
val radiusPredicate = (vector: Vector3d) => Interval(0.25, 1.0).contains(vector.squaredLength)
vectorGenerator.retryUntil(radiusPredicate).map(_.direction.get)
}
val anyDirection3d: Gen[Direction3d] =
Gen.frequency(
1 -> Direction3d.X,
1 -> Direction3d.Y,
1 -> Direction3d.Z,
1 -> -Direction3d.X,
1 -> -Direction3d.Y,
1 -> -Direction3d.Z,
8 -> randomDirection3d
)
implicit val arbitraryDirection3d: Arbitrary[Direction3d] = Arbitrary(anyDirection3d)
}
object Direction3dGenerators extends Direction3dGenerators
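// A minimal usage sketch, not from the opensolid sources: it only assumes Gen.sample from
// ScalaCheck and the anyDirection3d generator defined above, and simply draws a few values
// produced by the rejection-sampling strategy described in randomDirection3d.
object Direction3dGeneratorsSampleSketch {
  def main(args: Array[String]): Unit = {
    val samples = (1 to 3).flatMap(_ => Direction3dGenerators.anyDirection3d.sample)
    samples.foreach(println)
  }
}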
|
ianmackenzie/opensolid-core
|
src/test/scala/org/opensolid/core/Direction3dGenerators.scala
|
Scala
|
mpl-2.0
| 2,017 |
package com.orendainx.trucking.enrichment
import java.io.{InputStreamReader, Reader}
import java.util.{Calendar, Date}
import better.files.File
import com.github.tototoshi.csv.CSVReader
/**
* @author Edgar Orendain <[email protected]>
*/
object DriverTimesheetAPI {
lazy val apply = new DriverTimesheetAPI(new InputStreamReader(getClass.getResourceAsStream("timesheet-default.conf")))
def apply(filename: String) = new DriverTimesheetAPI(File(filename).newBufferedReader)
}
class DriverTimesheetAPI(datasource: Reader) {
private val reader = CSVReader.open(datasource)
private val values = reader.all()
reader.close()
/** Queries the driver timesheet for hours logged.
*
   * @param driverId The id of the driver to query for
   * @param eventTime The event time, in epoch milliseconds, used to determine the week of the year to look up
   * @return the number of hours the given driver has logged for that week
*/
def hoursLogged(driverId: Int, eventTime: Long): Int = {
val cal = Calendar.getInstance()
cal.setTime(new Date(eventTime))
val weekNumber = cal.get(Calendar.WEEK_OF_YEAR)
values.filter(_.head.toInt == driverId).collectFirst{ case lst: List[_] if lst(1).toInt == weekNumber => lst(3).toInt }.get
}
/** Queries the driver timesheet for miles logged.
*
   * @param driverId The id of the driver to query for
   * @param eventTime The event time, in epoch milliseconds, used to determine the week of the year to look up
   * @return the number of miles the given driver has logged for that week
*/
def milesLogged(driverId: Int, eventTime: Long): Int = {
val cal = Calendar.getInstance()
cal.setTime(new Date(eventTime))
val weekNumber = cal.get(Calendar.WEEK_OF_YEAR)
values.filter(_.head.toInt == driverId).collectFirst{ case lst: List[_] if lst(1).toInt == weekNumber => lst(4).toInt }.get
}
}
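// A minimal usage sketch, not from the trucking-iot sources: it assumes a CSV file at the
// hypothetical path "timesheet.csv" laid out the way the lookups above expect
// (column 0 = driver id, column 1 = week of year, column 3 = hours, column 4 = miles).
object DriverTimesheetAPIUsageSketch {
  def main(args: Array[String]): Unit = {
    val api = DriverTimesheetAPI("timesheet.csv") // hypothetical file path
    val now = System.currentTimeMillis()
    println(s"hours logged this week: ${api.hoursLogged(driverId = 1, eventTime = now)}")
    println(s"miles logged this week: ${api.milesLogged(driverId = 1, eventTime = now)}")
  }
}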
|
orendain/trucking-iot
|
enrichment/src/main/scala/com/orendainx/trucking/enrichment/DriverTimesheetAPI.scala
|
Scala
|
apache-2.0
| 1,648 |
package com.twitter.finagle.util
import com.twitter.finagle.Stack
import com.twitter.finagle.param.{Label, ProtocolLibrary}
import com.twitter.util.registry.GlobalRegistry
import java.util.concurrent.atomic.AtomicInteger
object StackRegistry {
/**
* Represents an entry in the registry.
*/
case class Entry(addr: String, stack: Stack[_], params: Stack.Params) {
def modules: Seq[Module] =
stack.tails.map { node =>
val raw = node.head.parameters
val reflected = raw.foldLeft(Seq.empty[(String, () => String)]) {
case (seq, s) if s.show(params(s)).nonEmpty =>
seq ++ s.show(params(s))
// If Stack.Param.show() returns an empty Seq, and the parameter is a case class, obtain the names
// and values via reflection.
case (seq, s) =>
params(s) match {
case p: Product =>
// TODO: many case classes have a $outer field because they close over an outside scope.
// this is not very useful, and it might make sense to filter them out in the future.
val fields = p.getClass.getDeclaredFields.map(_.getName)
val valueFunctions = p.productIterator.map(v => () => v.toString).toSeq
seq ++ fields.zipAll(valueFunctions, "<unknown>", () => "<unknown>")
case _ => seq
}
}
Module(
node.head.role.name,
node.head.description,
reflected.map { case (n, v) => (n, v()) }
)
}.toSeq
val name: String = params[Label].label
val protocolLibrary: String = params[ProtocolLibrary].name
}
/**
* The module describing a given Param for a Stack element.
*/
case class Module(name: String, description: String, fields: Seq[(String, String)])
}
/**
 * A registry that allows the registration of a string identifier with a
 * [[com.twitter.finagle.Stack]] and its params. This is especially useful
* in keeping a process global registry of Finagle clients and servers for
* dynamic introspection.
*/
trait StackRegistry {
import StackRegistry._
/** The name of the [[StackRegistry]], to be used for identification in the registry. */
def registryName: String
// thread-safe updates via synchronization on `this`
private[this] var registry = Map.empty[String, Entry]
private[this] val numEntries = new AtomicInteger(0)
// thread-safe updates via synchronization on `this`
private[this] var duplicates: Map[String, Seq[Entry]] =
Map.empty[String, Seq[Entry]]
/**
* Returns any registered [[Entry Entries]] that had the same [[Label]].
*/
def registeredDuplicates: Seq[Entry] = synchronized {
duplicates.values.flatten.toSeq
}
/** Registers an `addr`, `Stack`, and `Params`. */
def register(addr: String, stk: Stack[_], params: Stack.Params): Unit = {
val entry = Entry(addr, stk, params)
addEntries(entry)
synchronized {
if (registry.contains(entry.name)) {
val updated = duplicates.get(entry.name) match {
case Some(values) => values :+ entry
case None => Seq(entry)
}
duplicates += entry.name -> updated
}
registry += entry.name -> entry
}
}
/**
* Utility for getting the registry key's prefix for an [[Entry]].
*/
protected def registryPrefix(entry: Entry): Seq[String] =
Seq(registryName, entry.protocolLibrary, entry.name, entry.addr)
/** Unregisters an `addr`, `Stack`, and `Params`. */
def unregister(addr: String, stk: Stack[_], params: Stack.Params): Unit = {
val entry = Entry(addr, stk, params)
synchronized {
duplicates.get(entry.name) match {
case Some(dups) =>
if (dups.size == 1)
duplicates -= entry.name
else
// We may not remove the exact same entry, but since they are duplicates,
// it does not matter.
duplicates += entry.name -> dups.drop(1)
case None =>
          // only remove when there are no more duplicates
registry -= entry.name
}
}
removeEntries(entry)
}
private[this] def addEntries(entry: Entry): Unit = {
val prefix = registryPrefix(entry)
entry.modules.foreach {
case Module(paramName, _, reflected) =>
reflected.foreach {
case (field, value) =>
val key = prefix ++ Seq(paramName, field)
add(key, value)
}
}
}
protected[this] def add(key: Seq[String], value: String): Unit = {
if (GlobalRegistry.get.put(key, value).isEmpty)
numEntries.incrementAndGet()
}
private[this] def removeEntries(entry: Entry): Unit = {
val prefix = registryPrefix(entry)
val name = entry.name
entry.modules.foreach {
case Module(paramName, _, reflected) =>
reflected.foreach {
case (field, _) =>
val key = prefix ++ Seq(paramName, field)
remove(key)
}
}
}
protected[this] def remove(key: Seq[String]): Unit = {
if (GlobalRegistry.get.remove(key).isDefined)
numEntries.decrementAndGet()
}
/** Returns the number of entries */
def size: Int = numEntries.get
/** Returns a list of all entries. */
def registrants: Iterable[Entry] = synchronized { registry.values }
// added for tests
private[finagle] def clear(): Unit = synchronized {
registry = Map.empty[String, Entry]
duplicates = Map.empty[String, Seq[Entry]]
}
}
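// A minimal illustration, not from the finagle sources, of the key layout written into the
// GlobalRegistry by addEntries above: registryPrefix(entry) yields
// registryName/protocolLibrary/label/addr, and each module parameter field is appended after it.
// The literal values below are made up for illustration only.
object StackRegistryKeyLayoutSketch {
  def main(args: Array[String]): Unit = {
    val prefix = Seq("client", "http", "my-service", "localhost:8080") // registryName, protocol, label, addr
    val key = prefix ++ Seq("Transport.Options", "noDelay")            // module role name, param field
    println(key.mkString("/"))
  }
}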
|
luciferous/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/util/StackRegistry.scala
|
Scala
|
apache-2.0
| 5,481 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.aggregate
import java.nio.ByteBuffer
import com.google.common.primitives.{Doubles, Ints, Longs}
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile.PercentileDigest
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData}
import org.apache.spark.sql.catalyst.util.QuantileSummaries
import org.apache.spark.sql.catalyst.util.QuantileSummaries.{defaultCompressThreshold, Stats}
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.types._
/**
* The ApproximatePercentile function returns the approximate percentile(s) of a column at the given
* percentage(s). A percentile is a watermark value below which a given percentage of the column
* values fall. For example, the percentile of column `col` at percentage 50% is the median of
* column `col`.
*
* This function supports partial aggregation.
*
* @param child child expression that can produce column value with `child.eval(inputRow)`
* @param percentageExpression Expression that represents a single percentage value or
* an array of percentage values. Each percentage value must be between
* 0.0 and 1.0.
* @param accuracyExpression Integer literal expression of approximation accuracy. Higher value
* yields better accuracy, the default value is
* DEFAULT_PERCENTILE_ACCURACY.
*/
@ExpressionDescription(
usage = """
_FUNC_(col, percentage [, accuracy]) - Returns the approximate `percentile` of the numeric
column `col` which is the smallest value in the ordered `col` values (sorted from least to
      greatest) such that no more than `percentage` of `col` values is less than or equal to
      that value. The value of percentage must be between 0.0 and 1.0. The `accuracy`
parameter (default: 10000) is a positive numeric literal which controls approximation accuracy
at the cost of memory. Higher value of `accuracy` yields better accuracy, `1.0/accuracy` is
the relative error of the approximation.
When `percentage` is an array, each value of the percentage array must be between 0.0 and 1.0.
In this case, returns the approximate percentile array of column `col` at the given
percentage array.
""",
examples = """
Examples:
> SELECT _FUNC_(col, array(0.5, 0.4, 0.1), 100) FROM VALUES (0), (1), (2), (10) AS tab(col);
[1,1,0]
> SELECT _FUNC_(col, 0.5, 100) FROM VALUES (0), (6), (7), (9), (10) AS tab(col);
7
""",
group = "agg_funcs",
since = "2.1.0")
case class ApproximatePercentile(
child: Expression,
percentageExpression: Expression,
accuracyExpression: Expression,
override val mutableAggBufferOffset: Int,
override val inputAggBufferOffset: Int)
extends TypedImperativeAggregate[PercentileDigest] with ImplicitCastInputTypes {
def this(child: Expression, percentageExpression: Expression, accuracyExpression: Expression) = {
this(child, percentageExpression, accuracyExpression, 0, 0)
}
def this(child: Expression, percentageExpression: Expression) = {
this(child, percentageExpression, Literal(ApproximatePercentile.DEFAULT_PERCENTILE_ACCURACY))
}
// Mark as lazy so that accuracyExpression is not evaluated during tree transformation.
private lazy val accuracy: Long = accuracyExpression.eval().asInstanceOf[Number].longValue
override def inputTypes: Seq[AbstractDataType] = {
// Support NumericType, DateType and TimestampType since their internal types are all numeric,
// and can be easily cast to double for processing.
Seq(TypeCollection(NumericType, DateType, TimestampType),
TypeCollection(DoubleType, ArrayType(DoubleType, containsNull = false)), IntegralType)
}
// Mark as lazy so that percentageExpression is not evaluated during tree transformation.
private lazy val (returnPercentileArray, percentages) =
percentageExpression.eval() match {
// Rule ImplicitTypeCasts can cast other numeric types to double
case null => (false, null)
case num: Double => (false, Array(num))
case arrayData: ArrayData => (true, arrayData.toDoubleArray())
}
override def checkInputDataTypes(): TypeCheckResult = {
val defaultCheck = super.checkInputDataTypes()
if (defaultCheck.isFailure) {
defaultCheck
} else if (!percentageExpression.foldable || !accuracyExpression.foldable) {
TypeCheckFailure(s"The accuracy or percentage provided must be a constant literal")
} else if (accuracy <= 0 || accuracy > Int.MaxValue) {
TypeCheckFailure(s"The accuracy provided must be a literal between (0, ${Int.MaxValue}]" +
s" (current value = $accuracy)")
} else if (percentages == null) {
TypeCheckFailure("Percentage value must not be null")
} else if (percentages.exists(percentage => percentage < 0.0D || percentage > 1.0D)) {
TypeCheckFailure(
s"All percentage values must be between 0.0 and 1.0 " +
s"(current = ${percentages.mkString(", ")})")
} else {
TypeCheckSuccess
}
}
override def createAggregationBuffer(): PercentileDigest = {
val relativeError = 1.0D / accuracy
new PercentileDigest(relativeError)
}
override def update(buffer: PercentileDigest, inputRow: InternalRow): PercentileDigest = {
val value = child.eval(inputRow)
// Ignore empty rows, for example: percentile_approx(null)
if (value != null) {
// Convert the value to a double value
val doubleValue = child.dataType match {
case DateType => value.asInstanceOf[Int].toDouble
case TimestampType => value.asInstanceOf[Long].toDouble
case n: NumericType => n.numeric.toDouble(value.asInstanceOf[n.InternalType])
case other: DataType =>
throw QueryExecutionErrors.dataTypeUnexpectedError(other)
}
buffer.add(doubleValue)
}
buffer
}
override def merge(buffer: PercentileDigest, other: PercentileDigest): PercentileDigest = {
buffer.merge(other)
buffer
}
override def eval(buffer: PercentileDigest): Any = {
val doubleResult = buffer.getPercentiles(percentages)
val result = child.dataType match {
case DateType => doubleResult.map(_.toInt)
case TimestampType => doubleResult.map(_.toLong)
case ByteType => doubleResult.map(_.toByte)
case ShortType => doubleResult.map(_.toShort)
case IntegerType => doubleResult.map(_.toInt)
case LongType => doubleResult.map(_.toLong)
case FloatType => doubleResult.map(_.toFloat)
case DoubleType => doubleResult
case _: DecimalType => doubleResult.map(Decimal(_))
case other: DataType =>
throw QueryExecutionErrors.dataTypeUnexpectedError(other)
}
if (result.length == 0) {
null
} else if (returnPercentileArray) {
new GenericArrayData(result)
} else {
result(0)
}
}
override def withNewMutableAggBufferOffset(newOffset: Int): ApproximatePercentile =
copy(mutableAggBufferOffset = newOffset)
override def withNewInputAggBufferOffset(newOffset: Int): ApproximatePercentile =
copy(inputAggBufferOffset = newOffset)
override def children: Seq[Expression] = Seq(child, percentageExpression, accuracyExpression)
// Returns null for empty inputs
override def nullable: Boolean = true
// The result type is the same as the input type.
private lazy val internalDataType: DataType = {
if (returnPercentileArray) ArrayType(child.dataType, false) else child.dataType
}
override def dataType: DataType = internalDataType
override def prettyName: String =
getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("percentile_approx")
override def serialize(obj: PercentileDigest): Array[Byte] = {
ApproximatePercentile.serializer.serialize(obj)
}
override def deserialize(bytes: Array[Byte]): PercentileDigest = {
ApproximatePercentile.serializer.deserialize(bytes)
}
}
object ApproximatePercentile {
// Default accuracy of Percentile approximation. Larger value means better accuracy.
// The default relative error can be deduced by defaultError = 1.0 / DEFAULT_PERCENTILE_ACCURACY
val DEFAULT_PERCENTILE_ACCURACY: Int = 10000
/**
* PercentileDigest is a probabilistic data structure used for approximating percentiles
* with limited memory. PercentileDigest is backed by [[QuantileSummaries]].
*
* @param summaries underlying probabilistic data structure [[QuantileSummaries]].
*/
class PercentileDigest(private var summaries: QuantileSummaries) {
def this(relativeError: Double) = {
this(new QuantileSummaries(defaultCompressThreshold, relativeError, compressed = true))
}
private[sql] def isCompressed: Boolean = summaries.compressed
/** Returns compressed object of [[QuantileSummaries]] */
def quantileSummaries: QuantileSummaries = {
if (!isCompressed) compress()
summaries
}
/** Insert an observation value into the PercentileDigest data structure. */
def add(value: Double): Unit = {
summaries = summaries.insert(value)
}
/** In-place merges in another PercentileDigest. */
def merge(other: PercentileDigest): Unit = {
if (!isCompressed) compress()
summaries = summaries.merge(other.quantileSummaries)
}
/**
* Returns the approximate percentiles of all observation values at the given percentages.
* A percentile is a watermark value below which a given percentage of observation values fall.
* For example, the following code returns the 25th, median, and 75th percentiles of
* all observation values:
*
* {{{
* val Array(p25, median, p75) = percentileDigest.getPercentiles(Array(0.25, 0.5, 0.75))
* }}}
*/
def getPercentiles(percentages: Array[Double]): Array[Double] = {
if (!isCompressed) compress()
if (summaries.count == 0 || percentages.length == 0) {
Array.emptyDoubleArray
} else {
val result = new Array[Double](percentages.length)
var i = 0
while (i < percentages.length) {
          // Since summaries.count != 0, the query here never returns None.
result(i) = summaries.query(percentages(i)).get
i += 1
}
result
}
}
private final def compress(): Unit = {
summaries = summaries.compress()
}
}
/**
* Serializer for class [[PercentileDigest]]
*
* This class is thread safe.
*/
class PercentileDigestSerializer {
private final def length(summaries: QuantileSummaries): Int = {
// summaries.compressThreshold, summary.relativeError, summary.count
Ints.BYTES + Doubles.BYTES + Longs.BYTES +
// length of summary.sampled
Ints.BYTES +
// summary.sampled, Array[Stat(value: Double, g: Long, delta: Long)]
summaries.sampled.length * (Doubles.BYTES + Longs.BYTES + Longs.BYTES)
}
final def serialize(obj: PercentileDigest): Array[Byte] = {
val summary = obj.quantileSummaries
val buffer = ByteBuffer.wrap(new Array(length(summary)))
buffer.putInt(summary.compressThreshold)
buffer.putDouble(summary.relativeError)
buffer.putLong(summary.count)
buffer.putInt(summary.sampled.length)
var i = 0
while (i < summary.sampled.length) {
val stat = summary.sampled(i)
buffer.putDouble(stat.value)
buffer.putLong(stat.g)
buffer.putLong(stat.delta)
i += 1
}
buffer.array()
}
final def deserialize(bytes: Array[Byte]): PercentileDigest = {
val buffer = ByteBuffer.wrap(bytes)
val compressThreshold = buffer.getInt()
val relativeError = buffer.getDouble()
val count = buffer.getLong()
val sampledLength = buffer.getInt()
val sampled = new Array[Stats](sampledLength)
var i = 0
while (i < sampledLength) {
val value = buffer.getDouble()
val g = buffer.getLong()
val delta = buffer.getLong()
sampled(i) = Stats(value, g, delta)
i += 1
}
val summary = new QuantileSummaries(compressThreshold, relativeError, sampled, count, true)
new PercentileDigest(summary)
}
}
val serializer: PercentileDigestSerializer = new PercentileDigestSerializer
}
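// A minimal usage sketch, not from the Spark sources: it drives PercentileDigest directly,
// mirroring how createAggregationBuffer/update/eval use it above. With the default accuracy,
// the relative error is 1.0 / DEFAULT_PERCENTILE_ACCURACY = 0.0001.
object PercentileDigestUsageSketch {
  import ApproximatePercentile.{DEFAULT_PERCENTILE_ACCURACY, PercentileDigest}

  def main(args: Array[String]): Unit = {
    val digest = new PercentileDigest(1.0d / DEFAULT_PERCENTILE_ACCURACY)
    (1 to 100).foreach(i => digest.add(i.toDouble))
    val Array(p25, median, p75) = digest.getPercentiles(Array(0.25, 0.5, 0.75))
    println(s"p25 = $p25, median = $median, p75 = $p75") // approximately 25, 50 and 75
  }
}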
|
witgo/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/ApproximatePercentile.scala
|
Scala
|
apache-2.0
| 13,488 |
/*
* Copyright (c) 2013 Daniel Krzywicki <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package pl.edu.agh.scalamas.mas.logic
import pl.edu.agh.scalamas.mas.LogicTypes._
/**
* Created by Daniel on 2015-01-14.
*/
trait PopulationStrategy {
def populationStrategy: PopulationProvider
trait PopulationProvider {
def initialPopulation: Population
}
}
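// A minimal sketch, not from the scala-mas sources, of how this cake-pattern slice could be
// implemented by a concrete strategy. The actual Population value depends on LogicTypes and is
// deliberately left unspecified here.
trait ConstantPopulationStrategy extends PopulationStrategy {
  def populationStrategy: PopulationProvider = new PopulationProvider {
    def initialPopulation: Population =
      sys.error("depends on LogicTypes.Population; supplied by the mixing component")
  }
}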
|
eleaar/scala-mas
|
core/src/main/scala/pl/edu/agh/scalamas/mas/logic/PopulationStrategy.scala
|
Scala
|
mit
| 1,426 |
package routes
import akka.actor.ActorRef
import cattamer.CatMasterGeneral
/**
* Created by ruguer on 3/25/15.
*/
trait RequireMasterGeneral {
val catMasterGeneral : ActorRef
}
|
raymondpoling/CatsOfUlthar
|
src/main/scala/routes/RequireMasterGeneral.scala
|
Scala
|
apache-2.0
| 183 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.network
import java.io.IOException
import java.net._
import java.nio.channels._
import java.util
import java.util.concurrent._
import java.util.concurrent.atomic._
import com.yammer.metrics.core.Gauge
import kafka.cluster.EndPoint
import kafka.common.KafkaException
import kafka.metrics.KafkaMetricsGroup
import kafka.server.KafkaConfig
import kafka.utils._
import org.apache.kafka.common.MetricName
import org.apache.kafka.common.metrics._
import org.apache.kafka.common.network.{InvalidReceiveException, ChannelBuilder,
PlaintextChannelBuilder, SSLChannelBuilder}
import org.apache.kafka.common.security.ssl.SSLFactory
import org.apache.kafka.common.protocol.SecurityProtocol
import org.apache.kafka.common.protocol.types.SchemaException
import org.apache.kafka.common.utils.{SystemTime, Time, Utils}
import scala.collection._
import scala.util.control.{NonFatal, ControlThrowable}
/**
* An NIO socket server. The threading model is
* 1 Acceptor thread that handles new connections
* Acceptor has N Processor threads that each have their own selector and read requests from sockets
* M Handler threads that handle requests and produce responses back to the processor threads for writing.
*/
class SocketServer(val config: KafkaConfig, val metrics: Metrics, val time: Time) extends Logging with KafkaMetricsGroup {
val channelConfigs = config.channelConfigs
val endpoints = config.listeners
val numProcessorThreads = config.numNetworkThreads
val maxQueuedRequests = config.queuedMaxRequests
val sendBufferSize = config.socketSendBufferBytes
val recvBufferSize = config.socketReceiveBufferBytes
val maxRequestSize = config.socketRequestMaxBytes
val maxConnectionsPerIp = config.maxConnectionsPerIp
val connectionsMaxIdleMs = config.connectionsMaxIdleMs
val maxConnectionsPerIpOverrides = config.maxConnectionsPerIpOverrides
val totalProcessorThreads = numProcessorThreads * endpoints.size
this.logIdent = "[Socket Server on Broker " + config.brokerId + "], "
val requestChannel = new RequestChannel(totalProcessorThreads, maxQueuedRequests)
private val processors = new Array[Processor](totalProcessorThreads)
private[network] var acceptors = mutable.Map[EndPoint,Acceptor]()
private val allMetricNames = (0 until totalProcessorThreads).map { i =>
val tags = new util.HashMap[String, String]()
tags.put("networkProcessor", i.toString)
new MetricName("io-wait-ratio", "socket-server-metrics", tags)
}
/* I'm pushing the mapping of port-to-protocol to the processor level,
so the processor can put the correct protocol in the request channel.
we'll probably have a more elegant way of doing this once we patch the request channel
to include more information about security and authentication.
TODO: re-consider this code when working on KAFKA-1683
*/
private val portToProtocol: ConcurrentHashMap[Int, SecurityProtocol] = new ConcurrentHashMap[Int, SecurityProtocol]()
/**
* Start the socket server
*/
def startup() {
val quotas = new ConnectionQuotas(maxConnectionsPerIp, maxConnectionsPerIpOverrides)
this.synchronized {
var processorBeginIndex = 0
endpoints.values.foreach(endpoint => {
val acceptor = new Acceptor(endpoint.host, endpoint.port, sendBufferSize, recvBufferSize, config.brokerId, requestChannel, processors, processorBeginIndex, numProcessorThreads, quotas,
endpoint.protocolType, portToProtocol, channelConfigs, maxQueuedRequests, maxRequestSize, connectionsMaxIdleMs, metrics, allMetricNames, time)
acceptors.put(endpoint, acceptor)
Utils.newThread("kafka-socket-acceptor-%s-%d".format(endpoint.protocolType.toString, endpoint.port), acceptor, false).start()
acceptor.awaitStartup
processorBeginIndex += numProcessorThreads
})
}
newGauge("NetworkProcessorAvgIdlePercent",
new Gauge[Double] {
def value = allMetricNames.map( metricName =>
metrics.metrics().get(metricName).value()).sum / totalProcessorThreads
}
)
info("Started " + acceptors.size + " acceptor threads")
}
// register the processor threads for notification of responses
requestChannel.addResponseListener(id => processors(id).wakeup())
/**
* Shutdown the socket server
*/
def shutdown() = {
info("Shutting down")
this.synchronized {
acceptors.values.foreach(_.shutdown)
processors.foreach(_.shutdown)
}
info("Shutdown completed")
}
def boundPort(protocol: SecurityProtocol = SecurityProtocol.PLAINTEXT): Int = {
try {
acceptors(endpoints(protocol)).serverChannel.socket().getLocalPort
} catch {
case e: Exception => throw new KafkaException("Tried to check server's port before server was started or checked for port of non-existing protocol", e)
}
}
}
/**
* A base class with some helper variables and methods
*/
private[kafka] abstract class AbstractServerThread(connectionQuotas: ConnectionQuotas) extends Runnable with Logging {
private val startupLatch = new CountDownLatch(1)
private val shutdownLatch = new CountDownLatch(1)
private val alive = new AtomicBoolean(true)
def wakeup()
/**
* Initiates a graceful shutdown by signaling to stop and waiting for the shutdown to complete
*/
def shutdown(): Unit = {
alive.set(false)
wakeup()
shutdownLatch.await()
}
/**
* Wait for the thread to completely start up
*/
def awaitStartup(): Unit = startupLatch.await
/**
* Record that the thread startup is complete
*/
protected def startupComplete() = {
startupLatch.countDown()
}
/**
* Record that the thread shutdown is complete
*/
protected def shutdownComplete() = shutdownLatch.countDown()
/**
* Is the server still running?
*/
protected def isRunning = alive.get
/**
* Close the given key and associated socket
*/
def close(key: SelectionKey) {
if(key != null) {
key.attach(null)
close(key.channel.asInstanceOf[SocketChannel])
swallowError(key.cancel())
}
}
def close(channel: SocketChannel) {
if(channel != null) {
debug("Closing connection from " + channel.socket.getRemoteSocketAddress())
connectionQuotas.dec(channel.socket.getInetAddress)
swallowError(channel.socket().close())
swallowError(channel.close())
}
}
}
/**
 * Thread that accepts and configures new connections. Only one of these is needed per endpoint.
*/
private[kafka] class Acceptor(val host: String,
private val port: Int,
val sendBufferSize: Int,
val recvBufferSize: Int,
brokerId: Int,
requestChannel: RequestChannel,
processors: Array[Processor],
processorBeginIndex: Int,
numProcessorThreads: Int,
connectionQuotas: ConnectionQuotas,
protocol: SecurityProtocol,
portToProtocol: ConcurrentHashMap[Int, SecurityProtocol],
channelConfigs: java.util.Map[String, Object],
maxQueuedRequests: Int,
maxRequestSize: Int,
connectionsMaxIdleMs: Long,
metrics: Metrics,
allMetricNames: Seq[MetricName],
time: Time) extends AbstractServerThread(connectionQuotas) with KafkaMetricsGroup {
val nioSelector = java.nio.channels.Selector.open()
val serverChannel = openServerSocket(host, port)
val processorEndIndex = processorBeginIndex + numProcessorThreads
portToProtocol.put(serverChannel.socket().getLocalPort, protocol)
this.synchronized {
for (i <- processorBeginIndex until processorEndIndex) {
processors(i) = new Processor(i,
time,
maxRequestSize,
numProcessorThreads,
requestChannel,
connectionQuotas,
connectionsMaxIdleMs,
protocol,
channelConfigs,
metrics
)
Utils.newThread("kafka-network-thread-%d-%s-%d".format(brokerId, protocol.name, i), processors(i), false).start()
}
}
/**
* Accept loop that checks for new connection attempts
*/
def run() {
serverChannel.register(nioSelector, SelectionKey.OP_ACCEPT);
startupComplete()
var currentProcessor = processorBeginIndex
try {
while (isRunning) {
try {
val ready = nioSelector.select(500)
if (ready > 0) {
val keys = nioSelector.selectedKeys()
val iter = keys.iterator()
while (iter.hasNext && isRunning) {
var key: SelectionKey = null
try {
key = iter.next
iter.remove()
if (key.isAcceptable)
accept(key, processors(currentProcessor))
else
throw new IllegalStateException("Unrecognized key state for acceptor thread.")
// round robin to the next processor thread
currentProcessor = (currentProcessor + 1) % processorEndIndex
if (currentProcessor < processorBeginIndex) currentProcessor = processorBeginIndex
} catch {
case e: Throwable => error("Error while accepting connection", e)
}
}
}
}
catch {
// We catch all the throwables to prevent the acceptor thread from exiting on exceptions due
          // to a select operation on a specific channel or a bad request. We don't want
          // the broker to stop responding to requests from other clients in these scenarios.
case e: ControlThrowable => throw e
case e: Throwable => error("Error occurred", e)
}
}
} finally {
debug("Closing server socket and selector.")
swallowError(serverChannel.close())
swallowError(nioSelector.close())
shutdownComplete()
}
}
/*
* Create a server socket to listen for connections on.
*/
def openServerSocket(host: String, port: Int): ServerSocketChannel = {
val socketAddress =
if(host == null || host.trim.isEmpty)
new InetSocketAddress(port)
else
new InetSocketAddress(host, port)
val serverChannel = ServerSocketChannel.open()
serverChannel.configureBlocking(false)
serverChannel.socket().setReceiveBufferSize(recvBufferSize)
try {
serverChannel.socket.bind(socketAddress)
info("Awaiting socket connections on %s:%d.".format(socketAddress.getHostName, serverChannel.socket.getLocalPort))
} catch {
case e: SocketException =>
throw new KafkaException("Socket server failed to bind to %s:%d: %s.".format(socketAddress.getHostName, port, e.getMessage), e)
}
serverChannel
}
/*
* Accept a new connection
*/
def accept(key: SelectionKey, processor: Processor) {
val serverSocketChannel = key.channel().asInstanceOf[ServerSocketChannel]
val socketChannel = serverSocketChannel.accept()
try {
connectionQuotas.inc(socketChannel.socket().getInetAddress)
socketChannel.configureBlocking(false)
socketChannel.socket().setTcpNoDelay(true)
socketChannel.socket().setKeepAlive(true)
socketChannel.socket().setSendBufferSize(sendBufferSize)
debug("Accepted connection from %s on %s. sendBufferSize [actual|requested]: [%d|%d] recvBufferSize [actual|requested]: [%d|%d]"
.format(socketChannel.socket.getInetAddress, socketChannel.socket.getLocalSocketAddress,
socketChannel.socket.getSendBufferSize, sendBufferSize,
socketChannel.socket.getReceiveBufferSize, recvBufferSize))
processor.accept(socketChannel)
} catch {
case e: TooManyConnectionsException =>
info("Rejected connection from %s, address already has the configured maximum of %d connections.".format(e.ip, e.count))
close(socketChannel)
}
}
/**
* Wakeup the thread for selection.
*/
@Override
def wakeup = nioSelector.wakeup()
}
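// A standalone sketch, not from the Kafka sources, of the round-robin index arithmetic used in
// the accept loop above: each acceptor cycles through its own slice of the processors array,
// [processorBeginIndex, processorEndIndex). The slice of 3 processors starting at index 3 is
// made up for illustration.
object AcceptorRoundRobinSketch {
  def main(args: Array[String]): Unit = {
    val processorBeginIndex = 3
    val numProcessorThreads = 3
    val processorEndIndex = processorBeginIndex + numProcessorThreads
    var currentProcessor = processorBeginIndex
    val assigned = (1 to 7).map { _ =>
      val chosen = currentProcessor
      currentProcessor = (currentProcessor + 1) % processorEndIndex
      if (currentProcessor < processorBeginIndex) currentProcessor = processorBeginIndex
      chosen
    }
    println(assigned.mkString(", ")) // 3, 4, 5, 3, 4, 5, 3
  }
}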
/**
 * Thread that processes all requests from a single connection. There are N of these running in parallel,
 * each of which has its own selector.
*/
private[kafka] class Processor(val id: Int,
val time: Time,
val maxRequestSize: Int,
val totalProcessorThreads: Int,
val requestChannel: RequestChannel,
connectionQuotas: ConnectionQuotas,
val connectionsMaxIdleMs: Long,
val protocol: SecurityProtocol,
val channelConfigs: java.util.Map[String, Object],
val metrics: Metrics) extends AbstractServerThread(connectionQuotas) with KafkaMetricsGroup {
private val newConnections = new ConcurrentLinkedQueue[SocketChannel]()
private val inflightResponses = mutable.Map[String, RequestChannel.Response]()
private val channelBuilder = createChannelBuilder
private val metricTags = new util.HashMap[String, String]()
metricTags.put("networkProcessor", id.toString)
newGauge("IdlePercent",
new Gauge[Double] {
def value = {
metrics.metrics().get(new MetricName("io-wait-ratio", "socket-server-metrics", metricTags)).value()
}
},
JavaConversions.mapAsScalaMap(metricTags)
)
private val selector = new org.apache.kafka.common.network.Selector(
maxRequestSize,
connectionsMaxIdleMs,
metrics,
time,
"socket-server",
metricTags,
false,
channelBuilder)
override def run() {
startupComplete()
while(isRunning) {
try {
// setup any new connections that have been queued up
configureNewConnections()
// register any new responses for writing
processNewResponses()
try {
selector.poll(300)
} catch {
case e @ (_: IllegalStateException | _: IOException) => {
error("Closing processor %s due to illegal state or IO exception".format(id))
swallow(closeAll())
shutdownComplete()
throw e
}
case e: InvalidReceiveException =>
// Log warning and continue since Selector already closed the connection
warn("Connection was closed due to invalid receive. Processor will continue handling other connections")
}
collection.JavaConversions.collectionAsScalaIterable(selector.completedReceives).foreach(receive => {
try {
val req = RequestChannel.Request(processor = id, connectionId = receive.source, buffer = receive.payload, startTimeMs = time.milliseconds, securityProtocol = protocol)
requestChannel.sendRequest(req)
} catch {
case e @ (_: InvalidRequestException | _: SchemaException) => {
// note that even though we got an exception, we can assume that receive.source is valid. Issues with constructing a valid receive object were handled earlier
error("Closing socket for " + receive.source + " because of error", e)
selector.close(receive.source)
}
}
selector.mute(receive.source)
})
collection.JavaConversions.iterableAsScalaIterable(selector.completedSends()).foreach(send => {
val resp = inflightResponses.remove(send.destination()).get
resp.request.updateRequestMetrics()
selector.unmute(send.destination())
})
} catch {
// We catch all the throwables here to prevent the processor thread from exiting. We do this because
// letting a processor exit might cause bigger impact on the broker. Usually the exceptions thrown would
// be either associated with a specific socket channel or a bad request. We just ignore the bad socket channel
        // or request. This behavior might need to be reviewed if we see an exception that needs the entire broker to stop.
case e : ControlThrowable => throw e
case e : Throwable =>
error("Processor got uncaught exception.", e)
}
}
debug("Closing selector - processor " + id)
swallowError(closeAll())
shutdownComplete()
}
private def processNewResponses() {
var curr = requestChannel.receiveResponse(id)
while(curr != null) {
try {
curr.responseAction match {
case RequestChannel.NoOpAction => {
// There is no response to send to the client, we need to read more pipelined requests
// that are sitting in the server's socket buffer
curr.request.updateRequestMetrics
trace("Socket server received empty response to send, registering for read: " + curr)
selector.unmute(curr.request.connectionId)
}
case RequestChannel.SendAction => {
trace("Socket server received response to send, registering for write and sending data: " + curr)
selector.send(curr.responseSend)
inflightResponses += (curr.request.connectionId -> curr)
}
case RequestChannel.CloseConnectionAction => {
curr.request.updateRequestMetrics
trace("Closing socket connection actively according to the response code.")
selector.close(curr.request.connectionId)
}
}
} finally {
curr = requestChannel.receiveResponse(id)
}
}
}
/**
* Queue up a new connection for reading
*/
def accept(socketChannel: SocketChannel) {
newConnections.add(socketChannel)
wakeup()
}
/**
* Register any new connections that have been queued up
*/
private def configureNewConnections() {
while(!newConnections.isEmpty) {
val channel = newConnections.poll()
try {
debug("Processor " + id + " listening to new connection from " + channel.socket.getRemoteSocketAddress)
val localHost = channel.socket().getLocalAddress.getHostAddress
val localPort = channel.socket().getLocalPort
val remoteHost = channel.socket().getInetAddress.getHostAddress
val remotePort = channel.socket().getPort
val connectionId = localHost + ":" + localPort + "-" + remoteHost + ":" + remotePort
selector.register(connectionId, channel)
} catch {
// We explicitly catch all non fatal exceptions and close the socket to avoid socket leak. The other
// throwables will be caught in processor and logged as uncaught exception.
case NonFatal(e) =>
// need to close the channel here to avoid socket leak.
close(channel)
error("Processor " + id + " closed connection from " + channel.getRemoteAddress, e)
}
}
}
private def createChannelBuilder(): ChannelBuilder = {
val channelBuilder: ChannelBuilder = if (protocol == SecurityProtocol.SSL) new SSLChannelBuilder(SSLFactory.Mode.SERVER)
else new PlaintextChannelBuilder()
channelBuilder.configure(channelConfigs)
channelBuilder
}
/**
* Close all open connections
*/
def closeAll() {
selector.close()
}
/**
* Wakeup the thread for selection.
*/
@Override
def wakeup = selector.wakeup()
}
class ConnectionQuotas(val defaultMax: Int, overrideQuotas: Map[String, Int]) {
private val overrides = overrideQuotas.map(entry => (InetAddress.getByName(entry._1), entry._2))
private val counts = mutable.Map[InetAddress, Int]()
def inc(addr: InetAddress) {
counts synchronized {
val count = counts.getOrElse(addr, 0)
counts.put(addr, count + 1)
val max = overrides.getOrElse(addr, defaultMax)
if(count >= max)
throw new TooManyConnectionsException(addr, max)
}
}
def dec(addr: InetAddress) {
counts synchronized {
val count = counts.get(addr).get
if(count == 1)
counts.remove(addr)
else
counts.put(addr, count - 1)
}
}
}
class TooManyConnectionsException(val ip: InetAddress, val count: Int) extends KafkaException("Too many connections from %s (maximum = %d)".format(ip, count))
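// A minimal usage sketch, not from the Kafka sources, showing when ConnectionQuotas rejects a
// connection: with a made-up per-IP override of 2 for 127.0.0.1, the third inc() for that
// address throws TooManyConnectionsException while other addresses fall back to defaultMax.
object ConnectionQuotasUsageSketch {
  import java.net.InetAddress

  def main(args: Array[String]): Unit = {
    val quotas = new ConnectionQuotas(defaultMax = 10, overrideQuotas = Map("127.0.0.1" -> 2))
    val localhost = InetAddress.getByName("127.0.0.1")
    quotas.inc(localhost) // first connection from 127.0.0.1
    quotas.inc(localhost) // second connection, still within the override of 2
    try quotas.inc(localhost) // third connection exceeds the override
    catch { case e: TooManyConnectionsException => println(e.getMessage) }
    quotas.dec(localhost) // a connection closed, count goes back down
  }
}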
|
bpupadhyaya/kafka
|
core/src/main/scala/kafka/network/SocketServer.scala
|
Scala
|
apache-2.0
| 21,361 |
package dotty.tools.scaladoc
package tasty
import scala.jdk.CollectionConverters._
import dotty.tools.scaladoc.Scaladoc.CommentSyntax
import dotty.tools.scaladoc.tasty.comments.Comment
import scala.quoted._
object ScaladocSupport:
def parseCommentString(using Quotes, DocContext)(comment: String, sym: reflect.Symbol, pos: Option[reflect.Position]): Comment =
import reflect.report
val preparsed = comments.Preparser.preparse(comments.Cleaner.clean(comment))
val commentSyntax =
preparsed.syntax.headOption match {
case Some(commentSetting) =>
CommentSyntax.parse(commentSetting).getOrElse {
val msg = s"not a valid comment syntax: $commentSetting, defaulting to Markdown syntax."
// we should update pos with span from documentation
pos.fold(report.warning(msg))(report.warning(msg, _))
CommentSyntax.default
}
case None => summon[DocContext].args.defaultSyntax
}
val parser = commentSyntax match {
case CommentSyntax.Wiki =>
comments.WikiCommentParser(comments.Repr(quotes)(sym))
case CommentSyntax.Markdown =>
comments.MarkdownCommentParser(comments.Repr(quotes)(sym))
}
parser.parse(preparsed)
def parseComment(using Quotes, DocContext)(docstring: String, tree: reflect.Tree): Comment =
val commentString: String =
if tree.symbol.isClassDef || tree.symbol.owner.isClassDef then
import dotty.tools.dotc
given ctx: dotc.core.Contexts.Context = quotes.asInstanceOf[scala.quoted.runtime.impl.QuotesImpl].ctx
val sym = tree.symbol.asInstanceOf[dotc.core.Symbols.Symbol]
comments.CommentExpander.cookComment(sym)(using ctx)
.get.expanded.get
else
docstring
parseCommentString(commentString, tree.symbol, Some(tree.pos))
|
dotty-staging/dotty
|
scaladoc/src/dotty/tools/scaladoc/tasty/ScalaDocSupport.scala
|
Scala
|
apache-2.0
| 1,853 |
object Holder {
protected case class C
}
import Holder.C
println(/* accessible: false */ C)
println(classOf[/* accessible: false */ C])
|
ilinum/intellij-scala
|
testdata/resolve2/import/access/ProtectedClass.scala
|
Scala
|
apache-2.0
| 141 |
/*
Copyright (c) 2012-2014, Université de Lorraine, Nancy, France
Copyright (c) 2014-2015, Christophe Calvès
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Université de Lorraine nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package zk
import scalaz.{Monad, NaturalTransformation, MonadTrans}
/** An iterator whose next value of type X (the '''focus''') is computed in the monad M
* using a '''parameter''' of type Y. When there is no next value, it gives
* an end value (the '''answer''') of type B.
*
* @tparam M monad the computation is living in.
* @tparam X values of the iterator.
* @tparam Y parameter for next values.
* @tparam B end result.
*/
sealed abstract class Rotareti[M[_],X,Y,B] {
/** Uses ''f'' to run the iterator until there is no more focus.
*
* @param f compute the parameter Y from present focus X.
   * @return the answer (final value when there is no more focus).
*/
def getAnswer(f : X => M[Y]) : M[B]
/** Lifts the monad the computation of next focus is living in by the monad transformer T */
def lift[T[_[_],_]](implicit T : MonadTrans[T]) : Rotareti[({type λ[α] = T[M,α]})#λ,X,Y,B]
/** Attaches a transformation to the focuses and parameters of the iterator.
*
* @param domain transformation of focuses.
* @param codomain transformation of parameters.
* @tparam R type of the ''new'' focuses.
* @tparam S type of the ''new'' parameters.
*/
def refocus[R,S](domain : X => R, codomain : S => M[Y]) : Rotareti[M,R,S,B]
/** Replaces the monad the computation is living in (M) by another monad (N) using
* a natural transformation.
*
* @param nat natural transformation from M to N.
* @param N evidence that N is a monad.
* @tparam N new monad for the computation.
* @return transformed iterator.
*/
def transform[N[_]](nat : NaturalTransformation[M,N])(implicit N : Monad[N]) : Rotareti[N,X,Y,B]
/** Is this iterator an [[Answer]] (true) or a [[Focus]] (false). */
def isAnswer : Boolean
}
/** Case where there is no more focus in the iterator but an end value ''answer''.
*
* @param answer end value of the iterator.
 * @param M evidence that M is a monad.
* @tparam M monad the computation is living in.
* @tparam X values of the iterator.
* @tparam Y parameter for next values.
* @tparam B type of ''answer''.
*/
final case class Answer[M[_],X,Y,B](val answer : B)(implicit M : Monad[M]) extends Rotareti[M,X,Y,B] {
/** Returns ''answer''. */
def getAnswer(f : X => M[Y]) : M[B] = M.point[B](answer)
/** Lifts the monad the computation of next focus is living in by the monad transformer T */
def lift[T[_[_],_]](implicit T : MonadTrans[T]) : Answer[({type λ[α] = T[M,α]})#λ,X,Y,B] = {
type TM[Z] = T[M,Z]
val TM : Monad[TM] = T[M](M)
Answer[TM,X,Y,B](answer)(TM)
}
/** Attaches a transformation to the focuses and parameters of the iterator.
*
* @param domain transformation of focuses.
* @param codomain transformation of parameters.
* @tparam R type of the ''new'' focuses.
* @tparam S type of the ''new'' parameters.
*/
def refocus[R,S](domain : X => R, codomain : S => M[Y]) : Answer[M,R,S,B] = Answer[M,R,S,B](answer)
/** Replaces the monad the computation is living in (M) by another monad (N) using
* a natural transformation.
*
* @param nat natural transformation from M to N.
* @param N evidence that N is a monad.
* @tparam N new monad for the computation.
* @return transformed iterator.
*/
def transform[N[_]](nat : NaturalTransformation[M,N])(implicit N : Monad[N]) : Answer[N,X,Y,B] =
Answer[N,X,Y,B](answer)(N)
/** true */
val isAnswer : Boolean = true
}
/**
 * Case where the iterator still has a ''focus'', together with a ''context'' that computes
 * the rest of the iterator from the parameter supplied for that focus.
 *
 * @param focus current focus of the iterator.
 * @param context computes the rest of the iterator from the parameter given for the focus.
 * @param M evidence that M is a monad.
* @tparam M monad the computation is living in.
* @tparam X values of the iterator.
* @tparam Y parameter for next values.
* @tparam B end result.
*/
final case class Focus[M[_],X,Y,B](val focus : X, val context : Y => M[Rotareti[M,X,Y,B]])(implicit M : Monad[M]) extends Rotareti[M,X,Y,B] {
import M.monadSyntax._
/** A loop that computes the next step using ''f''(''focus'') as parameter.
*
   * @param f compute the parameter Y from the current ''focus''.
   * @return the answer (final value when there is no more focus).
*/
def getAnswer(f : X => M[Y]) : M[B] = f(focus) >>= context >>= (_.getAnswer(f))
/** Lifts the monad the computation of next focus is living in by the monad transformer T */
def lift[T[_[_],_]](implicit T : MonadTrans[T]) : Focus[({type λ[α] = T[M,α]})#λ,X,Y,B] = {
type TM[Z] = T[M,Z]
val TM : Monad[TM] = T[M](M)
Focus[TM,X,Y,B](focus, (y:Y) => T.liftM[M,Rotareti[TM,X,Y,B]](M.apply(context(y))(_.lift[T](T))))(TM)
}
/** Attaches a transformation to the focuses and parameters of the iterator.
*
* @param domain transformation of ''focus''.
* @param codomain transformation of parameters.
* @tparam R type of the ''new'' focuses.
* @tparam S type of the ''new'' parameters.
*/
def refocus[R,S](domain : X => R, codomain : S => M[Y]) : Focus[M,R,S,B] = Focus[M,R,S,B](domain(focus), (s:S) => M.apply(M.bind(codomain(s))(context))(_.refocus(domain,codomain)))
/** Replaces the monad the computation is living in (M) by another monad (N) using
* a natural transformation.
*
* @param nat natural transformation from M to N.
* @param N evidence that N is a monad.
* @tparam N new monad for the computation.
* @return transformed iterator.
*/
def transform[N[_]](nat : NaturalTransformation[M,N])(implicit N : Monad[N]) : Focus[N,X,Y,B] =
Focus[N,X,Y,B](focus, (y:Y) => nat(M.apply(context(y))(_.transform[N](nat)(N))))(N)
/** false */
val isAnswer : Boolean = false
}
object Rotareti {
/** Replaces the monad the computation is living in (M) by another monad (N) using
* a natural transformation.
*
* @param mr the iterator to transform.
* @param nat natural transformation from M to N.
* @param M evidence that M is a monad.
* @param N evidence that N is a monad.
* @tparam M present monad of mr.
* @tparam N new monad for the computation.
* @return transformed iterator.
*/
def transform[M[_],N[_],X,Y,B](mr : M[Rotareti[M,X,Y,B]], nat : NaturalTransformation[M,N])(implicit M : Monad[M], N : Monad[N]) : N[Rotareti[N,X,Y,B]] =
nat[Rotareti[N,X,Y,B]](M.apply(mr)(_.transform[N](nat)(N)))
}
/** Transforms A to B giving focuses of type X in the process and expecting parameters of type Y.
* Focuses can be seen as requests made by the transformation and parameters as responses to these requests.
*
* ''default'' is used as a fallback transformation from X to Y when needed.
*
* For example:
* In order to transform a term of type A, the transformation could give as focuses its subterms of type X.
* It would expect as parameters subterms of B.
*/
case class ELens[M[_],A,X,Y,B](val lens : A => M[Rotareti[M,X,Y,B]] , val default : X => M[Y]) extends (A => M[Rotareti[M,X,Y,B]]) {
/** Just ''lens'' */
final def apply(a:A) : M[Rotareti[M,X,Y,B]] = lens(a)
}
/** An iterator with exactly one focus on the Identity monad. */
case class Context[X,Y,B](val focus : X, val context : Y => B) {
/** Transforms this iterator into a [[Rotareti]] */
final def toRotareti[F[_]](implicit F : Monad[F]) : Rotareti[F,X,Y,B] = Focus[F,X,Y,B](focus , (y:Y) => F.point(Answer[F,X,Y,B](context(y))))
}
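/** A minimal usage sketch, not from the zk.scala sources: a one-step iterator in the Option
  * monad (instance provided by scalaz.std.option). The single focus 42 is handed to getAnswer's
  * callback, whose String result becomes the parameter and then the final answer.
  */
object RotaretiUsageSketch {
  import scalaz.std.option._

  def main(args: Array[String]): Unit = {
    val iterator: Rotareti[Option, Int, String, String] =
      Focus[Option, Int, String, String](42, y => Some(Answer[Option, Int, String, String](y)))
    println(iterator.getAnswer(x => Some(s"focus was $x"))) // Some(focus was 42)
  }
}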
|
christophe-calves/zk.scala
|
scala/src/main/scala/zk/ELens.scala
|
Scala
|
bsd-3-clause
| 8,925 |
/*
* Copyright © 2015-2019 the contributors (see Contributors.md).
*
* This file is part of Knora.
*
* Knora is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Knora is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public
* License along with Knora. If not, see <http://www.gnu.org/licenses/>.
*/
package org.knora.webapi.e2e.v2
import java.io.File
import java.net.URLEncoder
import akka.actor.ActorSystem
import akka.http.javadsl.model.StatusCodes
import akka.http.scaladsl.model.headers.BasicHttpCredentials
import akka.http.scaladsl.model.{ContentTypes, HttpEntity, Multipart}
import akka.http.scaladsl.testkit.RouteTestTimeout
import org.knora.webapi._
import org.knora.webapi.e2e.v2.ResponseCheckerR2RV2._
import org.knora.webapi.messages.store.triplestoremessages.RdfDataObject
import org.knora.webapi.responders.v2.search._
import org.knora.webapi.routing.RouteUtilV2
import org.knora.webapi.routing.v1.ValuesRouteV1
import org.knora.webapi.routing.v2.{ResourcesRouteV2, SearchRouteV2, StandoffRouteV2}
import org.knora.webapi.testing.tags.E2ETest
import org.knora.webapi.util.{FileUtil, MutableTestIri, StringFormatter}
import org.knora.webapi.util.IriConversions._
import org.knora.webapi.util.jsonld.{JsonLDConstants, JsonLDDocument, JsonLDUtil}
import org.xmlunit.builder.{DiffBuilder, Input}
import org.xmlunit.diff.Diff
import spray.json.JsString
import scala.concurrent.ExecutionContextExecutor
/**
* End-to-end test specification for the search endpoint. This specification uses the Spray Testkit as documented
* here: http://spray.io/documentation/1.2.2/spray-testkit/
*/
@E2ETest
class SearchRouteV2R2RSpec extends R2RSpec {
private implicit val stringFormatter: StringFormatter = StringFormatter.getGeneralInstance
override def testConfigSource: String =
"""
|# akka.loglevel = "DEBUG"
|# akka.stdout-loglevel = "DEBUG"
""".stripMargin
private val searchPath = new SearchRouteV2(routeData).knoraApiPath
private val resourcePath = new ResourcesRouteV2(routeData).knoraApiPath
private val standoffPath = new StandoffRouteV2(routeData).knoraApiPath
private val valuesPath = new ValuesRouteV1(routeData).knoraApiPath
implicit def default(implicit system: ActorSystem): RouteTestTimeout = RouteTestTimeout(settings.defaultTimeout)
implicit val ec: ExecutionContextExecutor = system.dispatcher
private val anythingUser = SharedTestDataADM.anythingUser1
private val anythingUserEmail = anythingUser.email
private val anythingProjectIri = SharedTestDataADM.ANYTHING_PROJECT_IRI
private val incunabulaUser = SharedTestDataADM.incunabulaMemberUser
private val incunabulaUserEmail = incunabulaUser.email
private val password = "test"
private val hamletResourceIri = new MutableTestIri
// If true, writes all API responses to test data files. If false, compares the API responses to the existing test data files.
private val writeTestDataFiles = false
override lazy val rdfDataObjects: List[RdfDataObject] = List(
RdfDataObject(path = "_test_data/demo_data/images-demo-data.ttl", name = "http://www.knora.org/data/00FF/images"),
RdfDataObject(path = "_test_data/all_data/anything-data.ttl", name = "http://www.knora.org/data/0001/anything"),
RdfDataObject(path = "_test_data/all_data/incunabula-data.ttl", name = "http://www.knora.org/data/0803/incunabula"),
RdfDataObject(path = "_test_data/all_data/beol-data.ttl", name = "http://www.knora.org/data/0801/beol"),
RdfDataObject(path = "_test_data/e2e.v2.SearchRouteV2R2RSpec/gravsearchtest1-admin.ttl", name = "http://www.knora.org/data/admin"),
RdfDataObject(path = "_test_data/e2e.v2.SearchRouteV2R2RSpec/gravsearchtest1-onto.ttl", name = "http://www.knora.org/ontology/0666/gravsearchtest1"),
RdfDataObject(path = "_test_data/e2e.v2.SearchRouteV2R2RSpec/gravsearchtest1-data.ttl", name = "http://www.knora.org/data/0666/gravsearchtest1")
)
"The Search v2 Endpoint" should {
"perform a fulltext search for 'Narr'" in {
Get("/v2/search/Narr") ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/NarrFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a count query for a fulltext search for 'Narr'" in {
Get("/v2/search/count/Narr") ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 210)
}
}
"perform a fulltext search for 'Ding'" in {
Get("/v2/search/Ding") ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
// the response involves a forbidden resource
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/searchResponseWithforbiddenResource.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a fulltext search for 'Dinge' (in the complex schema)" in {
Get("/v2/search/Dinge") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/DingeFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a fulltext search for 'Dinge' (in the simple schema)" in {
Get("/v2/search/Dinge").addHeader(new SchemaHeader(RouteUtilV2.SIMPLE_SCHEMA_NAME)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/DingeFulltextSearchSimple.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a count query for a fulltext search for 'Dinge'" in {
Get("/v2/search/count/Dinge") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 1)
}
}
"perform a fulltext query for a search value containing a single character wildcard" in {
Get("/v2/search/Unif%3Frm") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingUniform.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a fulltext query for a search value containing a multiple character wildcard" in {
Get("/v2/search/Unif*m") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingUniform.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Queries without type inference
"perform a Gravsearch query for an anything:Thing with an optional date and sort by date" in {
val gravsearchQuery =
"""PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasDate ?date .
|} WHERE {
|
| ?thing a knora-api:Resource .
| ?thing a anything:Thing .
|
| OPTIONAL {
| ?thing anything:hasDate ?date .
| anything:hasDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| anything:hasInteger knora-api:objectType xsd:integer .
| ?intVal a xsd:integer .
| FILTER(?intVal = 123454321)
| }
|}
|ORDER BY DESC(?date)
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingWithOptionalDateSortedDesc.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for an anything:Thing with an optional date used as a sort criterion" in {
val gravsearchQuery =
"""PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasDate ?date .
|} WHERE {
| ?thing a knora-api:Resource .
| ?thing a anything:Thing .
|
| OPTIONAL {
| ?thing anything:hasDate ?date .
| anything:hasDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| anything:hasInteger knora-api:objectType xsd:integer .
| ?intVal a xsd:integer .
| FILTER(?intVal = 123454321)
| }
|}
|ORDER BY DESC(?date)
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 43)
}
}
"perform a Gravsearch query for books that have the title 'Zeitglöcklein des Lebens' returning the title in the answer (in the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have the dcterms:title 'Zeitglöcklein des Lebens' returning the title in the answer (in the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX dcterms: <http://purl.org/dc/terms/>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book dcterms:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book dcterms:title ?title .
| dcterms:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have the title 'Zeitglöcklein des Lebens' returning the title in the answer (in the simple schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)).addHeader(new SchemaHeader(RouteUtilV2.SIMPLE_SCHEMA_NAME)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswerSimple.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have the dcterms:title 'Zeitglöcklein des Lebens' returning the title in the answer (in the simple schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX dcterms: <http://purl.org/dc/terms/>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book dcterms:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book dcterms:title ?title .
| dcterms:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)).addHeader(new SchemaHeader(RouteUtilV2.SIMPLE_SCHEMA_NAME)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswerSimple.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for books that have the title 'Zeitglöcklein des Lebens' returning the title in the answer" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 2)
}
}
"perform a Gravsearch query for books that have the title 'Zeitglöcklein des Lebens' not returning the title in the answer" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchNoTitleInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that do not have the title 'Zeitglöcklein des Lebens'" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title != "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/NotZeitgloeckleinExtendedSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for books that do not have the title 'Zeitglöcklein des Lebens'" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title != "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
// naively 19 - 2 would give 17, but the expected count is 18 :-)
// there is a total of 19 incunabula books, of which two have the title "Zeitglöcklein des Lebens und Leidens Christi" (see the test above)
// nevertheless, there are 18 books that have a title that is not "Zeitglöcklein des Lebens und Leidens Christi",
// because one of those two books has a second title, the Latin "Horologium devotionis circa vitam Christi"
checkCountResponse(responseAs[String], 18)
}
}
"perform a Gravsearch query for the page of a book whose seqnum equals 10, returning the seqnum and the link value" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
| ?page a knora-api:Resource .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/b6b5ff1eb703> a knora-api:Resource .
|
| ?page incunabula:seqnum ?seqnum .
| incunabula:seqnum knora-api:objectType xsd:integer .
|
| FILTER(?seqnum = 10)
|
| ?seqnum a xsd:integer .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PageWithSeqnum10WithSeqnumAndLinkValueInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for the page of a book whose seqnum equals 10, returning the seqnum and the link value" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
| ?page a knora-api:Resource .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/b6b5ff1eb703> a knora-api:Resource .
|
| ?page incunabula:seqnum ?seqnum .
| incunabula:seqnum knora-api:objectType xsd:integer .
|
| FILTER(?seqnum = 10)
|
| ?seqnum a xsd:integer .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"perform a Gravsearch query for the page of a book whose seqnum equals 10, returning only the seqnum" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
| ?page a knora-api:Resource .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/b6b5ff1eb703> a knora-api:Resource .
|
| ?page incunabula:seqnum ?seqnum .
| incunabula:seqnum knora-api:objectType xsd:integer .
|
| FILTER(?seqnum = 10)
|
| ?seqnum a xsd:integer .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PageWithSeqnum10OnlySeqnuminAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book whose seqnum is lower than or equals 10" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
| ?page a knora-api:Resource .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/b6b5ff1eb703> a knora-api:Resource .
|
| ?page incunabula:seqnum ?seqnum .
| incunabula:seqnum knora-api:objectType xsd:integer .
|
| FILTER(?seqnum <= 10)
|
| ?seqnum a xsd:integer .
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/pagesOfLatinNarrenschiffWithSeqnumLowerEquals10.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book and return them ordered by their seqnum" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
| ?page a knora-api:Resource .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/b6b5ff1eb703> a knora-api:Resource .
|
| ?page incunabula:seqnum ?seqnum .
| incunabula:seqnum knora-api:objectType xsd:integer .
|
| ?seqnum a xsd:integer .
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PagesOfNarrenschiffOrderedBySeqnum.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book and return them ordered by their seqnum and get the next OFFSET" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
| ?page a knora-api:Resource .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/b6b5ff1eb703> a knora-api:Resource .
|
| ?page incunabula:seqnum ?seqnum .
| incunabula:seqnum knora-api:objectType xsd:integer .
|
| ?seqnum a xsd:integer .
|
| } ORDER BY ?seqnum
| OFFSET 1
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PagesOfNarrenschiffOrderedBySeqnumNextOffset.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have been published on the first of March 1497 (Julian Calendar)" ignore { // literals are not supported
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate "JULIAN:1497-03-01"^^knora-api:Date .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate "JULIAN:1497-03-01"^^knora-api:Date .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"perform a Gravsearch query for books that have been published on the first of March 1497 (Julian Calendar) (2)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate ?pubdate .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| ?pubdate a knora-api:Date .
|
| FILTER(?pubdate = "JULIAN:1497-03-01"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have not been published on the first of March 1497 (Julian Calendar)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate ?pubdate .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| ?pubdate a knora-api:Date .
|
| FILTER(?pubdate != "JULIAN:1497-03-01"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksNotPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 2 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 17)
}
}
"perform a Gravsearch query for books that have not been published on the first of March 1497 (Julian Calendar) 2" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate ?pubdate .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| ?pubdate a knora-api:Date .
|
| FILTER(?pubdate < "JULIAN:1497-03-01"^^knora-api:Date || ?pubdate > "JULIAN:1497-03-01"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksNotPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 2 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 17)
}
}
"perform a Gravsearch query for books that have been published before 1497 (Julian Calendar)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate ?pubdate .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| ?pubdate a knora-api:Date .
| FILTER(?pubdate < "JULIAN:1497"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBeforeDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 4 (number of results from query below)
checkSearchResponseNumberOfResults(responseAs[String], 15)
}
}
"perform a Gravsearch query for books that have been published 1497 or later (Julian Calendar)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate ?pubdate .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| ?pubdate a knora-api:Date .
| FILTER(?pubdate >= "JULIAN:1497"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedAfterOrOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 15 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 4)
}
}
"perform a Gravsearch query for books that have been published after 1497 (Julian Calendar)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate ?pubdate .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| ?pubdate a knora-api:Date .
| FILTER(?pubdate > "JULIAN:1497"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedAfterDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 18 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"perform a Gravsearch query for books that have been published 1497 or before (Julian Calendar)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate ?pubdate .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| ?pubdate a knora-api:Date .
| FILTER(?pubdate <= "JULIAN:1497"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBeforeOrOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 1 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 18)
}
}
"perform a Gravsearch query for books that have been published after 1486 and before 1491 (Julian Calendar)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| ?book incunabula:pubdate ?pubdate .
| incunabula:pubdate knora-api:objectType knora-api:Date .
|
| ?pubdate a knora-api:Date .
|
| FILTER(?pubdate > "JULIAN:1486"^^knora-api:Date && ?pubdate < "JULIAN:1491"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBetweenDates.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 5)
}
}
"get the regions belonging to a page" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?region knora-api:isMainResource true .
|
| ?region knora-api:isRegionOf <http://rdfh.ch/0803/9d626dc76c03> .
|
| ?region knora-api:hasGeometry ?geom .
|
| ?region knora-api:hasComment ?comment .
|
| ?region knora-api:hasColor ?color .
| } WHERE {
|
| ?region a knora-api:Region .
| ?region a knora-api:Resource .
|
| ?region knora-api:isRegionOf <http://rdfh.ch/0803/9d626dc76c03> .
| knora-api:isRegionOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/9d626dc76c03> a knora-api:Resource .
|
| ?region knora-api:hasGeometry ?geom .
| knora-api:hasGeometry knora-api:objectType knora-api:Geom .
|
| ?geom a knora-api:Geom .
|
| ?region knora-api:hasComment ?comment .
| knora-api:hasComment knora-api:objectType xsd:string .
|
| ?comment a xsd:string .
|
| ?region knora-api:hasColor ?color .
| knora-api:hasColor knora-api:objectType knora-api:Color .
|
| ?color a knora-api:Color .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/RegionsForPage.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"get a book a page points to and include the page in the results (all properties present in WHERE clause)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|
|CONSTRUCT {
|
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFile ?file .
|
|} WHERE {
|
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
|
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/50e7460a7203> a knora-api:Resource .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
| knora-api:seqnum knora-api:objectType xsd:integer .
|
| ?seqnum a xsd:integer .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFile ?file .
| knora-api:hasStillImageFile knora-api:objectType knora-api:File .
|
| ?file a knora-api:File .
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/bookWithIncomingPagesWithAllRequestedProps.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"get a book a page points to and only include the page's partOf link in the results (none of the other properties)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|
|CONSTRUCT {
|
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
|} WHERE {
|
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
|
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| <http://rdfh.ch/0803/50e7460a7203> a knora-api:Resource .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
| knora-api:seqnum knora-api:objectType xsd:integer .
|
| ?seqnum a xsd:integer .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFile ?file .
| knora-api:hasStillImageFile knora-api:objectType knora-api:File .
|
| ?file a knora-api:File .
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/bookWithIncomingPagesOnlyLink.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"get incoming links pointing to an incunbaula:book, excluding isPartOf and isRegionOf" in {
var gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
|
| ?incomingRes knora-api:isMainResource true .
|
| ?incomingRes ?incomingProp <http://rdfh.ch/0803/8be1b7cf7103> .
|
|} WHERE {
|
| ?incomingRes a knora-api:Resource .
|
| ?incomingRes ?incomingProp <http://rdfh.ch/0803/8be1b7cf7103> .
|
| <http://rdfh.ch/0803/8be1b7cf7103> a knora-api:Resource .
|
| ?incomingProp knora-api:objectType knora-api:Resource .
|
| knora-api:isRegionOf knora-api:objectType knora-api:Resource .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| FILTER NOT EXISTS {
| ?incomingRes knora-api:isRegionOf <http://rdfh.ch/0803/8be1b7cf7103> .
| }
|
| FILTER NOT EXISTS {
| ?incomingRes knora-api:isPartOf <http://rdfh.ch/0803/8be1b7cf7103> .
| }
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/IncomingLinksForBook.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for an anything:Thing that has a decimal value of 2.1" ignore { // literals are not supported
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal "2.1"^^xsd:decimal .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| ?thing anything:hasDecimal "2.1"^^xsd:decimal .
| anything:hasDecimal knora-api:objectType xsd:decimal .
|
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a decimal value of 2.1 2" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| ?thing anything:hasDecimal ?decimal .
| anything:hasDecimal knora-api:objectType xsd:decimal .
|
| ?decimal a xsd:decimal .
|
| FILTER(?decimal = "2.1"^^xsd:decimal)
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingEqualsDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a decimal value bigger than 2.0" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| ?thing anything:hasDecimal ?decimal .
| anything:hasDecimal knora-api:objectType xsd:decimal .
|
| ?decimal a xsd:decimal .
|
| FILTER(?decimal > "2"^^xsd:decimal)
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingBiggerThanDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a decimal value smaller than 3.0" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| ?thing anything:hasDecimal ?decimal .
| anything:hasDecimal knora-api:objectType xsd:decimal .
|
| ?decimal a xsd:decimal .
|
| FILTER(?decimal < "3"^^xsd:decimal)
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingSmallerThanDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing that has a specific URI value" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasUri ?uri .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| ?thing anything:hasUri ?uri .
|
| FILTER(?uri = "http://www.google.ch"^^xsd:anyURI)
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingWithURI.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a Boolean value that is true" ignore { // literals are not supported
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean true
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| ?thing anything:hasBoolean true .
| anything:hasBoolean knora-api:objectType xsd:boolean .
|
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a Boolean value that is true 2" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| ?thing anything:hasBoolean ?boolean .
| anything:hasBoolean knora-api:objectType xsd:boolean .
|
| ?boolean a xsd:boolean .
|
| FILTER(?boolean = true)
|
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBoolean.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing that may have a Boolean value that is true" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| OPTIONAL {
|
| ?thing anything:hasBoolean ?boolean .
| anything:hasBoolean knora-api:objectType xsd:boolean .
|
| ?boolean a xsd:boolean .
|
| FILTER(?boolean = true)
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| anything:hasInteger knora-api:objectType xsd:integer .
| ?intVal a xsd:integer .
| FILTER(?intVal = 123454321)
| }
|} OFFSET 0""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBooleanOptionalOffset0.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the first page of results
checkSearchResponseNumberOfResults(responseAs[String], 25)
}
}
"search for an anything:Thing that may have a Boolean value that is true using an increased offset" in {
// set OFFSET to 1 to get "Testding for extended search"
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| OPTIONAL {
|
| ?thing anything:hasBoolean ?boolean .
| anything:hasBoolean knora-api:objectType xsd:boolean .
|
| ?boolean a xsd:boolean .
|
| FILTER(?boolean = true)
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| anything:hasInteger knora-api:objectType xsd:integer .
| ?intVal a xsd:integer .
| FILTER(?intVal = 123454321)
| }
|} OFFSET 1
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBooleanOptionalOffset1.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the second page of results
checkSearchResponseNumberOfResults(responseAs[String], 18)
}
}
"search for an anything:Thing that either has a Boolean value that is true or a decimal value that equals 2.1 (or both)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| {
| ?thing anything:hasBoolean ?boolean .
| anything:hasBoolean knora-api:objectType xsd:boolean .
|
| ?boolean a xsd:boolean .
|
| FILTER(?boolean = true)
| } UNION {
| ?thing anything:hasDecimal ?decimal .
| anything:hasDecimal knora-api:objectType xsd:decimal .
|
| ?decimal a xsd:decimal .
|
| FILTER(?decimal = "2.1"^^xsd:decimal)
| }
|
|} OFFSET 0
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBooleanOrDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeit' using the regex function" in {
val gravsearchQuery =
"""
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> ?propVal0 .
|
| } WHERE {
|
| ?mainRes a knora-api:Resource .
|
| ?mainRes a <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#book> .
|
|
| ?mainRes <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> ?propVal0 .
| <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> knora-api:objectType <http://www.w3.org/2001/XMLSchema#string> .
| ?propVal0 a <http://www.w3.org/2001/XMLSchema#string> .
|
| FILTER regex(?propVal0, "Zeit", "i")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeit.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeitglöcklein' using the match function" in {
val gravsearchQuery =
"""
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> ?propVal0 .
|
| } WHERE {
|
| ?mainRes a knora-api:Resource .
|
| ?mainRes a <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#book> .
|
| ?mainRes <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> ?propVal0 .
| <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> knora-api:objectType <http://www.w3.org/2001/XMLSchema#string> .
| ?propVal0 a <http://www.w3.org/2001/XMLSchema#string> .
|
| FILTER knora-api:match(?propVal0, "Zeitglöcklein")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeitglöcklein' and 'Lebens' using the match function" in {
val gravsearchQuery =
"""
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> ?propVal0 .
|
| } WHERE {
|
| ?mainRes a knora-api:Resource .
|
| ?mainRes a <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#book> .
|
| ?mainRes <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> ?propVal0 .
| <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#title> knora-api:objectType <http://www.w3.org/2001/XMLSchema#string> .
| ?propVal0 a <http://www.w3.org/2001/XMLSchema#string> .
|
| FILTER knora-api:match(?propVal0, "Zeitglöcklein AND Lebens")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for 'Zeitglöcklein des Lebens' using dcterms:title" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
| PREFIX dcterms: <http://purl.org/dc/terms/>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book dcterms:title ?title .
|
| } WHERE {
| ?book a knora-api:Resource .
|
| ?book dcterms:title ?title .
|
| dcterms:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title = 'Zeitglöcklein des Lebens und Leidens Christi')
|
| } OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing with a list value" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
| CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasListItem ?listItem .
|
| } WHERE {
| ?thing a knora-api:Resource .
|
| ?thing anything:hasListItem ?listItem .
|
| anything:hasListItem knora-api:objectType knora-api:ListNode .
|
| ?listItem a knora-api:ListNode .
|
| } OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithListValue.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 3)
}
}
"search for a text using the lang function" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a knora-api:Resource .
|
| ?thing a anything:Thing .
|
| ?thing anything:hasText ?text .
|
| anything:hasText knora-api:objectType xsd:string .
|
| ?text a xsd:string .
|
| FILTER(lang(?text) = "fr")
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LanguageFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for a specific text using the lang function" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a knora-api:Resource .
|
| ?thing a anything:Thing .
|
| ?thing anything:hasText ?text .
|
| anything:hasText knora-api:objectType xsd:string .
|
| ?text a xsd:string .
|
| FILTER(lang(?text) = "fr" && ?text = "Bonjour")
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LanguageFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"perform a fulltext search for 'Bonjour'" in {
Get("/v2/search/Bonjour") ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LanguageFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a fulltext search for the term 'text' marked up as a paragraph" in {
Get("/v2/search/text?limitToStandoffClass=" + URLEncoder.encode("http://api.knora.org/ontology/standoff/v2#StandoffParagraphTag", "UTF-8")) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithRichtextWithTermTextInParagraph.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a fulltext search count query for the term 'text' marked up as a paragraph" in {
Get("/v2/search/count/text?limitToStandoffClass=" + URLEncoder.encode("http://api.knora.org/ontology/standoff/v2#StandoffParagraphTag", "UTF-8")) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 1)
}
}
"do a fulltext search for the term 'text' marked up as italic" in {
Get("/v2/search/text?limitToStandoffClass=" + URLEncoder.encode("http://api.knora.org/ontology/standoff/v2#StandoffItalicTag", "UTF-8")) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithRichtextWithTermTextInParagraph.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a fulltext search count query for the term 'text' marked up as italic" in {
Get("/v2/search/count/text?limitToStandoffClass=" + URLEncoder.encode("http://api.knora.org/ontology/standoff/v2#StandoffItalicTag", "UTF-8")) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 1)
}
}
"do a fulltext search for the terms 'interesting' and 'text' marked up as italic" in {
Get("/v2/search/interesting%20text?limitToStandoffClass=" + URLEncoder.encode("http://api.knora.org/ontology/standoff/v2#StandoffItalicTag", "UTF-8")) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithRichtextWithTermTextInParagraph.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a fulltext search count query for the terms 'interesting' and 'text' marked up as italic" in {
Get("/v2/search/interesting%20text?limitToStandoffClass=" + URLEncoder.encode("http://api.knora.org/ontology/standoff/v2#StandoffItalicTag", "UTF-8")) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a fulltext search for the terms 'interesting' and 'boring' marked up as italic" in {
Get("/v2/search/interesting%20boring?limitToStandoffClass=" + URLEncoder.encode("http://api.knora.org/ontology/standoff/v2#StandoffItalicTag", "UTF-8")) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
// there is no single italic element that contains both 'interesting' and 'boring':
/*
<?xml version="1.0" encoding="UTF-8"?>
<text>
<p>
This is a test that contains marked up elements. This is <em>interesting text</em> in italics. This is <em>boring text</em> in italics.
</p>
</text>
*/
checkSearchResponseNumberOfResults(responseAs[String], 0)
}
}
"do a Gravsearch query for link objects that link to an incunabula book" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|
|CONSTRUCT {
| ?linkObj knora-api:isMainResource true .
|
| ?linkObj knora-api:hasLinkTo ?book .
|
|} WHERE {
| ?linkObj a knora-api:Resource .
| ?linkObj a knora-api:LinkObj .
|
| ?linkObj knora-api:hasLinkTo ?book .
| knora-api:hasLinkTo knora-api:objectType knora-api:Resource .
|
| ?book a knora-api:Resource .
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LinkObjectsToBooks.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 3)
}
}
"do a Gravsearch query for a letter that links to a specific person via two possible properties" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> .
|
|
| } WHERE {
| ?letter a knora-api:Resource .
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| beol:creationDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
|
| # testperson2
| ?letter ?linkingProp1 <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> .
|
| <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> a knora-api:Resource .
|
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| beol:hasAuthor knora-api:objectType knora-api:Resource .
| beol:hasRecipient knora-api:objectType knora-api:Resource .
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithAuthor.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch count query for a letter that links to a specific person via two possible properties" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> .
|
|
| } WHERE {
| ?letter a knora-api:Resource .
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| beol:creationDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
|
| # testperson2
| ?letter ?linkingProp1 <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> .
|
| <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> a knora-api:Resource .
|
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| beol:hasAuthor knora-api:objectType knora-api:Resource .
| beol:hasRecipient knora-api:objectType knora-api:Resource .
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to a person with a specified name" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a knora-api:Resource .
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| beol:creationDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 a knora-api:Resource .
|
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| beol:hasAuthor knora-api:objectType knora-api:Resource .
| beol:hasRecipient knora-api:objectType knora-api:Resource .
|
| ?person1 beol:hasFamilyName ?name .
|
| beol:hasFamilyName knora-api:objectType xsd:string .
| ?name a xsd:string .
|
| FILTER(?name = "Meier")
|
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch count query for a letter that links to a person with a specified name" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a knora-api:Resource .
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| beol:creationDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 a knora-api:Resource .
|
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| beol:hasAuthor knora-api:objectType knora-api:Resource .
| beol:hasRecipient knora-api:objectType knora-api:Resource .
|
| ?person1 beol:hasFamilyName ?name .
|
| beol:hasFamilyName knora-api:objectType xsd:string .
| ?name a xsd:string .
|
| FILTER(?name = "Meier")
|
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to a person with a specified name (optional)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a knora-api:Resource .
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| beol:creationDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 a knora-api:Resource .
|
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| beol:hasAuthor knora-api:objectType knora-api:Resource .
| beol:hasRecipient knora-api:objectType knora-api:Resource .
|
| OPTIONAL {
| ?person1 beol:hasFamilyName ?name .
|
| beol:hasFamilyName knora-api:objectType xsd:string .
| ?name a xsd:string .
|
| FILTER(?name = "Meier")
| }
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithNameOptional.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to another person with a specified name" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a knora-api:Resource .
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| beol:creationDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 a knora-api:Resource .
|
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| beol:hasAuthor knora-api:objectType knora-api:Resource .
| beol:hasRecipient knora-api:objectType knora-api:Resource .
|
| ?person1 beol:hasFamilyName ?name .
|
| beol:hasFamilyName knora-api:objectType xsd:string .
| ?name a xsd:string .
|
| FILTER(?name = "Muster")
|
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName2.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"run a Gravsearch query that searches for a person using foaf classes and properties" in {
val gravsearchQuery =
"""
| PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
| PREFIX foaf: <http://xmlns.com/foaf/0.1/>
|
| CONSTRUCT {
| ?person knora-api:isMainResource true .
|
| ?person foaf:familyName ?familyName .
|
| ?person foaf:givenName ?givenName .
|
| } WHERE {
| ?person a knora-api:Resource .
| ?person a foaf:Person .
|
| ?person foaf:familyName ?familyName .
| foaf:familyName knora-api:objectType xsd:string .
|
| ?familyName a xsd:string .
|
| ?person foaf:givenName ?givenName .
| foaf:givenName knora-api:objectType xsd:string .
|
| ?givenName a xsd:string .
|
| FILTER(?familyName = "Meier")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/foafPerson.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"run a Gravsearch query that searches for a single resource specified by its IRI" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true ;
| anything:hasText ?text ;
| anything:hasInteger ?integer .
|
|} WHERE {
| BIND(<http://rdfh.ch/0001/H6gBWUuJSuuO-CilHV8kQw> AS ?thing)
|
| ?thing a knora-api:Resource .
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| anything:hasText knora-api:objectType xsd:string .
| ?text a xsd:string .
| ?thing anything:hasInteger ?integer .
| anything:hasInteger knora-api:objectType xsd:integer .
| ?integer a xsd:integer.
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingByIriWithRequestedValues.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query for a letter and get information about the persons associated with it" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?familyName .
|
|
| } WHERE {
| BIND(<http://rdfh.ch/0801/_B3lQa6tSymIq7_7SowBsA> AS ?letter)
| ?letter a knora-api:Resource .
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| beol:creationDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
|
| # testperson2
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 a knora-api:Resource .
|
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| beol:hasAuthor knora-api:objectType knora-api:Resource .
| beol:hasRecipient knora-api:objectType knora-api:Resource .
|
| ?person1 beol:hasFamilyName ?familyName .
| beol:hasFamilyName knora-api:objectType xsd:string .
|
| ?familyName a xsd:string .
|
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithAuthorWithInformation.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query for the pages of a book whose seqnum is lower than or equals 10, with the book as the main resource" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
| ?book incunabula:title ?title .
|
| ?page knora-api:isPartOf ?book ;
| incunabula:seqnum ?seqnum .
| } WHERE {
| BIND(<http://rdfh.ch/0803/b6b5ff1eb703> AS ?book)
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
| ?title a xsd:string .
|
| ?page a incunabula:page .
| ?page a knora-api:Resource .
|
| ?page knora-api:isPartOf ?book .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| ?page incunabula:seqnum ?seqnum .
| incunabula:seqnum knora-api:objectType xsd:integer .
|
| FILTER(?seqnum <= 10)
|
| ?seqnum a xsd:integer .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/incomingPagesForBook.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"reject a Gravsearch query containing a statement whose subject is not the main resource and whose object is used in ORDER BY" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
| ?book incunabula:title ?title .
|
| ?page knora-api:isPartOf ?book ;
| incunabula:seqnum ?seqnum .
| } WHERE {
| BIND(<http://rdfh.ch/0803/b6b5ff1eb703> AS ?book)
| ?book a knora-api:Resource .
|
| ?book incunabula:title ?title .
| incunabula:title knora-api:objectType xsd:string .
| ?title a xsd:string .
|
| ?page a incunabula:page .
| ?page a knora-api:Resource .
|
| ?page knora-api:isPartOf ?book .
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| ?page incunabula:seqnum ?seqnum .
| incunabula:seqnum knora-api:objectType xsd:integer .
|
| FILTER(?seqnum <= 10)
|
| ?seqnum a xsd:integer .
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
                assert(status == StatusCodes.BadRequest, response.toString)
}
}
"do a Gravsearch query for regions that belong to pages that are part of a book with the title 'Zeitglöcklein des Lebens und Leidens Christi'" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?region knora-api:isMainResource true .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book incunabula:title ?title .
|
|} WHERE {
| ?region a knora-api:Resource .
| ?region a knora-api:Region .
|
| ?region knora-api:isRegionOf ?page .
|
| knora-api:isRegionOf knora-api:objectType knora-api:Resource .
|
| ?page a knora-api:Resource .
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| knora-api:isPartOf knora-api:objectType knora-api:Resource .
|
| ?book a knora-api:Resource .
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| incunabula:title knora-api:objectType xsd:string .
|
| ?title a xsd:string .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/regionsOfZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query containing a UNION nested in an OPTIONAL" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX gravsearchtest1: <http://0.0.0.0:3333/ontology/0666/gravsearchtest1/simple/v2#>
|
|CONSTRUCT {
| ?Project knora-api:isMainResource true .
| ?isInProject gravsearchtest1:isInProject ?Project .
|} WHERE {
| ?Project a knora-api:Resource .
| ?Project a gravsearchtest1:Project .
|
| OPTIONAL {
| ?isInProject gravsearchtest1:isInProject ?Project .
| gravsearchtest1:isInProject knora-api:objectType knora-api:Resource .
| ?isInProject a knora-api:Resource .
| { ?isInProject a gravsearchtest1:BibliographicNotice . } UNION { ?isInProject a gravsearchtest1:Person . }
| }
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ProjectsWithOptionalPersonOrBiblio.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Queries with type inference
"do a Gravsearch query in which 'rdf:type knora-api:Resource' is inferred from a more specific rdf:type (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
|CONSTRUCT {
| ?letter knora-api:isMainResource true .
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
|} WHERE {
| ?letter a beol:letter .
| ?letter beol:creationDate ?date .
| beol:creationDate knora-api:objectType knora-api:Date .
| ?date a knora-api:Date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> a beol:person .
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
| beol:hasFamilyName knora-api:objectType xsd:string .
| ?name a xsd:string .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|} ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName2.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query in which the object types of property IRIs are inferred (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
|CONSTRUCT {
| ?letter knora-api:isMainResource true .
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
|} WHERE {
| ?letter a beol:letter .
| ?letter beol:creationDate ?date .
| ?date a knora-api:Date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> a beol:person .
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
| ?name a xsd:string .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|} ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName2.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query in which the types of property objects are inferred from the knora-api:objectType of each property (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
|CONSTRUCT {
| ?letter knora-api:isMainResource true .
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
|} WHERE {
| ?letter a beol:letter .
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> a beol:person .
| ?linkingProp1 knora-api:objectType knora-api:Resource .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|} ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName2.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query in which a property's knora-api:objectType is inferred from its object (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
|CONSTRUCT {
| ?letter knora-api:isMainResource true .
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
|} WHERE {
| ?letter a beol:letter .
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> a beol:person .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|} ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName2.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query in which the types of property subjects are inferred from the knora-api:subjectType of each property (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
|CONSTRUCT {
| ?letter knora-api:isMainResource true .
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
|} WHERE {
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
| <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> beol:hasFamilyName ?name .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
|
|} ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName2.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query in which the knora-api:objectType of a property variable is inferred from a FILTER (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
|CONSTRUCT {
| ?letter knora-api:isMainResource true .
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
|} WHERE {
| ?letter beol:creationDate ?date .
| ?letter ?linkingProp1 <http://rdfh.ch/0801/H7s3FmuWTkaCXa54eFANOA> .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
|} ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithoutName.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query that finds all the books that have a page with seqnum 100, inferring types (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
|
| ?book knora-api:isMainResource true ;
| incunabula:title ?title .
|
| ?page incunabula:partOf ?book ;
| incunabula:seqnum ?seqnum .
|
|} WHERE {
|
| ?page incunabula:partOf ?book ;
| incunabula:seqnum ?seqnum .
|
| FILTER(?seqnum = 100)
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/booksWithPage100.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query that finds all the letters sent by someone called Meier, ordered by date, inferring types (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
|CONSTRUCT {
|
| ?letter knora-api:isMainResource true ;
| beol:creationDate ?date ;
| beol:hasAuthor ?author .
|
| ?author beol:hasFamilyName ?name .
|
|} WHERE {
|
| ?letter beol:hasAuthor ?author ;
| beol:creationDate ?date .
|
| ?author beol:hasFamilyName ?name .
|
| FILTER(?name = "Meier")
|
|} ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/lettersByMeier.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have the title 'Zeitglöcklein des Lebens' returning the title in the answer (in the complex schema) (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have the dcterms:title 'Zeitglöcklein des Lebens' returning the title in the answer (in the complex schema) (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX dcterms: <http://purl.org/dc/terms/>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book dcterms:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book dcterms:title ?title .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have the title 'Zeitglöcklein des Lebens' returning the title in the answer (in the simple schema) (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)).addHeader(new SchemaHeader(RouteUtilV2.SIMPLE_SCHEMA_NAME)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswerSimple.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have the dcterms:title 'Zeitglöcklein des Lebens' returning the title in the answer (in the simple schema) (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX dcterms: <http://purl.org/dc/terms/>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book dcterms:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book dcterms:title ?title .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)).addHeader(new SchemaHeader(RouteUtilV2.SIMPLE_SCHEMA_NAME)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswerSimple.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for books that have the title 'Zeitglöcklein des Lebens' returning the title in the answer (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 2)
}
}
"perform a Gravsearch query for books that have the title 'Zeitglöcklein des Lebens' not returning the title in the answer (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchNoTitleInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that do not have the title 'Zeitglöcklein des Lebens' (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| FILTER(?title != "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/NotZeitgloeckleinExtendedSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for books that do not have the title 'Zeitglöcklein des Lebens' (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| FILTER(?title != "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
                // there is a total of 19 incunabula books, of which two have the title "Zeitglöcklein des Lebens und Leidens Christi" (see test above)
                // nevertheless, 18 (not 17) books have a title other than "Zeitglöcklein des Lebens und Leidens Christi",
                // because one of those two books has a second title, the Latin "Horologium devotionis circa vitam Christi"
checkCountResponse(responseAs[String], 18)
}
}
"perform a Gravsearch query for the page of a book whose seqnum equals 10, returning the seqnum and the link value (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| FILTER(?seqnum = 10)
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PageWithSeqnum10WithSeqnumAndLinkValueInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for the page of a book whose seqnum equals 10, returning the seqnum and the link value (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| FILTER(?seqnum = 10)
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"perform a Gravsearch query for the page of a book whose seqnum equals 10, returning only the seqnum (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| FILTER(?seqnum = 10)
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PageWithSeqnum10OnlySeqnuminAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book whose seqnum is lower than or equals 10 (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| FILTER(?seqnum <= 10)
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/pagesOfLatinNarrenschiffWithSeqnumLowerEquals10.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book and return them ordered by their seqnum (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PagesOfNarrenschiffOrderedBySeqnum.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book and return them ordered by their seqnum and get the next OFFSET (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| } ORDER BY ?seqnum
| OFFSET 1
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PagesOfNarrenschiffOrderedBySeqnumNextOffset.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have been published on the first of March 1497 (Julian Calendar) (with type inference)" ignore { // literals are not supported
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate "JULIAN:1497-03-01"^^knora-api:Date .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate "JULIAN:1497-03-01"^^knora-api:Date .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"perform a Gravsearch query for books that have been published on the first of March 1497 (Julian Calendar) (2) (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(?pubdate = "JULIAN:1497-03-01"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have not been published on the first of March 1497 (Julian Calendar) (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(?pubdate != "JULIAN:1497-03-01"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksNotPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 2 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 17)
}
}
"perform a Gravsearch query for books that have not been published on the first of March 1497 (Julian Calendar) 2 (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(?pubdate < "JULIAN:1497-03-01"^^knora-api:Date || ?pubdate > "JULIAN:1497-03-01"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksNotPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 2 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 17)
}
}
"perform a Gravsearch query for books that have been published before 1497 (Julian Calendar) (with type inference)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(?pubdate < "JULIAN:1497"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBeforeDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
                // this is the negation of the query condition below (books published in 1497 or later), hence the size of the result set must be 19 (total of incunabula:book) minus 4 (number of results from the query below)
checkSearchResponseNumberOfResults(responseAs[String], 15)
}
}
"perform a Gravsearch query for books that have been published 1497 or later (Julian Calendar) (with type inference)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(?pubdate >= "JULIAN:1497"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedAfterOrOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 15 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 4)
}
}
"perform a Gravsearch query for books that have been published after 1497 (Julian Calendar) (with type inference)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(?pubdate > "JULIAN:1497"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedAfterDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
                // this is the negation of the query condition below (books published in 1497 or before), hence the size of the result set must be 19 (total of incunabula:book) minus 18 (number of results from the query below)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"perform a Gravsearch query for books that have been published 1497 or before (Julian Calendar) (with type inference)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(?pubdate <= "JULIAN:1497"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBeforeOrOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 1 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 18)
}
}
"perform a Gravsearch query for books that have been published after 1486 and before 1491 (Julian Calendar) (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(?pubdate > "JULIAN:1486"^^knora-api:Date && ?pubdate < "JULIAN:1491"^^knora-api:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBetweenDates.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 5)
}
}
"get the regions belonging to a page (with type inference)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?region knora-api:isMainResource true .
|
| ?region knora-api:isRegionOf <http://rdfh.ch/0803/9d626dc76c03> .
|
| ?region knora-api:hasGeometry ?geom .
|
| ?region knora-api:hasComment ?comment .
|
| ?region knora-api:hasColor ?color .
| } WHERE {
|
| ?region a knora-api:Region .
|
| ?region knora-api:isRegionOf <http://rdfh.ch/0803/9d626dc76c03> .
|
| ?region knora-api:hasGeometry ?geom .
|
| ?region knora-api:hasComment ?comment .
|
| ?region knora-api:hasColor ?color .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/RegionsForPage.jsonld"), false)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
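        // Note: unlike most tests in this spec, the comparison above passes a literal `false` instead of
        // `writeTestDataFiles` to readOrWriteTextFile, presumably so that this particular expected file is
        // always read for comparison and never regenerated when test data files are being rewritten.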
"get a book a page points to and include the page in the results (all properties present in WHERE clause) (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|
|CONSTRUCT {
|
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFile ?file .
|
|} WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFile ?file .
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/bookWithIncomingPagesWithAllRequestedProps.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"get a book a page points to and only include the page's partOf link in the results (none of the other properties) (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|
|CONSTRUCT {
|
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
|} WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFile ?file .
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/bookWithIncomingPagesOnlyLink.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"get incoming links pointing to an incunbaula:book, excluding isPartOf and isRegionOf (with type inference)" in {
var gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
|
| ?incomingRes knora-api:isMainResource true .
|
| ?incomingRes ?incomingProp <http://rdfh.ch/0803/8be1b7cf7103> .
|
|} WHERE {
|
| ?incomingRes ?incomingProp <http://rdfh.ch/0803/8be1b7cf7103> .
|
| <http://rdfh.ch/0803/8be1b7cf7103> a incunabula:book .
|
| FILTER NOT EXISTS {
| ?incomingRes knora-api:isRegionOf <http://rdfh.ch/0803/8be1b7cf7103> .
| }
|
| FILTER NOT EXISTS {
| ?incomingRes knora-api:isPartOf <http://rdfh.ch/0803/8be1b7cf7103> .
| }
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/IncomingLinksForBook.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for an anything:Thing that has a decimal value of 2.1 (with type inference)" ignore { // literals are not supported
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal"2.1"^^xsd:decimal .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasDecimal "2.1"^^xsd:decimal .
|
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a decimal value of 2.1 2 (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasDecimal ?decimal .
|
| FILTER(?decimal = "2.1"^^xsd:decimal)
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingEqualsDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a decimal value bigger than 2.0 (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasDecimal ?decimal .
|
| FILTER(?decimal > "2"^^xsd:decimal)
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingBiggerThanDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a decimal value smaller than 3.0 (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasDecimal ?decimal .
|
| FILTER(?decimal < "3"^^xsd:decimal)
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingSmallerThanDecimal.jsonld"), false)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing that has a Boolean value that is true (with type inference)" ignore { // literals are not supported
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
              | ?thing anything:hasBoolean true .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasBoolean true .
|
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a Boolean value that is true 2 (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasBoolean ?boolean .
|
| FILTER(?boolean = true)
|
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBoolean.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing that may have a Boolean value that is true (with type inference)" in {
// set OFFSET to 1 to get "Testding for extended search"
val gravsearchQuery =
"""PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| OPTIONAL {
| ?thing anything:hasBoolean ?boolean .
| FILTER(?boolean = true)
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| FILTER(?intVal = 123454321)
| }
|} OFFSET 1""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBooleanOptionalOffset1.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the second page of results
checkSearchResponseNumberOfResults(responseAs[String], 18)
}
}
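        // Note: in Gravsearch, OFFSET is interpreted as a page number rather than as a row offset as in
        // standard SPARQL; the page size is presumably a server-side setting, which is why OFFSET 1 in the
        // test above yields the second page of results.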
"search for an anything:Thing that either has a Boolean value that is true or a decimal value that equals 2.1 (or both) (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| {
| ?thing anything:hasBoolean ?boolean .
|
| FILTER(?boolean = true)
| } UNION {
| ?thing anything:hasDecimal ?decimal .
|
| FILTER(?decimal = "2.1"^^xsd:decimal)
| }
|
|} OFFSET 0
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBooleanOrDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeit' using the regex function (with type inference)" in {
val gravsearchQuery =
"""
| PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes incunabula:title ?propVal0 .
|
| } WHERE {
|
| ?mainRes a incunabula:book .
|
| ?mainRes incunabula:title ?propVal0 .
|
| FILTER regex(?propVal0, "Zeit", "i")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeit.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeitglöcklein' using the match function (with type inference)" in {
val gravsearchQuery =
"""
| PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes incunabula:title ?propVal0 .
|
| } WHERE {
|
| ?mainRes a incunabula:book .
|
| ?mainRes incunabula:title ?propVal0 .
|
| FILTER knora-api:match(?propVal0, "Zeitglöcklein")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeitglöcklein' and 'Lebens' using the match function (with type inference)" in {
val gravsearchQuery =
"""
| PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes incunabula:title ?propVal0 .
|
| } WHERE {
|
| ?mainRes a incunabula:book .
|
| ?mainRes incunabula:title ?propVal0 .
|
| FILTER knora-api:match(?propVal0, "Zeitglöcklein AND Lebens")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for 'Zeitglöcklein des Lebens' using dcterms:title (with type inference)" in {
val gravsearchQuery =
"""
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
| PREFIX dcterms: <http://purl.org/dc/terms/>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book dcterms:title ?title .
|
| } WHERE {
| ?book a knora-api:Resource .
|
| ?book dcterms:title ?title .
|
| FILTER(?title = 'Zeitglöcklein des Lebens und Leidens Christi')
|
| } OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing with a list value (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
| CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasListItem ?listItem .
|
| } WHERE {
| ?thing a anything:Thing .
|
| ?thing anything:hasListItem ?listItem .
|
| } OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithListValue.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 3)
}
}
"search for a text using the lang function (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
|
| ?thing anything:hasText ?text .
|
| FILTER(lang(?text) = "fr")
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LanguageFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for a specific text using the lang function (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
|
| ?thing anything:hasText ?text .
|
| FILTER(lang(?text) = "fr" && ?text = "Bonjour")
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LanguageFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for link objects that link to an incunabula book (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|
|CONSTRUCT {
| ?linkObj knora-api:isMainResource true .
|
| ?linkObj knora-api:hasLinkTo ?book .
|
|} WHERE {
| ?linkObj a knora-api:LinkObj .
|
| ?linkObj knora-api:hasLinkTo ?book .
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LinkObjectsToBooks.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 3)
}
}
"do a Gravsearch query for a letter that links to a specific person via two possible properties (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> .
|
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| # testperson2
| ?letter ?linkingProp1 <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithAuthor.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to a person with a specified name (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| ?person1 beol:hasFamilyName ?name .
|
| FILTER(?name = "Meier")
|
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to a person with a specified name (optional) (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| OPTIONAL {
| ?person1 beol:hasFamilyName ?name .
|
| FILTER(?name = "Meier")
| }
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithNameOptional.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to another person with a specified name (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| ?person1 beol:hasFamilyName ?name .
|
| FILTER(?name = "Muster")
|
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName2.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"run a Gravsearch query that searches for a person using foaf classes and propertie (with type inference)" in {
val gravsearchQuery =
"""
| PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
| PREFIX foaf: <http://xmlns.com/foaf/0.1/>
|
| CONSTRUCT {
| ?person knora-api:isMainResource true .
|
| ?person foaf:familyName ?familyName .
|
| ?person foaf:givenName ?givenName .
|
| } WHERE {
| ?person a knora-api:Resource .
| ?person a foaf:Person .
|
| ?person foaf:familyName ?familyName .
|
| ?familyName a xsd:string .
|
| ?person foaf:givenName ?givenName .
|
| ?givenName a xsd:string .
|
| FILTER(?familyName = "Meier")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/foafPerson.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"run a Gravsearch query that searches for a single resource specified by its IRI (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true ;
| anything:hasText ?text ;
| anything:hasInteger ?integer .
|
|} WHERE {
| BIND(<http://rdfh.ch/0001/H6gBWUuJSuuO-CilHV8kQw> AS ?thing)
|
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| ?thing anything:hasInteger ?integer .
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingByIriWithRequestedValues.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query for a letter and get information about the persons associated with it (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?familyName .
|
|
| } WHERE {
| BIND(<http://rdfh.ch/0801/_B3lQa6tSymIq7_7SowBsA> AS ?letter)
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| # testperson2
| ?letter ?linkingProp1 ?person1 .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| ?person1 beol:hasFamilyName ?familyName .
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithAuthorWithInformation.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query for the pages of a book whose seqnum is lower than or equals 10, with the book as the main resource (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
| ?book incunabula:title ?title .
|
| ?page knora-api:isPartOf ?book ;
| incunabula:seqnum ?seqnum .
| } WHERE {
| BIND(<http://rdfh.ch/0803/b6b5ff1eb703> AS ?book)
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| ?page incunabula:seqnum ?seqnum .
|
| FILTER(?seqnum <= 10)
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/incomingPagesForBook.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"reject a Gravsearch query containing a statement whose subject is not the main resource and whose object is used in ORDER BY (with type inference)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
| ?book incunabula:title ?title .
|
| ?page knora-api:isPartOf ?book ;
| incunabula:seqnum ?seqnum .
| } WHERE {
| BIND(<http://rdfh.ch/0803/b6b5ff1eb703> AS ?book)
|
| ?book incunabula:title ?title .
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| ?page incunabula:seqnum ?seqnum .
|
| FILTER(?seqnum <= 10)
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
                assert(status == StatusCodes.BadRequest, response.toString)
}
}
"do a Gravsearch query for regions that belong to pages that are part of a book with the title 'Zeitglöcklein des Lebens und Leidens Christi (with type inference)'" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?region knora-api:isMainResource true .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book incunabula:title ?title .
|
|} WHERE {
| ?region a knora-api:Region .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/regionsOfZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query containing a UNION nested in an OPTIONAL (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX gravsearchtest1: <http://0.0.0.0:3333/ontology/0666/gravsearchtest1/simple/v2#>
|
|CONSTRUCT {
| ?Project knora-api:isMainResource true .
| ?isInProject gravsearchtest1:isInProject ?Project .
|} WHERE {
| ?Project a gravsearchtest1:Project .
|
| OPTIONAL {
| ?isInProject gravsearchtest1:isInProject ?Project .
| { ?isInProject a gravsearchtest1:BibliographicNotice . } UNION { ?isInProject a gravsearchtest1:Person . }
| }
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ProjectsWithOptionalPersonOrBiblio.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query that searches for a list node (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
|
|?mainRes knora-api:isMainResource true .
|
|?mainRes anything:hasListItem ?propVal0 .
|
|} WHERE {
|
|?mainRes anything:hasListItem ?propVal0 .
|
|FILTER(?propVal0 = "Tree list node 02"^^knora-api:ListNode)
|
|}
|
|OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithListNodeLabel.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch count query that searches for a list node (with type inference)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/simple/v2#>
|
|CONSTRUCT {
|
|?mainRes knora-api:isMainResource true .
|
|?mainRes anything:hasListItem ?propVal0 .
|
|} WHERE {
|
|?mainRes anything:hasListItem ?propVal0 .
|
|FILTER(?propVal0 = "Tree list node 02"^^knora-api:ListNode)
|
|}
|
|OFFSET 0
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 1)
}
}
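        // The tests above all repeat the same POST-and-compare pattern. A possible refactoring, shown here only
        // as a sketch and not used by this spec, would factor that pattern into a helper; the name
        // `expectGravsearchResult` is hypothetical and it omits the optional credentials some tests add.
        def expectGravsearchResult(query: String, expectedFile: String, numberOfResults: Option[Int] = None): Unit = {
            Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, query)) ~> searchPath ~> check {
                assert(status == StatusCodes.OK, response.toString)
                val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File(expectedFile), writeTestDataFiles)
                compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
                // optionally also assert the number of results on the current page
                numberOfResults.foreach(n => checkSearchResponseNumberOfResults(responseAs[String], n))
            }
        }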
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// Queries that submit the complex schema
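        // In the complex schema (ontology IRIs without the "simple" segment), values are reified as value objects,
        // so the queries below compare via value properties such as knora-api:intValueAsInt and
        // knora-api:decimalValueAsDecimal instead of filtering directly on literals as the simple-schema queries
        // above do.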
"perform a Gravsearch query for an anything:Thing with an optional date and sort by date (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasDate ?date .
|} WHERE {
|
| ?thing a knora-api:Resource .
| ?thing a anything:Thing .
|
| OPTIONAL {
| ?thing anything:hasDate ?date .
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| ?intVal knora-api:intValueAsInt 123454321 .
| }
|}
|ORDER BY DESC(?date)
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingWithOptionalDateSortedDesc.jsonld"), false)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for an anything:Thing with an optional date used as a sort criterion (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasDate ?date .
|} WHERE {
|
| ?thing a knora-api:Resource .
| ?thing a anything:Thing .
|
| OPTIONAL {
| ?thing anything:hasDate ?date .
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| ?intVal knora-api:intValueAsInt 123454321 .
| }
|}
|ORDER BY DESC(?date)
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 43)
}
}
"perform a Gravsearch query for an anything:Thing that has an optional decimal value greater than 2 and sort by the decimal value (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
| ?thing a knora-api:Resource .
|
| OPTIONAL {
| ?thing anything:hasDecimal ?decimal .
| ?decimal knora-api:decimalValueAsDecimal ?decimalVal .
| FILTER(?decimalVal > "1"^^xsd:decimal)
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| ?intVal knora-api:intValueAsInt 123454321 .
| }
|} ORDER BY DESC(?decimal)
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingsWithOptionalDecimalGreaterThan1.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query that finds all the books that have a page with seqnum 100 (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
|
| ?book knora-api:isMainResource true ;
| incunabula:title ?title .
|
| ?page incunabula:partOf ?book ;
| incunabula:seqnum ?seqnum .
|
|} WHERE {
|
| ?page incunabula:partOf ?book ;
| incunabula:seqnum ?seqnum .
|
| ?seqnum knora-api:intValueAsInt 100 .
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/booksWithPage100.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query that finds all the letters sent by someone called Meier, ordered by date (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
|CONSTRUCT {
|
| ?letter knora-api:isMainResource true ;
| beol:creationDate ?date ;
| beol:hasAuthor ?author .
|
| ?author beol:hasFamilyName ?name .
|
|} WHERE {
|
| ?letter beol:hasAuthor ?author ;
| beol:creationDate ?date .
|
| ?author beol:hasFamilyName ?name .
|
| ?name knora-api:valueAsString "Meier" .
|
|} ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/lettersByMeier.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have the title 'Zeitglöcklein des Lebens' returning the title in the answer (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString "Zeitglöcklein des Lebens und Leidens Christi" .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchWithTitleInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for books that have the title 'Zeitglöcklein des Lebens' returning the title in the answer (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString "Zeitglöcklein des Lebens und Leidens Christi" .
|
| }
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 2)
}
}
"perform a Gravsearch query for books that have the title 'Zeitglöcklein des Lebens' not returning the title in the answer (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString "Zeitglöcklein des Lebens und Leidens Christi" .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ZeitgloeckleinExtendedSearchNoTitleInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that do not have the title 'Zeitglöcklein des Lebens' (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString ?titleStr .
|
| FILTER(?titleStr != "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/NotZeitgloeckleinExtendedSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for books that do not have the title 'Zeitglöcklein des Lebens' (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString ?titleStr .
|
| FILTER(?titleStr != "Zeitglöcklein des Lebens und Leidens Christi")
|
| }
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
// Of the 19 incunabula books, two have the title "Zeitglöcklein des Lebens und Leidens Christi" (see the test above).
// Nevertheless, 18 books (not 17) have a title that is *not* "Zeitglöcklein des Lebens und Leidens Christi":
// one of those two books also has a second title in Latin, "Horologium devotionis circa vitam Christi", which satisfies the FILTER.
checkCountResponse(responseAs[String], 18)
}
}
"perform a Gravsearch query for the page of a book whose seqnum equals 10, returning the seqnum and the link value (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| ?seqnum knora-api:intValueAsInt 10 .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PageWithSeqnum10WithSeqnumAndLinkValueInAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch count query for the page of a book whose seqnum equals 10, returning the seqnum and the link value (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| ?seqnum knora-api:intValueAsInt 10 .
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"perform a Gravsearch query for the page of a book whose seqnum equals 10, returning only the seqnum (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| ?seqnum knora-api:intValueAsInt 10 .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PageWithSeqnum10OnlySeqnuminAnswer.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book whose seqnum is lower than or equals 10 (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| ?seqnum knora-api:intValueAsInt ?seqnumInt .
|
| FILTER(?seqnumInt <= 10)
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/pagesOfLatinNarrenschiffWithSeqnumLowerEquals10.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book and return them ordered by their seqnum (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PagesOfNarrenschiffOrderedBySeqnum.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for the pages of a book and return them ordered by their seqnum and get the next OFFSET (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?page knora-api:isMainResource true .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
| } WHERE {
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf <http://rdfh.ch/0803/b6b5ff1eb703> .
|
| ?page incunabula:seqnum ?seqnum .
|
| } ORDER BY ?seqnum
| OFFSET 1
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/PagesOfNarrenschiffOrderedBySeqnumNextOffset.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have been published on the first of March 1497 (Julian Calendar) (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(knora-api:toSimpleDate(?pubdate) = "JULIAN:1497-03-01"^^knora-api-simple:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"perform a Gravsearch query for books that have not been published on the first of March 1497 (Julian Calendar) (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(knora-api:toSimpleDate(?pubdate) != "JULIAN:1497-03-01"^^knora-api-simple:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksNotPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 2 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 17)
}
}
"perform a Gravsearch query for books that have not been published on the first of March 1497 (Julian Calendar) 2 (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(knora-api:toSimpleDate(?pubdate) < "JULIAN:1497-03-01"^^knora-api-simple:Date || knora-api:toSimpleDate(?pubdate) > "JULIAN:1497-03-01"^^knora-api-simple:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksNotPublishedOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 2 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 17)
}
}
"perform a Gravsearch query for books that have been published before 1497 (Julian Calendar) (submitting the complex schema)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
| PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(knora-api:toSimpleDate(?pubdate) < "JULIAN:1497"^^knora-api-simple:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBeforeDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the complement of the condition in the next query ("published 1497 or later"), hence the size of the result set must be 19 (total of incunabula:book) minus 4 (number of results of that query)
checkSearchResponseNumberOfResults(responseAs[String], 15)
}
}
"perform a Gravsearch query for books that have been published 1497 or later (Julian Calendar) (submitting the complex schema)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
| PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(knora-api:toSimpleDate(?pubdate) >= "JULIAN:1497"^^knora-api-simple:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedAfterOrOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 15 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 4)
}
}
"perform a Gravsearch query for books that have been published after 1497 (Julian Calendar) (submitting the complex schema)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
| PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(knora-api:toSimpleDate(?pubdate) > "JULIAN:1497"^^knora-api-simple:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedAfterDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the complement of the condition in the next query ("published 1497 or before"), hence the size of the result set must be 19 (total of incunabula:book) minus 18 (number of results of that query)
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"perform a Gravsearch query for books that have been published 1497 or before (Julian Calendar) (submitting the complex schema)" in {
val gravsearchQuery =
""" PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
| PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(knora-api:toSimpleDate(?pubdate) <= "JULIAN:1497"^^knora-api-simple:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBeforeOrOnDate.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the negation of the query condition above, hence the size of the result set must be 19 (total of incunabula:book) minus 1 (number of results from query above)
checkSearchResponseNumberOfResults(responseAs[String], 18)
}
}
"perform a Gravsearch query for books that have been published after 1486 and before 1491 (Julian Calendar) (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
| } WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?book incunabula:pubdate ?pubdate .
|
| FILTER(knora-api:toSimpleDate(?pubdate) > "JULIAN:1486"^^knora-api-simple:Date && knora-api:toSimpleDate(?pubdate) < "JULIAN:1491"^^knora-api-simple:Date)
|
| } ORDER BY ?pubdate
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksPublishedBetweenDates.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 5)
}
}
"get the regions belonging to a page (submitting the complex schema)" in {
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, SharedTestDataADM.gravsearchComplexRegionsForPage)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/RegionsForPage.jsonld"), false)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"get a book a page points to and include the page in the results (all properties present in WHERE clause) (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
|
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFileValue ?file .
|
|} WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFileValue ?file .
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/bookWithIncomingPagesWithAllRequestedProps.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"get a book a page points to and only include the page's partOf link in the results (none of the other properties) (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
|
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
|} WHERE {
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:isPartOf ?book .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:seqnum ?seqnum .
|
| <http://rdfh.ch/0803/50e7460a7203> knora-api:hasStillImageFileValue ?file .
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/bookWithIncomingPagesOnlyLink.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"get incoming links pointing to an incunbaula:book, excluding isPartOf and isRegionOf (submitting the complex schema)" in {
var gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
|
| ?incomingRes knora-api:isMainResource true .
|
| ?incomingRes ?incomingProp <http://rdfh.ch/0803/8be1b7cf7103> .
|
|} WHERE {
|
| ?incomingRes ?incomingProp <http://rdfh.ch/0803/8be1b7cf7103> .
|
| <http://rdfh.ch/0803/8be1b7cf7103> a incunabula:book .
|
| FILTER NOT EXISTS {
| ?incomingRes knora-api:isRegionOf <http://rdfh.ch/0803/8be1b7cf7103> .
| }
|
| FILTER NOT EXISTS {
| ?incomingRes knora-api:isPartOf <http://rdfh.ch/0803/8be1b7cf7103> .
| }
|
|} OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/IncomingLinksForBook.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for an anything:Thing that has a decimal value of 2.1 (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasDecimal ?decimal .
|
| ?decimal knora-api:decimalValueAsDecimal "2.1"^^xsd:decimal .
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingEqualsDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a decimal value bigger than 2.0 (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasDecimal ?decimal .
|
| ?decimal knora-api:decimalValueAsDecimal ?decimalDec .
|
| FILTER(?decimalDec > "2"^^xsd:decimal)
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingBiggerThanDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for an anything:Thing that has a decimal value smaller than 3.0 (submitting the complex schema)" in {
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, SharedTestDataADM.gravsearchComplexThingSmallerThanDecimal)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingSmallerThanDecimal.jsonld"), false)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing that has a Boolean value that is true (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|} WHERE {
|
| ?thing a anything:Thing .
|
| ?thing anything:hasBoolean ?boolean .
|
| ?boolean knora-api:booleanValueAsBoolean true .
|
|}
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBoolean.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing that may have a Boolean value that is true (submitting the complex schema)" in {
// set OFFSET to 1 to get "Testding for extended search"
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|} WHERE {
|
| ?thing a anything:Thing .
|
| OPTIONAL {
| ?thing anything:hasBoolean ?boolean .
| ?boolean knora-api:booleanValueAsBoolean true .
| }
|
| MINUS {
| ?thing anything:hasInteger ?intVal .
| ?intVal knora-api:intValueAsInt 123454321 .
| }
|} OFFSET 1""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBooleanOptionalOffset1.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
// this is the second page of results
checkSearchResponseNumberOfResults(responseAs[String], 18)
}
}
"search for an anything:Thing that either has a Boolean value that is true or a decimal value that equals 2.1 (or both) (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasBoolean ?boolean .
|
| ?thing anything:hasDecimal ?decimal .
|} WHERE {
|
| ?thing a anything:Thing .
|
| {
| ?thing anything:hasBoolean ?boolean .
|
| ?boolean knora-api:booleanValueAsBoolean ?booleanBool .
|
| FILTER(?booleanBool = true)
| } UNION {
| ?thing anything:hasDecimal ?decimal .
|
| ?decimal knora-api:decimalValueAsDecimal "2.1"^^xsd:decimal .
| }
|
|} OFFSET 0
|
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithBooleanOrDecimal.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeit' using the regex function (submitting the complex schema)" in {
val gravsearchQuery =
"""
| PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes incunabula:title ?title .
|
| } WHERE {
|
| ?mainRes a incunabula:book .
|
| ?mainRes incunabula:title ?title .
|
| ?title knora-api:valueAsString ?titleStr .
|
| FILTER regex(?titleStr, "Zeit", "i")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeit.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeitglöcklein' using the match function (submitting the complex schema)" in {
val gravsearchQuery =
"""
| PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes incunabula:title ?title .
|
| } WHERE {
|
| ?mainRes a incunabula:book .
|
| ?mainRes incunabula:title ?title .
|
| ?title knora-api:valueAsString ?titleStr .
|
| FILTER knora-api:match(?titleStr, "Zeitglöcklein")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for a book whose title contains 'Zeitglöcklein' and 'Lebens' using the match function (submitting the complex schema)" in {
val gravsearchQuery =
"""
| PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
| PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
|
| ?mainRes knora-api:isMainResource true .
|
| ?mainRes incunabula:title ?title .
|
| } WHERE {
|
| ?mainRes a incunabula:book .
|
| ?mainRes incunabula:title ?title .
|
| ?title knora-api:valueAsString ?titleStr .
|
| FILTER knora-api:match(?titleStr, "Zeitglöcklein AND Lebens")
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/BooksWithTitleContainingZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 2)
}
}
"search for an anything:Thing with a list value (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
| CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasListItem ?listItem .
|
| } WHERE {
| ?thing a anything:Thing .
|
| ?thing anything:hasListItem ?listItem .
|
| } OFFSET 0
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithListValue.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 3)
}
}
"search for a text in a particular language (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
|
| ?thing anything:hasText ?text .
|
| ?text knora-api:textValueHasLanguage "fr" .
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LanguageFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"search for a specific text using the lang function (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
|
| ?thing anything:hasText ?text .
|
| ?text knora-api:valueAsString "Bonjour" .
|
| ?text knora-api:textValueHasLanguage "fr" .
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LanguageFulltextSearch.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for link objects that link to an incunabula book (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|
|CONSTRUCT {
| ?linkObj knora-api:isMainResource true .
|
| ?linkObj knora-api:hasLinkTo ?book .
|
|} WHERE {
| ?linkObj a knora-api:LinkObj .
|
| ?linkObj knora-api:hasLinkTo ?book .
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/LinkObjectsToBooks.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 3)
}
}
"do a Gravsearch query for a letter that links to a specific person via two possible properties (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> .
|
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| # testperson2
| ?letter ?linkingProp1 <http://rdfh.ch/0801/VvYVIy-FSbOJBsh2d9ZFJw> .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithAuthor.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to a person with a specified name (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| ?person1 beol:hasFamilyName ?name .
|
| ?name knora-api:valueAsString "Meier" .
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to a person with a specified name (optional) (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| OPTIONAL {
| ?person1 beol:hasFamilyName ?name .
|
| ?name knora-api:valueAsString "Meier" .
| }
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithNameOptional.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"do a Gravsearch query for a letter that links to another person with a specified name (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?name .
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| ?person1 beol:hasFamilyName ?name .
|
| ?name knora-api:valueAsString "Muster" .
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithPersonWithName2.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
checkSearchResponseNumberOfResults(responseAs[String], 1)
}
}
"run a Gravsearch query that searches for a single resource specified by its IRI (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true ;
| anything:hasText ?text ;
| anything:hasInteger ?integer .
|
|} WHERE {
| BIND(<http://rdfh.ch/0001/H6gBWUuJSuuO-CilHV8kQw> AS ?thing)
|
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| ?thing anything:hasInteger ?integer .
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingByIriWithRequestedValues.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query for a letter and get information about the persons associated with it (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:creationDate ?date .
|
| ?letter ?linkingProp1 ?person1 .
|
| ?person1 beol:hasFamilyName ?familyName .
|
|
| } WHERE {
| BIND(<http://rdfh.ch/0801/_B3lQa6tSymIq7_7SowBsA> AS ?letter)
| ?letter a beol:letter .
|
| ?letter beol:creationDate ?date .
|
| # testperson2
| ?letter ?linkingProp1 ?person1 .
|
| FILTER(?linkingProp1 = beol:hasAuthor || ?linkingProp1 = beol:hasRecipient)
|
| ?person1 beol:hasFamilyName ?familyName .
|
| } ORDER BY ?date
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithAuthorWithInformation.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"do a Gravsearch query for the pages of a book whose seqnum is lower than or equals 10, with the book as the main resource (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
| ?book incunabula:title ?title .
|
| ?page knora-api:isPartOf ?book ;
| incunabula:seqnum ?seqnum .
| } WHERE {
| BIND(<http://rdfh.ch/0803/b6b5ff1eb703> AS ?book)
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| ?page incunabula:seqnum ?seqnum .
|
| ?seqnum knora-api:intValueAsInt ?seqnumInt .
|
| FILTER(?seqnumInt <= 10)
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/incomingPagesForBook.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"reject a Gravsearch query containing a statement whose subject is not the main resource and whose object is used in ORDER BY (submitting the complex schema)" in {
val gravsearchQuery =
"""PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
| CONSTRUCT {
| ?book knora-api:isMainResource true .
| ?book incunabula:title ?title .
|
| ?page knora-api:isPartOf ?book ;
| incunabula:seqnum ?seqnum .
| } WHERE {
| BIND(<http://rdfh.ch/0803/b6b5ff1eb703> AS ?book)
|
| ?book incunabula:title ?title .
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| ?page incunabula:seqnum ?seqnum .
|
| ?seqnum knora-api:intValueAsInt ?seqnumInt .
|
| FILTER(?seqnumInt <= 10)
|
| } ORDER BY ?seqnum
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.BAD_REQUEST, response.toString)
}
}
"do a Gravsearch query for regions that belong to pages that are part of a book with the title 'Zeitglöcklein des Lebens und Leidens Christi' (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?region knora-api:isMainResource true .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book incunabula:title ?title .
|
|} WHERE {
| ?region a knora-api:Region .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString "Zeitglöcklein des Lebens und Leidens Christi" .
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/regionsOfZeitgloecklein.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"reject a Gravsearch query in the complex schema that uses knora-api:isMainResource in the simple schema" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?region knora-api-simple:isMainResource true .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book incunabula:title ?title .
|
|} WHERE {
| ?region a knora-api:Region .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString "Zeitglöcklein des Lebens und Leidens Christi" .
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.BAD_REQUEST, response.toString)
}
}
"reject a Gravsearch query in the complex schema that uses a Knora property in the simple schema" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX incunabula-simple: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?region knora-api:isMainResource true .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book incunabula:title ?title .
|
|} WHERE {
| ?region a knora-api:Region .
|
| ?region knora-api:isRegionOf ?page .
|
| ?page a incunabula:page .
|
| ?page knora-api:isPartOf ?book .
|
| ?book a incunabula:book .
|
| ?book incunabula-simple:title ?title .
|
| FILTER(?title = "Zeitglöcklein des Lebens und Leidens Christi")
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.BAD_REQUEST, response.toString)
}
}
"reject a Gravsearch query that uses a string literal in the CONSTRUCT clause" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString "Zeitglöcklein des Lebens und Leidens Christi" .
|
|
|} WHERE {
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString "Zeitglöcklein des Lebens und Leidens Christi" .
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.BAD_REQUEST, response.toString)
}
}
"reject a Gravsearch query in the complex schema with a variable in the CONSTRUCT clause referring to a non-property entity that isn't a resource or value" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|
|CONSTRUCT {
| ?book knora-api:isMainResource true .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString ?titleStr .
|
|
|} WHERE {
| ?book a incunabula:book .
|
| ?book incunabula:title ?title .
|
| ?title knora-api:valueAsString ?titleStr .
|
| FILTER(?titleStr = "Zeitglöcklein des Lebens und Leidens Christi")
|
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.BAD_REQUEST, response.toString)
}
}
"search for a list value that refers to a particular list node (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
| CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasListItem ?listItem .
|
| } WHERE {
| ?thing anything:hasListItem ?listItem .
|
| ?listItem knora-api:listValueAsListNode <http://rdfh.ch/lists/0001/treeList02> .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingReferringToSpecificListNode.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for a list value that does not refer to a particular list node (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
| CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasListItem ?listItem .
|
| } WHERE {
| ?thing anything:hasListItem ?listItem .
|
| FILTER NOT EXISTS {
|
| ?listItem knora-api:listValueAsListNode <http://rdfh.ch/lists/0001/treeList02> .
|
| }
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingNotReferringToSpecificListNode.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for a list value that does not refer to a particular list node, performing a count query (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
| CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasListItem ?listItem .
|
| } WHERE {
| ?thing anything:hasListItem ?listItem .
|
| FILTER NOT EXISTS {
|
| ?listItem knora-api:listValueAsListNode <http://rdfh.ch/lists/0001/treeList02> .
|
| }
|
| }
""".stripMargin
Post("/v2/searchextended/count", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
checkCountResponse(responseAs[String], 2)
}
}
"search for a list value that refers to a particular list node that has subnodes (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
| CONSTRUCT {
| ?thing knora-api:isMainResource true .
|
| ?thing anything:hasListItem ?listItem .
|
| } WHERE {
| ?thing anything:hasListItem ?listItem .
|
| ?listItem knora-api:listValueAsListNode <http://rdfh.ch/lists/0001/treeList> .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingReferringToSpecificListNodeWithSubnodes.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for a beol:letter with list value that refers to a particular list node (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX beol: <http://0.0.0.0:3333/ontology/0801/beol/v2#>
|
| CONSTRUCT {
| ?letter knora-api:isMainResource true .
|
| ?letter beol:hasSubject ?subject .
|
| } WHERE {
| ?letter a beol:letter .
|
| ?letter beol:hasSubject ?subject .
|
| ?subject knora-api:listValueAsListNode <http://rdfh.ch/lists/0801/logarithmic_curves> .
|
| }
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/letterWithSubject.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for a standoff link using the knora-api:standoffLink function (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX standoff: <http://api.knora.org/ontology/standoff/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| ?text knora-api:textValueHasStandoff ?standoffTag .
| ?standoffTag a knora-api:StandoffLinkTag .
| FILTER knora-api:standoffLink(?thing, ?standoffTag, ?otherThing)
| ?otherThing a anything:Thing .
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingsWithStandoffLinks.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for a standoff link using the knora-api:standoffLink function, referring to the target resource in the function call only (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX standoff: <http://api.knora.org/ontology/standoff/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| ?text knora-api:textValueHasStandoff ?standoffTag .
| ?standoffTag a knora-api:StandoffLinkTag .
| FILTER knora-api:standoffLink(?thing, ?standoffTag, ?otherThing)
|
|    # Note that ?otherThing is only used as an argument in the function, not in any other statement
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingsWithStandoffLinks.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for a standoff link using the knora-api:standoffLink function specifying an Iri for the target resource (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX standoff: <http://api.knora.org/ontology/standoff/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| ?text knora-api:textValueHasStandoff ?standoffTag .
| ?standoffTag a knora-api:StandoffLinkTag .
| FILTER knora-api:standoffLink(?thing, ?standoffTag, <http://rdfh.ch/0001/a-thing>)
| <http://rdfh.ch/0001/a-thing> a anything:Thing .
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingsWithStandoffLinksToSpecificThing.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for a standoff link using the knora-api:standoffLink function specifying an Iri for the target resource, referring to the target resource in the function call only (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX standoff: <http://api.knora.org/ontology/standoff/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| ?text knora-api:textValueHasStandoff ?standoffTag .
| ?standoffTag a knora-api:StandoffLinkTag .
| FILTER knora-api:standoffLink(?thing, ?standoffTag, <http://rdfh.ch/0001/a-thing>)
|
|    # Note that <http://rdfh.ch/0001/a-thing> is only used as an argument in the function, not in any other statement
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/thingsWithStandoffLinksToSpecificThing.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for matching words in a particular type of standoff tag (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX standoff: <http://api.knora.org/ontology/standoff/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasRichtext ?text .
|} WHERE {
| ?thing a anything:Thing .
| ?thing anything:hasRichtext ?text .
| ?text knora-api:valueAsString ?textStr .
| ?text knora-api:textValueHasStandoff ?standoffTag .
| ?standoffTag a standoff:StandoffItalicTag .
| FILTER knora-api:matchInStandoff(?textStr, ?standoffTag, "interesting text")
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
val expectedAnswerJSONLD = readOrWriteTextFile(responseAs[String], new File("src/test/resources/test-data/searchR2RV2/ThingWithRichtextWithTermTextInParagraph.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = responseAs[String])
}
}
"search for a standoff date tag indicating a date in a particular range (submitting the complex schema)" in {
// First, create a standoff-to-XML mapping that can handle standoff date tags.
val mappingFileToSend = new File("_test_data/test_route/texts/mappingForHTML.xml")
val paramsCreateHTMLMappingFromXML =
s"""
|{
| "knora-api:mappingHasName": "HTMLMapping",
| "knora-api:attachedToProject": {
| "@id": "$anythingProjectIri"
| },
| "rdfs:label": "mapping for HTML",
| "@context": {
| "rdfs": "${OntologyConstants.Rdfs.RdfsPrefixExpansion}",
| "knora-api": "${OntologyConstants.KnoraApiV2Complex.KnoraApiV2PrefixExpansion}"
| }
|}
""".stripMargin
val formDataMapping = Multipart.FormData(
Multipart.FormData.BodyPart(
"json",
HttpEntity(ContentTypes.`application/json`, paramsCreateHTMLMappingFromXML)
),
Multipart.FormData.BodyPart(
"xml",
HttpEntity.fromPath(ContentTypes.`text/xml(UTF-8)`, mappingFileToSend.toPath),
Map("filename" -> mappingFileToSend.getName)
)
)
// send mapping xml to route
Post("/v2/mapping", formDataMapping) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> standoffPath ~> check {
assert(status == StatusCodes.OK)
}
// Next, create a resource with a text value containing a standoff date tag. TODO: Use API v2.
val xmlFileToSend = new File("_test_data/test_route/texts/HTML.xml")
val newValueParams =
s"""
|{
| "project_id": "http://rdfh.ch/projects/0001",
| "res_id": "http://rdfh.ch/0001/a-thing",
| "prop": "http://www.knora.org/ontology/0001/anything#hasText",
| "richtext_value": {
| "xml": ${JsString(FileUtil.readTextFile(xmlFileToSend))},
| "mapping_id": "$anythingProjectIri/mappings/HTMLMapping"
| }
|}
""".stripMargin
Post("/v1/values", HttpEntity(ContentTypes.`application/json`, newValueParams)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> valuesPath ~> check {
assert(status == StatusCodes.OK)
}
// Finally, do a Gravsearch query that finds the date tag.
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| ?text knora-api:textValueHasStandoff ?standoffEventTag .
| ?standoffEventTag a anything:StandoffEventTag .
| FILTER(knora-api:toSimpleDate(?standoffEventTag) = "GREGORIAN:2016-12 CE"^^knora-api-simple:Date)
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
assert(responseAs[String].contains("we will have a party"))
}
}
"search for a standoff tag using knora-api:standoffTagHasStartAncestor (submitting the complex schema)" in {
val gravsearchQuery =
"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX standoff: <http://api.knora.org/ontology/standoff/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|PREFIX knora-api-simple: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasText ?text .
|} WHERE {
| ?thing a anything:Thing .
| ?thing anything:hasText ?text .
| ?text knora-api:textValueHasStandoff ?standoffDateTag .
| ?standoffDateTag a knora-api:StandoffDateTag .
| FILTER(knora-api:toSimpleDate(?standoffDateTag) = "GREGORIAN:2016-12-24 CE"^^knora-api-simple:Date)
| ?standoffDateTag knora-api:standoffTagHasStartAncestor ?standoffParagraphTag .
| ?standoffParagraphTag a standoff:StandoffParagraphTag .
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
assert(status == StatusCodes.OK, response.toString)
assert(responseAs[String].contains("we will have a party"))
}
}
"reject a link value property in a query in the simple schema" in {
val gravsearchQuery =
"""
|PREFIX incunabula: <http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#>
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/simple/v2#>
|
|CONSTRUCT {
| ?book knora-api:isMainResource true .
| ?book incunabula:title ?title .
| ?page incunabula:partOfValue ?book .
|} WHERE {
| ?book a incunabula:book .
| ?book incunabula:title ?title .
| ?page a incunabula:page .
| ?page incunabula:partOfValue ?book .
|}
""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(incunabulaUserEmail, password)) ~> searchPath ~> check {
val responseStr = responseAs[String]
assert(status == StatusCodes.NOT_FOUND, responseStr)
assert(responseStr.contains("http://0.0.0.0:3333/ontology/0803/incunabula/simple/v2#partOfValue"))
}
}
"create a resource with a large text containing a lot of markup (32849 words, 6738 standoff tags)" ignore { // uses too much memory for GitHub CI
// Create a resource containing the text of Hamlet.
val hamletXml = FileUtil.readTextFile(new File("src/test/resources/test-data/resourcesR2RV2/hamlet.xml"))
val jsonLDEntity =
s"""{
| "@type" : "anything:Thing",
| "anything:hasRichtext" : {
| "@type" : "knora-api:TextValue",
| "knora-api:textValueAsXml" : ${stringFormatter.toJsonEncodedString(hamletXml)},
| "knora-api:textValueHasMapping" : {
| "@id" : "http://rdfh.ch/standoff/mappings/StandardMapping"
| }
| },
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "rdfs:label" : "test thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}""".stripMargin
Post("/v2/resources", HttpEntity(RdfMediaTypes.`application/ld+json`, jsonLDEntity)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcePath ~> check {
val resourceCreateResponseStr = responseAs[String]
assert(status == StatusCodes.OK, resourceCreateResponseStr)
val resourceCreateResponseAsJsonLD: JsonLDDocument = JsonLDUtil.parseJsonLD(resourceCreateResponseStr)
val resourceIri: IRI = resourceCreateResponseAsJsonLD.body.requireStringWithValidation(JsonLDConstants.ID, stringFormatter.validateAndEscapeIri)
assert(resourceIri.toSmartIri.isKnoraDataIri)
hamletResourceIri.set(resourceIri)
}
}
"search for the large text and its markup and receive it as XML, and check that it matches the original XML" ignore { // depends on previous test
val hamletXml = FileUtil.readTextFile(new File("src/test/resources/test-data/resourcesR2RV2/hamlet.xml"))
val gravsearchQuery =
s"""PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX standoff: <http://api.knora.org/ontology/standoff/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?thing knora-api:isMainResource true .
| ?thing anything:hasRichtext ?text .
|} WHERE {
| BIND(<${hamletResourceIri.get}> AS ?thing)
| ?thing a anything:Thing .
| ?thing anything:hasRichtext ?text .
|}""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
val searchResponseStr = responseAs[String]
assert(status == StatusCodes.OK, searchResponseStr)
val searchResponseAsJsonLD: JsonLDDocument = JsonLDUtil.parseJsonLD(searchResponseStr)
val xmlFromResponse: String = searchResponseAsJsonLD.body.requireObject("http://0.0.0.0:3333/ontology/0001/anything/v2#hasRichtext").
requireString(OntologyConstants.KnoraApiV2Complex.TextValueAsXml)
// Compare it to the original XML.
val xmlDiff: Diff = DiffBuilder.compare(Input.fromString(hamletXml)).withTest(Input.fromString(xmlFromResponse)).build()
xmlDiff.hasDifferences should be(false)
}
}
"find a resource with two different incoming links" in {
// Create the target resource.
val targetResource: String =
"""{
| "@type" : "anything:BlueThing",
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "rdfs:label" : "blue thing with incoming links",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}""".stripMargin
val targetResourceIri: IRI = Post(s"/v2/resources", HttpEntity(RdfMediaTypes.`application/ld+json`, targetResource)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcePath ~> check {
val createTargetResourceResponseStr = responseAs[String]
assert(response.status == StatusCodes.OK, createTargetResourceResponseStr)
val responseJsonDoc: JsonLDDocument = responseToJsonLDDocument(response)
responseJsonDoc.body.requireStringWithValidation(JsonLDConstants.ID, stringFormatter.validateAndEscapeIri)
}
assert(targetResourceIri.toSmartIri.isKnoraDataIri)
val sourceResource1: String =
s"""{
| "@type" : "anything:BlueThing",
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "anything:hasBlueThingValue" : {
| "@type" : "knora-api:LinkValue",
| "knora-api:linkValueHasTargetIri" : {
| "@id" : "$targetResourceIri"
| }
| },
| "rdfs:label" : "blue thing with link to other blue thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}""".stripMargin
val sourceResource1Iri: IRI = Post(s"/v2/resources", HttpEntity(RdfMediaTypes.`application/ld+json`, sourceResource1)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcePath ~> check {
val createSourceResource1ResponseStr = responseAs[String]
assert(response.status == StatusCodes.OK, createSourceResource1ResponseStr)
val responseJsonDoc: JsonLDDocument = responseToJsonLDDocument(response)
responseJsonDoc.body.requireStringWithValidation(JsonLDConstants.ID, stringFormatter.validateAndEscapeIri)
}
assert(sourceResource1Iri.toSmartIri.isKnoraDataIri)
val sourceResource2: String =
s"""{
| "@type" : "anything:Thing",
| "knora-api:attachedToProject" : {
| "@id" : "http://rdfh.ch/projects/0001"
| },
| "anything:hasOtherThingValue" : {
| "@type" : "knora-api:LinkValue",
| "knora-api:linkValueHasTargetIri" : {
| "@id" : "$targetResourceIri"
| }
| },
| "rdfs:label" : "thing with link to blue thing",
| "@context" : {
| "rdf" : "http://www.w3.org/1999/02/22-rdf-syntax-ns#",
| "knora-api" : "http://api.knora.org/ontology/knora-api/v2#",
| "rdfs" : "http://www.w3.org/2000/01/rdf-schema#",
| "xsd" : "http://www.w3.org/2001/XMLSchema#",
| "anything" : "http://0.0.0.0:3333/ontology/0001/anything/v2#"
| }
|}""".stripMargin
val sourceResource2Iri: IRI = Post(s"/v2/resources", HttpEntity(RdfMediaTypes.`application/ld+json`, sourceResource2)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> resourcePath ~> check {
val createSourceResource2ResponseStr = responseAs[String]
assert(response.status == StatusCodes.OK, createSourceResource2ResponseStr)
val responseJsonDoc: JsonLDDocument = responseToJsonLDDocument(response)
responseJsonDoc.body.requireStringWithValidation(JsonLDConstants.ID, stringFormatter.validateAndEscapeIri)
}
assert(sourceResource2Iri.toSmartIri.isKnoraDataIri)
val gravsearchQuery =
s"""
|PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX standoff: <http://api.knora.org/ontology/standoff/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?targetThing knora-api:isMainResource true .
| ?firstIncoming anything:hasBlueThing ?targetThing .
| ?secondIncoming anything:hasOtherThing ?targetThing .
|} WHERE {
| ?targetThing a anything:BlueThing .
| ?firstIncoming anything:hasBlueThing ?targetThing .
| ?secondIncoming anything:hasOtherThing ?targetThing .
|}
""".stripMargin
val searchResultIri: IRI = Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> addCredentials(BasicHttpCredentials(anythingUserEmail, password)) ~> searchPath ~> check {
val searchResponseStr = responseAs[String]
assert(status == StatusCodes.OK, searchResponseStr)
val responseJsonDoc: JsonLDDocument = responseToJsonLDDocument(response)
responseJsonDoc.body.requireStringWithValidation(JsonLDConstants.ID, stringFormatter.validateAndEscapeIri)
}
assert(searchResultIri == targetResourceIri)
}
"get a resource with a link to another resource that the user doesn't have permission to see" in {
val gravsearchQuery =
s"""PREFIX knora-api: <http://api.knora.org/ontology/knora-api/v2#>
|PREFIX anything: <http://0.0.0.0:3333/ontology/0001/anything/v2#>
|
|CONSTRUCT {
| ?mainThing knora-api:isMainResource true .
| ?mainThing anything:hasOtherThing ?hiddenThing .
| ?hiddenThing anything:hasInteger ?intValInHiddenThing .
| ?mainThing anything:hasOtherThing ?visibleThing .
| ?visibleThing anything:hasInteger ?intValInVisibleThing .
|} WHERE {
| ?mainThing a anything:Thing .
| ?mainThing anything:hasOtherThing ?hiddenThing .
| ?hiddenThing anything:hasInteger ?intValInHiddenThing .
| ?intValInHiddenThing knora-api:intValueAsInt 123454321 .
| ?mainThing anything:hasOtherThing ?visibleThing .
| ?visibleThing anything:hasInteger ?intValInVisibleThing .
| ?intValInVisibleThing knora-api:intValueAsInt 543212345 .
|}""".stripMargin
Post("/v2/searchextended", HttpEntity(SparqlQueryConstants.`application/sparql-query`, gravsearchQuery)) ~> searchPath ~> check {
val searchResponseStr = responseAs[String]
assert(status == StatusCodes.OK, searchResponseStr)
val expectedAnswerJSONLD = readOrWriteTextFile(searchResponseStr, new File("src/test/resources/test-data/searchR2RV2/ThingWithHiddenThing.jsonld"), writeTestDataFiles)
compareJSONLDForResourcesResponse(expectedJSONLD = expectedAnswerJSONLD, receivedJSONLD = searchResponseStr)
}
}
}
}
|
musicEnfanthen/Knora
|
webapi/src/test/scala/org/knora/webapi/e2e/v2/SearchRouteV2R2RSpec.scala
|
Scala
|
agpl-3.0
| 378,725 |
package net.nablux.dockergen
import scala.collection.mutable
trait DockerImage {
protected var commands = new mutable.MutableList[List[String]]
protected var maintainer: Option[String] = None
def image: String
def tag: String
protected def addCommand(which: String, params: String*) = {
commands += (which :: params.toList)
}
def toDockerString: String = {
val sb = new StringBuffer
// print header
sb append s"FROM $image:$tag\n"
maintainer.foreach(m =>
sb append s"MAINTAINER $m\n")
// print all commands now
commands.foreach(cmd => {
sb append cmd.mkString(" ")
sb append "\n"
})
sb.toString
}
def MAINTAINER(name: String, email: String) = {
maintainer = Some(s"$name <$email>")
}
def ENV(name: String, value: String) = {
addCommand("ENV", name, value)
}
def RUN(cmd: String) = {
addCommand("RUN", cmd)
}
def CMD(cmd: String) = {
addCommand("CMD", cmd)
}
def ##(comment: String) = {
addCommand("#", comment)
}
}
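// Hypothetical usage sketch (not part of the original file): one way a concrete image
// definition could use the DSL above; the image name, tag and commands are illustrative only.
object ExampleNodeImage extends DockerImage {
  val image = "ubuntu"
  val tag = "14.04"

  MAINTAINER("Jane Doe", "jane@example.com")
  ENV("DEBIAN_FRONTEND", "noninteractive")
  RUN("apt-get update && apt-get install -y nodejs")
  CMD("node")
}
// ExampleNodeImage.toDockerString then renders the FROM/MAINTAINER/ENV/RUN/CMD lines.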
|
tgpfeiffer/dockergen
|
src/main/scala/net/nablux/dockergen/DockerImage.scala
|
Scala
|
bsd-3-clause
| 1,036 |
package meow
import org.specs._
object GrowlSpec extends Specification {
"Growl" should {
"find a binary" in {
Growl.bin must_== "/usr/local/bin/growlnotify"
}
"make sticky notifications" in {
(Growl sticky() argList) must contain("-s")
}
"show notifications with a message" in {
val g = Growl message("foo")
(g argList) must contain("-m")
(g argList) must contain("foo")
}
"show notifications with a title" in {
val g = Growl title("foo")
(g argList) must contain("-t")
(g argList) must contain("foo")
}
}
}
|
softprops/meow
|
src/test/scala/meow/GrowlSpec.scala
|
Scala
|
mit
| 597 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package samples
import scala.language.postfixOps
import scala.util.parsing.combinator.RegexParsers
class LoopParser extends RegexParsers {
override type Elem = Char
def identifier = """[_\p{L}][_\p{L}\p{Nd}]*""".r
def integer = """(0|[1-9]\d*)""".r ^^ { _.toInt }
def loop = "for"~identifier~"in"~integer~"to"~integer~statement ^^ {
case f~variable~i~lBound~t~uBound~statement => ForLoop(variable, lBound, uBound,statement)
}
def statements = statement*
def block = "{"~>statements<~"}" ^^ { l => Block(l) }
def statement : Parser[Statement] = loop | block
}
abstract trait Statement
case class Block(statements : List[Statement]) extends Statement
case class ForLoop(variable: String, lowerBound:Int, upperBound: Int, statement:Statement) extends Statement
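// Hypothetical usage sketch (not part of the original file): feeds a small nested loop
// through the parser above; the demo object name and input string are illustrative only.
object LoopParserDemo extends App {
  val parser = new LoopParser
  val result = parser.parseAll(parser.statement, "for i in 1 to 3 { for j in 0 to 5 { } }")
  result match {
    case parser.Success(ast, _) => println(ast) // ForLoop(i,1,3,Block(List(ForLoop(j,0,5,Block(List())))))
    case failure                => println(failure)
  }
}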
|
hjuergens/date-parser
|
date-rule-combinators/src/main/scala/samples/LoopParser.scala
|
Scala
|
apache-2.0
| 990 |
package dk.bayes.dsl.variable.gaussian.multivariate
import dk.bayes.dsl.InferEngine
import dk.bayes.dsl.variable.gaussian.multivariatelinear.MultivariateLinearGaussian
import dk.bayes.math.gaussian.canonical.DenseCanonicalGaussian
import breeze.linalg.inv
import breeze.linalg.cholesky
import dk.bayes.math.linear.invchol
object inferMultivariateGaussianSimplest extends InferEngine[MultivariateGaussian, MultivariateGaussian] {
/**
* Supported model: x -> z
* z - MultivariateLinearGaussian z = x + gaussian noise
*/
def isSupported(x: MultivariateGaussian): Boolean = {
val child = x.getChildren match {
case Seq(child) if child.isInstanceOf[MultivariateLinearGaussian] => child.asInstanceOf[MultivariateLinearGaussian]
case _ => return false
}
(child.getParents().size == 1 && child.getParents()(0).eq(x)) &&
!child.hasChildren &&
child.yValue.isDefined
}
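  /**
   * Sketch of the algebra used in infer() below (the standard linear-Gaussian posterior):
   *   posterior covariance: V_post = (V_x^-1 + A^T * V_z^-1 * A)^-1
   *   posterior mean:       m_post = V_post * (A^T * V_z^-1 * (y - b) + V_x^-1 * m_x)
   * where the child likelihood is z = A*x + b + noise with covariance V_z and observed value y.
   */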
def infer(x: MultivariateGaussian): MultivariateGaussian = {
val child = x.getChildren.head.asInstanceOf[MultivariateLinearGaussian]
val xVInv = invchol(cholesky(x.v).t)
val childVInv = invchol(cholesky(child.v).t)
val posteriorVar = invchol(cholesky(xVInv + child.a.t * childVInv * child.a).t)
val posteriorMean = posteriorVar * (child.a.t * childVInv * (child.yValue.get - child.b) + xVInv * x.m)
new MultivariateGaussian(posteriorMean, posteriorVar)
}
}
|
danielkorzekwa/bayes-scala
|
src/main/scala/dk/bayes/dsl/variable/gaussian/multivariate/inferMultivariateGaussianSimplest.scala
|
Scala
|
bsd-2-clause
| 1,408 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.accounts.frs102.BoxesFixture
import uk.gov.hmrc.ct.box.CtValidation
class ACQ5034Spec extends WordSpec with Matchers with BoxesFixture {
"ACQ5034" should {
"for Full Accounts pass validation" when {
"all no value" in {
ac44noValue
ac45noValue
acq5031noValue
acq5032noValue
acq5033noValue
acq5035noValue
ACQ5034(None).validate(boxRetriever) shouldBe empty
}
}
"for Full Accounts fail validation" when {
val cannotExistError = Set(CtValidation(Some("ACQ5034"),"error.ACQ5034.cannot.exist",None))
"ac44,ac45 have no value and acq5034 has value" in {
ac44noValue
ac45noValue
acq5031noValue
acq5032noValue
acq5033noValue
acq5035noValue
ACQ5034(Some(false)).validate(boxRetriever) shouldBe cannotExistError
}
}
}
}
|
hmrc/ct-calculations
|
src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/ACQ5034Spec.scala
|
Scala
|
apache-2.0
| 1,597 |
package scorex.testkit.properties
import bifrost.PersistentNodeViewModifier
import bifrost.consensus.{History, SyncInfo}
import bifrost.transaction.Transaction
import bifrost.transaction.box.Box
import bifrost.transaction.box.proposition.Proposition
import bifrost.transaction.state.{MinimalState, StateChanges}
import scorex.testkit.TestkitHelpers
trait StateChangesGenerationTest[P <: Proposition,
TX <: Transaction[P],
PM <: PersistentNodeViewModifier[P, TX],
B <: Box[P],
ST <: MinimalState[P, B, TX, PM, ST],
SI <: SyncInfo,
HT <: History[P, TX, PM, SI, HT]] extends StateTests[P, TX, PM, B, ST] with TestkitHelpers {
val history: HT
def genValidModifier(history: HT): PM
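  /**
   * Property sketch: for a freshly generated valid block, applying the state changes it
   * induces must make every appended box retrievable and every removed box absent.
   */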
property("State should be able to generate changes from block and apply them") {
check { _ =>
val block = genValidModifier(history)
val blockChanges = state.changes(block).get
val existingBoxIds = blockChanges.boxIdsToRemove.filter(bi => state.closedBox(bi).isDefined)
val changes: StateChanges[P, B] = blockChanges.copy(boxIdsToRemove = existingBoxIds)
val newState = state.applyChanges(changes, block.id).get
changes.toAppend.foreach { b =>
newState.closedBox(b.id).isDefined shouldBe true
}
changes.boxIdsToRemove.foreach { bId =>
newState.closedBox(bId).isDefined shouldBe false
}
}
}
}
|
Topl/Project-Bifrost
|
src/test/scala/scorex/testkit/properties/StateChangesGenerationTest.scala
|
Scala
|
mpl-2.0
| 1,401 |
package info.glennengstrand.news.dao
import scala.concurrent.Future
import scala.concurrent._
import ExecutionContext.Implicits.global
import scala.collection.JavaConverters._
import com.datastax.oss.driver.api.core.cql.{BoundStatement, PreparedStatement}
import info.glennengstrand.news.model.Outbound
class OutboundDao extends DataAccess[Outbound] {
private val selectCql = "select toTimestamp(occurred) as occurred, subject, story from Outbound where participantid = ? order by occurred desc"
private val insertCql = "insert into Outbound (ParticipantID, Occurred, Subject, Story) values (?, now(), ?, ?)"
private lazy val insertStmt = CassandraDao.session.prepare(insertCql)
private lazy val selectStmt = CassandraDao.session.prepare(selectCql)
override def fetchSingle(id: Int): Future[Outbound] = {
Future.successful(Outbound(None, None, None, None))
}
override def insert(ob: Outbound): Future[Outbound] = {
val bs = insertStmt.bind(new java.lang.Integer(extractId(ob.from.get.toString.asInstanceOf[String]).toInt), ob.subject.get.toString.asInstanceOf[String], ob.story.get.toString.asInstanceOf[String])
CassandraDao.session.execute(bs)
Future(ob)
}
override def fetchMulti(id: Int): Future[Seq[Outbound]] = {
val bs = selectStmt.bind(id.asInstanceOf[Object])
val retVal = for {
r <- CassandraDao.session.execute(bs).iterator().asScala
} yield Outbound(Option(toLink(id.toLong)), Option(format(r.getInstant(0))), Option(r.getString(1)), Option(r.getString(2)))
Future.successful(retVal.toSeq)
}
}
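// Hypothetical usage sketch (not part of the original file): assumes a reachable
// CassandraDao.session and that the participant link format matches what extractId/toLink
// expect; shown only to illustrate the asynchronous DAO surface.
//
//   import scala.concurrent.Await
//   import scala.concurrent.duration._
//
//   val dao = new OutboundDao
//   val posted = Await.result(
//     dao.insert(Outbound(Some("/participant/1"), None, Some("subject"), Some("story text"))),
//     5.seconds)
//   val feed: Seq[Outbound] = Await.result(dao.fetchMulti(1), 5.seconds)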
|
gengstrand/clojure-news-feed
|
server/feed11/src/main/scala/info/glennengstrand/news/dao/OutboundDao.scala
|
Scala
|
epl-1.0
| 1,569 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.pipes
import org.neo4j.cypher.internal.commands.expressions.Expression
import java.lang.String
import org.neo4j.helpers.ThisShouldNotHappenError
import collection.mutable.Map
class SlicePipe(source:Pipe, skip:Option[Expression], limit:Option[Expression]) extends Pipe {
// val symbols = source.symbols
val symbols = source.symbols
//TODO: Make this nicer. I'm sure it's expensive and silly.
def createResults(state: QueryState): Traversable[ExecutionContext] = {
val sourceTraversable = source.createResults(state)
if(sourceTraversable.isEmpty)
return Seq()
val first: ExecutionContext = sourceTraversable.head
def asInt(v:Expression)=v(first).asInstanceOf[Int]
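    // Paging semantics: SKIP alone drops the first n rows, LIMIT alone keeps at most n rows,
    // and SKIP + LIMIT returns the half-open window [start, start + count) of the source.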
(skip, limit) match {
case (Some(x), None) => sourceTraversable.drop(asInt(x))
case (None, Some(x)) => sourceTraversable.take(asInt(x))
case (Some(startAt), Some(count)) => {
val start = asInt(startAt)
sourceTraversable.slice(start, start + asInt(count))
}
case (None, None)=>throw new ThisShouldNotHappenError("Andres Taylor", "A slice pipe that doesn't slice should never exist.")
}
}
override def executionPlan(): String = {
val info = (skip, limit) match {
case (None, Some(l)) => "Limit: " + l.toString()
case (Some(s), None) => "Skip: " + s.toString()
case (Some(s), Some(l)) => "Skip: " + s.toString() + ", " + "Limit: " + l.toString()
case (None, None)=>throw new ThisShouldNotHappenError("Andres Taylor", "A slice pipe that doesn't slice should never exist.")
}
source.executionPlan() + "\r\n" + "Slice(" + info + ")"
}
}
|
dksaputra/community
|
cypher/src/main/scala/org/neo4j/cypher/internal/pipes/SlicePipe.scala
|
Scala
|
gpl-3.0
| 2,457 |
package org.moe.runtime.nativeobjects
import org.moe.runtime._
import scala.util.{Try, Success, Failure}
class MoeStrObject(
v: String, t : Option[MoeType] = None
) extends MoeNativeObject[String](v, t) {
// runtime methods
def increment (r: MoeRuntime): Unit = setNativeValue(MoeUtil.magicalStringIncrement(getNativeValue))
def chomp (r: MoeRuntime): MoeBoolObject = {
val s = getNativeValue
if (s.indexOf("\n") == s.length - 1) {
setNativeValue(s.dropRight(1))
return r.NativeObjects.getTrue
} else {
return r.NativeObjects.getFalse
}
}
def chop (r: MoeRuntime): MoeStrObject = {
val s = getNativeValue
val (n, c) = s.splitAt(s.length - 1)
setNativeValue(n)
r.NativeObjects.getStr(c)
}
def uc (r: MoeRuntime): MoeStrObject = r.NativeObjects.getStr(getNativeValue.toUpperCase())
def lc (r: MoeRuntime): MoeStrObject = r.NativeObjects.getStr(getNativeValue.toLowerCase())
def ucfirst (r: MoeRuntime): MoeStrObject = r.NativeObjects.getStr(getNativeValue.capitalize)
def lcfirst (r: MoeRuntime): MoeStrObject = {
val s = getNativeValue
val (n, c) = s.splitAt(1)
r.NativeObjects.getStr(n.toLowerCase() + c)
}
def length (r: MoeRuntime): MoeIntObject = r.NativeObjects.getInt(getNativeValue.length)
def reverse (r: MoeRuntime): MoeStrObject = r.NativeObjects.getStr(getNativeValue.reverse)
def split (r: MoeRuntime, s: MoeStrObject): MoeArrayObject = r.NativeObjects.getArray(
getNativeValue.split(s.unboxToString.get).map(r.NativeObjects.getStr(_)).toArray:_*
)
def concat (r: MoeRuntime, x: MoeStrObject): MoeStrObject = r.NativeObjects.getStr(
getNativeValue + x.unboxToString.get
)
def concatAll (r: MoeRuntime, a: MoeArrayObject): MoeStrObject = r.NativeObjects.getStr(
getNativeValue + a.unboxToArrayBuffer.get.map(_.unboxToString.get).mkString
)
def pad (r: MoeRuntime, n: MoeIntObject): MoeStrObject = r.NativeObjects.getStr(
List.fill(n.unboxToInt.get)(" ").mkString + getNativeValue
)
def rpad (r: MoeRuntime, n: MoeIntObject): MoeStrObject = r.NativeObjects.getStr(
getNativeValue + List.fill(n.unboxToInt.get)(" ").mkString
)
def index (r: MoeRuntime): Unit = {} // ($substring, ?$position)
def rindex (r: MoeRuntime): Unit = {} // ($substring, ?$position)
def sprintf (r: MoeRuntime): Unit = {} // ($format, @items)
def substr (r: MoeRuntime): Unit = {} // ($offset, ?$length)
def quotemeta (r: MoeRuntime): Unit = {}
def repeat (r: MoeRuntime, other: MoeIntObject): MoeStrObject = {
val str = getNativeValue
val n = other.unboxToInt.get
r.NativeObjects.getStr(List.fill(n)(str).mkString)
}
// equality
def equal_to (r: MoeRuntime, other: MoeObject): MoeBoolObject =
r.NativeObjects.getBool(getNativeValue == other.unboxToString.get)
def not_equal_to (r: MoeRuntime, other: MoeObject): MoeBoolObject =
r.NativeObjects.getBool(getNativeValue != other.unboxToString.get)
def compare_to (r: MoeRuntime, other: MoeObject): MoeIntObject =
r.NativeObjects.getInt(
getNativeValue compareTo other.unboxToString.get match {
case 0 => 0
case r => if (r < 0) -1 else 1
}
)
// comparison
def less_than (r: MoeRuntime, other: MoeObject): MoeBoolObject =
r.NativeObjects.getBool(getNativeValue < other.unboxToString.get)
def greater_than (r: MoeRuntime, other: MoeObject): MoeBoolObject =
r.NativeObjects.getBool(getNativeValue > other.unboxToString.get)
def less_than_or_equal_to (r: MoeRuntime, other: MoeObject): MoeBoolObject =
r.NativeObjects.getBool(getNativeValue <= other.unboxToString.get)
def greater_than_or_equal_to (r: MoeRuntime, other: MoeObject): MoeBoolObject =
r.NativeObjects.getBool(getNativeValue >= other.unboxToString.get)
// regular expression matching
def matches (r: MoeRuntime, pattern: MoeStrObject): MoeBoolObject =
new MoeRegexObject(pattern.unboxToString.get).matches(r, this)
def matches (r: MoeRuntime, pattern: MoeRegexObject): MoeBoolObject =
pattern.matches(r, this)
// TODO: find method that returns a "Match" object to access captures etc
def subst (
r: MoeRuntime,
pattern: MoeStrObject,
replacement: MoeStrObject,
flags: MoeStrObject
): MoeStrObject =
r.NativeObjects.getStr(
if (flags.unboxToString.get == "g")
getNativeValue.replace(pattern.unboxToString.get, replacement.unboxToString.get)
else
getNativeValue.replaceFirst(pattern.unboxToString.get, replacement.unboxToString.get)
)
def subst (
r: MoeRuntime,
pattern: MoeRegexObject,
replacement: MoeStrObject,
flags: MoeStrObject
): MoeStrObject =
pattern.replace(r, this, replacement, Some(flags))
// transliteration -- like in Perl5, except /r flag is the default
// behavior; i.e. the original string is not modified and the
// transliterated string is returned
import scala.util.matching.Regex._
def trans(
r: MoeRuntime,
search: MoeStrObject,
replace: MoeStrObject,
flags: MoeStrObject
): MoeStrObject = {
def expandCharSequence(s: String): List[Char] = {
s.foldLeft(List[Char]()){
(a, c) => if (a.length > 1 && a.last == '-') a.dropRight(2) ++ (a.init.last to c).toList else a ++ List(c)
}
}
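    // e.g. expandCharSequence("a-d") == List('a', 'b', 'c', 'd'); characters that are not
    // part of a '-' range are kept as-is.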
val complement = flags.unboxToString.get.contains('c')
val squash = flags.unboxToString.get.contains('s')
val delete = flags.unboxToString.get.contains('d')
val searchList = expandCharSequence(search.unboxToString.get)
var replaceList_t = expandCharSequence(replace.unboxToString.get)
val replaceList = if (delete) {
replaceList_t // use the replace-list as is
}
else {
if (replaceList_t.isEmpty)
searchList
else // truncate/extend replace-list to match search-list length
if (replaceList_t.length > searchList.length)
replaceList_t.drop(replaceList_t.length - searchList.length)
else if (searchList.length > replaceList_t.length)
replaceList_t ++ List.fill(searchList.length - replaceList_t.length)(replaceList_t.last)
else
replaceList_t
}
val transMap = searchList.zip(replaceList).toMap
def isFound(c: Char) = if (complement) !searchList.contains(c) else searchList.contains(c)
def maybeSquashed(a: String, c: Char) = if (squash && !a.isEmpty && a.last == c) a else a + c
r.NativeObjects.getStr(
getNativeValue.foldLeft(""){
(a, c) => {
if (isFound(c)) {
if (complement)
if (delete) a else maybeSquashed(a, replaceList.last)
else
transMap.get(c) match {
case Some(x) => maybeSquashed(a, x)
case None => if (delete) a else a + c
}
}
else {
a + c
}
}
}
)
}
// MoeNativeObject overrides
override def copy = new MoeStrObject(getNativeValue, getAssociatedType)
// MoeObject overrides
override def isFalse: Boolean = getNativeValue match {
case "" | "0" => true
case _ => false
}
override def toString = "\"" + getNativeValue + "\""
// unboxing
override def unboxToString : Try[String] = Try(getNativeValue)
override def unboxToInt : Try[Int] = Try(getNativeValue.toInt)
override def unboxToDouble : Try[Double] = Try(getNativeValue.toDouble)
}
|
MoeOrganization/moe
|
src/main/scala/org/moe/runtime/nativeobjects/MoeStrObject.scala
|
Scala
|
mit
| 7,713 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.tensor.Tensor
import org.scalatest.FlatSpec
/**
* Created by yao on 9/21/16.
*/
@com.intel.analytics.bigdl.tags.Parallel
class MulConstantSpec extends FlatSpec {
"MulConstant" should "generate correct output and grad" in {
val input = Tensor[Double](2, 2, 2).randn()
val scalar = 25.0
val expectedOutput = input.clone().apply1(_ * scalar)
val gradOutput = Tensor[Double](2, 2, 2).rand()
val expectedGrad = gradOutput.clone().apply1(_ * scalar)
val module = new MulConstant[Double](scalar)
val output = module.forward(input)
assert(expectedOutput equals output)
val gradInput = module.backward(input, gradOutput)
assert(gradInput equals expectedGrad )
}
"MulConstant with inPlace = true" should "generate correct output and grad" in {
var input = Tensor[Double](2, 2, 2).randn()
val scalar = 25.0
val expectedOutput = input.clone().apply1(_ * scalar)
val gradOutput = Tensor[Double](2, 2, 2).rand()
val expectedGrad = gradOutput.clone().apply1(_ * scalar)
// Test forward
val module = new MulConstant[Double](scalar, true)
val output = module.forward(input)
assert(expectedOutput equals output)
// Test backward
input = Tensor[Double](2, 2, 2).randn()
val expectedInput = input.clone().apply1(_ / scalar)
val gradInput = module.backward(input, gradOutput)
assert(gradInput equals expectedGrad)
assert(input equals expectedInput)
}
}
|
SeaOfOcean/BigDL
|
dl/src/test/scala/com/intel/analytics/bigdl/nn/MulConstantSpec.scala
|
Scala
|
apache-2.0
| 2,316 |
package dpla.ingestion3.reports
import org.apache.spark.sql.{DataFrame, Dataset, SparkSession}
import dpla.ingestion3.model._
import org.apache.spark.sql.functions.mean
/**
* Metadata Completeness QA report.
*/
class MetadataCompletenessReport(
val input: Dataset[OreAggregation],
val spark: SparkSession,
val params: Array[String] = Array()
) extends Report with Serializable {
override val sparkAppName: String = "MetadataCompletenessReport"
override def getInput: Dataset[OreAggregation] = input
override def getSparkSession: SparkSession = spark
override def getParams: Option[Array[String]] = {
if (params.nonEmpty) Some(params) else None
}
/**
* Process the incoming dataset.
*
* @see Report.process()
*
* @param ds Dataset of DplaMapData (mapped or enriched records)
* @param spark The Spark session, which contains encoding / parsing info.
* @return DataFrame, typically of Row[value: String, count: Int]
*/
override def process(ds: Dataset[OreAggregation], spark: SparkSession): DataFrame = {
val sqlContext = spark.sqlContext
val itemTallies: Dataset[CompletenessTally] = getItemTallies(ds, spark)
itemTallies.createOrReplaceTempView("itemTallies")
// For each record, get the metadata completeness "score" for each metric.
val metrics = sqlContext.sql("""select title + rights + dataProvider +
isShownAt + provider + preview
as mandatory,
title + description + creator + language +
subject + extent + format
as descriptiveness,
title + description + creator +
publisher + contributor + type +
place + subject + date + relation
as searchability,
collection + description + creator +
type + date + place + subject + relation
as contextualization,
title + collection + description +
creator + type + identifier + date
as identification,
title + creator + type + place +
subject + date + relation
as browsing,
creator + publisher + date + rights +
isShownAt
as reusability,
title + description + creator + type +
identifier + language + place + subject +
date + format + rights + dataProvider +
isShownAt + provider + preview
as completeness
from itemTallies""")
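    // Each metric above is a per-record count of populated fields; e.g. "mandatory" ranges
    // over 0..6 (title, rights, dataProvider, isShownAt, provider, preview).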
// Get the average metric scores across the data sample.
metrics.agg(
mean("mandatory").alias("mandatory"),
mean("descriptiveness").alias("descriptiveness"),
mean("searchability").alias("searchability"),
mean("contextualization").alias("contextualization"),
mean("identification").alias("identification"),
mean("browsing").alias("browsing"),
mean("reusability").alias("reusability"),
mean("completeness").alias("completeness")
)
}
/**
* Map a Dataset of DplaMapData to a Dataset of CompletenessTally.
*/
private def getItemTallies(ds: Dataset[OreAggregation], spark: SparkSession):
Dataset[CompletenessTally] = {
import spark.implicits._
ds.map(dplaMapData => {
CompletenessTally(
title = tally(dplaMapData.sourceResource.title),
collection = tally(dplaMapData.sourceResource.collection),
description = tally(dplaMapData.sourceResource.description),
creator = tally(dplaMapData.sourceResource.creator),
publisher = tally(dplaMapData.sourceResource.publisher),
contributor = tally(dplaMapData.sourceResource.contributor),
`type` = tally(dplaMapData.sourceResource.`type`),
identifier = tally(dplaMapData.sourceResource.identifier),
language = tally(dplaMapData.sourceResource.language),
temporal = tally(dplaMapData.sourceResource.temporal),
place = tally(dplaMapData.sourceResource.place),
subject = tally(dplaMapData.sourceResource.subject),
date = tally(dplaMapData.sourceResource.date),
extent = tally(dplaMapData.sourceResource.extent),
format = tally(dplaMapData.sourceResource.format),
relation = tally(dplaMapData.sourceResource.relation),
id = tally(Seq(dplaMapData.dplaUri)),
dataProvider = tally(Seq(dplaMapData.dataProvider)),
provider = tally(Seq(dplaMapData.provider)),
preview = tally(Seq(dplaMapData.preview)),
rights = {
val sourceResourceRights = dplaMapData.sourceResource.rights
val edmRights = dplaMapData.edmRights
if (sourceResourceRights.nonEmpty || edmRights.nonEmpty) 1 else 0
},
// add isShownAt value once it has been added to DplaMapData
isShownAt = 0
)
})
}
/**
* Get an integer representing whether or not a value is present.
*
   * @param value A Sequence containing zero to many values from a DplaMapData
   *              object.
* @return 1 if there is at least one value; otherwise 0
*/
private def tally(value: Seq[Any]): Integer = {
if(value.nonEmpty) 1 else 0
}
}
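/**
 * Hypothetical usage sketch (not part of the original report): runs the report directly
 * against an already-loaded Dataset[OreAggregation] and returns the single-row DataFrame
 * of mean metric scores. The object and method names here are illustrative only.
 */
object MetadataCompletenessReportExample {
  def averageScores(input: Dataset[OreAggregation], spark: SparkSession): DataFrame = {
    val report = new MetadataCompletenessReport(input, spark)
    // process() tallies each record and averages the per-metric scores.
    report.process(input, spark)
  }
}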
/**
* Tallies the presence or absence of values in certain fields for a single record;
* 1 if the field value is present; 0 if the field value is absent.
 * Numerical representations are used because they make it easier to calculate
 * totals, averages, etc.
*/
case class CompletenessTally(title: Integer,
collection: Integer,
description: Integer,
creator: Integer,
publisher: Integer,
contributor: Integer,
`type`: Integer,
identifier: Integer,
language: Integer,
temporal: Integer,
place: Integer,
subject: Integer,
date: Integer,
extent: Integer,
format: Integer,
relation: Integer,
id: Integer,
dataProvider: Integer,
provider: Integer,
preview: Integer,
isShownAt: Integer,
rights: Integer)
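/**
 * Hypothetical worked example (not part of the original file): a record with only the six
 * "mandatory" fields present tallies to 1 in each of those fields and 0 elsewhere, so the
 * SQL above scores it 6 for "mandatory" and 1 for "descriptiveness" (title only).
 */
object CompletenessTallyExample {
  val mandatoryOnly = CompletenessTally(
    title = 1, collection = 0, description = 0, creator = 0, publisher = 0, contributor = 0,
    `type` = 0, identifier = 0, language = 0, temporal = 0, place = 0, subject = 0, date = 0,
    extent = 0, format = 0, relation = 0, id = 0, dataProvider = 1, provider = 1, preview = 1,
    isShownAt = 1, rights = 1)

  // Mirrors the "mandatory" expression in the report's SQL.
  val mandatoryScore: Int =
    mandatoryOnly.title + mandatoryOnly.rights + mandatoryOnly.dataProvider +
      mandatoryOnly.isShownAt + mandatoryOnly.provider + mandatoryOnly.preview // == 6
}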
|
dpla/ingestion3
|
src/main/scala/dpla/ingestion3/reports/MetadataCompletenessReport.scala
|
Scala
|
mit
| 7,334 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.sources
import java.sql.Timestamp
import com.google.common.collect.ImmutableList
import org.apache.calcite.plan.RelOptCluster
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.logical.LogicalValues
import org.apache.calcite.rex.{RexLiteral, RexNode}
import org.apache.calcite.tools.RelBuilder
import org.apache.flink.api.common.typeinfo.{SqlTimeTypeInfo, TypeInformation}
import org.apache.flink.api.common.typeutils.CompositeType
import org.apache.flink.table.api.{DataTypes, TableException, Types, ValidationException}
import org.apache.flink.table.calcite.FlinkTypeFactory
import org.apache.flink.table.expressions.utils.ApiExpressionUtils.{typeLiteral, unresolvedCall}
import org.apache.flink.table.expressions.{PlannerExpressionConverter, ResolvedFieldReference}
import org.apache.flink.table.functions.BuiltInFunctionDefinitions.CAST
import org.apache.flink.table.types.utils.TypeConversions.{fromDataTypeToLegacyInfo, fromLegacyInfoToDataType}
import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo
import scala.collection.JavaConverters._
/** Util class for [[TableSource]]. */
object TableSourceUtil {
/** Returns true if the [[TableSource]] has a rowtime attribute. */
def hasRowtimeAttribute(tableSource: TableSource[_]): Boolean =
getRowtimeAttributes(tableSource).nonEmpty
/** Returns true if the [[TableSource]] has a proctime attribute. */
def hasProctimeAttribute(tableSource: TableSource[_]): Boolean =
getProctimeAttribute(tableSource).nonEmpty
/**
* Computes the indices that map the input type of the DataStream to the schema of the table.
*
* The mapping is based on the field names and fails if a table field cannot be
* mapped to a field of the input type.
*
* @param tableSource The table source for which the table schema is mapped to the input type.
* @param isStreamTable True if the mapping is computed for a streaming table, false otherwise.
* @param selectedFields The indexes of the table schema fields for which a mapping is
* computed. If None, a mapping for all fields is computed.
* @return An index mapping from input type to table schema.
*/
def computeIndexMapping(
tableSource: TableSource[_],
isStreamTable: Boolean,
selectedFields: Option[Array[Int]]): Array[Int] = {
val inputType = fromDataTypeToLegacyInfo(tableSource.getProducedDataType)
val tableSchema = tableSource.getTableSchema
// get names of selected fields
val tableFieldNames = if (selectedFields.isDefined) {
val names = tableSchema.getFieldNames
selectedFields.get.map(names(_))
} else {
tableSchema.getFieldNames
}
// get types of selected fields
val tableFieldTypes = if (selectedFields.isDefined) {
val types = tableSchema.getFieldTypes
selectedFields.get.map(types(_))
} else {
tableSchema.getFieldTypes
}
// get rowtime and proctime attributes
val rowtimeAttributes = getRowtimeAttributes(tableSource)
val proctimeAttributes = getProctimeAttribute(tableSource)
// compute mapping of selected fields and time attributes
val mapping: Array[Int] = tableFieldTypes.zip(tableFieldNames).map {
case (t: SqlTimeTypeInfo[_], name: String)
if t.getTypeClass == classOf[Timestamp] && proctimeAttributes.contains(name) =>
if (isStreamTable) {
TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER
} else {
TimeIndicatorTypeInfo.PROCTIME_BATCH_MARKER
}
case (t: SqlTimeTypeInfo[_], name: String)
if t.getTypeClass == classOf[Timestamp] && rowtimeAttributes.contains(name) =>
if (isStreamTable) {
TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER
} else {
TimeIndicatorTypeInfo.ROWTIME_BATCH_MARKER
}
case (t: TypeInformation[_], name) =>
// check if field is registered as time indicator
if (getProctimeAttribute(tableSource).contains(name)) {
throw new ValidationException(s"Processing time field '$name' has invalid type $t. " +
s"Processing time attributes must be of type ${Types.SQL_TIMESTAMP}.")
}
if (getRowtimeAttributes(tableSource).contains(name)) {
throw new ValidationException(s"Rowtime field '$name' has invalid type $t. " +
s"Rowtime attributes must be of type ${Types.SQL_TIMESTAMP}.")
}
val (physicalName, idx, tpe) = resolveInputField(name, tableSource)
        // validate that mapped fields are of the same type
if (tpe != t) {
throw new ValidationException(s"Type $t of table field '$name' does not " +
s"match with type $tpe of the field '$physicalName' of the TableSource return type.")
}
idx
}
// ensure that only one field is mapped to an atomic type
if (!inputType.isInstanceOf[CompositeType[_]] && mapping.count(_ >= 0) > 1) {
throw new ValidationException(
s"More than one table field matched to atomic input type $inputType.")
}
mapping
}
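  /*
   * Illustrative sketch (hypothetical schema, not part of the original file): for a streaming
   * table whose schema is [a: LONG, b: STRING, proc: SQL_TIMESTAMP], where "proc" is declared
   * as the proctime attribute and the produced type is Row(a: Long, b: String), this method
   * returns Array(0, 1, TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER): physical indexes for
   * the regular fields and a marker for the time attribute, which has no physical field.
   */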
/**
* Returns the Calcite schema of a [[TableSource]].
*
* @param tableSource The [[TableSource]] for which the Calcite schema is generated.
* @param selectedFields The indices of all selected fields. None, if all fields are selected.
* @param streaming Flag to determine whether the schema of a stream or batch table is created.
* @param typeFactory The type factory to create the schema.
* @return The Calcite schema for the selected fields of the given [[TableSource]].
*/
def getRelDataType(
tableSource: TableSource[_],
selectedFields: Option[Array[Int]],
streaming: Boolean,
typeFactory: FlinkTypeFactory): RelDataType = {
val fieldNames = tableSource.getTableSchema.getFieldNames
var fieldTypes = tableSource.getTableSchema.getFieldTypes
if (streaming) {
// adjust the type of time attributes for streaming tables
val rowtimeAttributes = getRowtimeAttributes(tableSource)
val proctimeAttributes = getProctimeAttribute(tableSource)
// patch rowtime fields with time indicator type
rowtimeAttributes.foreach { rowtimeField =>
val idx = fieldNames.indexOf(rowtimeField)
fieldTypes = fieldTypes.patch(idx, Seq(TimeIndicatorTypeInfo.ROWTIME_INDICATOR), 1)
}
// patch proctime field with time indicator type
proctimeAttributes.foreach { proctimeField =>
val idx = fieldNames.indexOf(proctimeField)
fieldTypes = fieldTypes.patch(idx, Seq(TimeIndicatorTypeInfo.PROCTIME_INDICATOR), 1)
}
}
val (selectedFieldNames, selectedFieldTypes) = if (selectedFields.isDefined) {
// filter field names and types by selected fields
(selectedFields.get.map(fieldNames(_)), selectedFields.get.map(fieldTypes(_)))
} else {
(fieldNames, fieldTypes)
}
typeFactory.buildLogicalRowType(selectedFieldNames, selectedFieldTypes)
}
/**
* Returns the [[RowtimeAttributeDescriptor]] of a [[TableSource]].
*
* @param tableSource The [[TableSource]] for which the [[RowtimeAttributeDescriptor]] is
* returned.
* @param selectedFields The fields which are selected from the [[TableSource]].
* If None, all fields are selected.
* @return The [[RowtimeAttributeDescriptor]] of the [[TableSource]].
*/
def getRowtimeAttributeDescriptor(
tableSource: TableSource[_],
selectedFields: Option[Array[Int]]): Option[RowtimeAttributeDescriptor] = {
tableSource match {
case r: DefinedRowtimeAttributes =>
val descriptors = r.getRowtimeAttributeDescriptors
if (descriptors.size() == 0) {
None
} else if (descriptors.size > 1) {
throw new ValidationException("Table with has more than a single rowtime attribute.")
} else {
// exactly one rowtime attribute descriptor
if (selectedFields.isEmpty) {
// all fields are selected.
Some(descriptors.get(0))
} else {
val descriptor = descriptors.get(0)
// look up index of row time attribute in schema
val fieldIdx = tableSource.getTableSchema.getFieldNames.indexOf(
descriptor.getAttributeName)
// is field among selected fields?
if (selectedFields.get.contains(fieldIdx)) {
Some(descriptor)
} else {
None
}
}
}
case _ => None
}
}
/**
* Obtains the [[RexNode]] expression to extract the rowtime timestamp for a [[TableSource]].
*
* @param tableSource The [[TableSource]] for which the expression is extracted.
* @param selectedFields The selected fields of the [[TableSource]].
* If None, all fields are selected.
* @param cluster The [[RelOptCluster]] of the current optimization process.
* @param relBuilder The [[RelBuilder]] to build the [[RexNode]].
* @param resultType The result type of the timestamp expression.
* @return The [[RexNode]] expression to extract the timestamp of the table source.
*/
def getRowtimeExtractionExpression(
tableSource: TableSource[_],
selectedFields: Option[Array[Int]],
cluster: RelOptCluster,
relBuilder: RelBuilder,
resultType: TypeInformation[_]): Option[RexNode] = {
val typeFactory = cluster.getTypeFactory.asInstanceOf[FlinkTypeFactory]
/**
     * Creates a RelNode with a schema that corresponds to the given fields.
     * Fields for which no information is available will have default values.
*/
def createSchemaRelNode(fields: Array[(String, Int, TypeInformation[_])]): RelNode = {
val maxIdx = fields.map(_._2).max
val idxMap: Map[Int, (String, TypeInformation[_])] = Map(
fields.map(f => f._2 -> (f._1, f._3)): _*)
val (physicalFields, physicalTypes) = (0 to maxIdx)
.map(i => idxMap.getOrElse(i, ("", Types.BYTE))).unzip
val physicalSchema: RelDataType = typeFactory.buildLogicalRowType(
physicalFields,
physicalTypes)
LogicalValues.create(
cluster,
physicalSchema,
ImmutableList.of().asInstanceOf[ImmutableList[ImmutableList[RexLiteral]]])
}
val rowtimeDesc = getRowtimeAttributeDescriptor(tableSource, selectedFields)
rowtimeDesc.map { r =>
val tsExtractor = r.getTimestampExtractor
val fieldAccesses = if (tsExtractor.getArgumentFields.nonEmpty) {
val resolvedFields = resolveInputFields(tsExtractor.getArgumentFields, tableSource)
// push an empty values node with the physical schema on the relbuilder
relBuilder.push(createSchemaRelNode(resolvedFields))
// get extraction expression
resolvedFields.map(f => new ResolvedFieldReference(f._1, f._3, f._2))
} else {
new Array[ResolvedFieldReference](0)
}
val expression = tsExtractor.getExpression(fieldAccesses)
// add cast to requested type and convert expression to RexNode
      // If resultType is TimeIndicatorTypeInfo, its internal format is long, but a cast
      // from Timestamp yields java.sql.Timestamp, so we need to cast to long first.
val castExpression = unresolvedCall(CAST,
unresolvedCall(CAST, expression, typeLiteral(DataTypes.BIGINT())),
typeLiteral(fromLegacyInfoToDataType(resultType)))
// TODO we convert to planner expressions as a temporary solution
val rexExpression = castExpression
.accept(PlannerExpressionConverter.INSTANCE)
.toRexNode(relBuilder)
relBuilder.clear()
rexExpression
}
}
/**
   * Returns the indexes of the physical fields that are required to compute the given logical fields.
*
* @param tableSource The [[TableSource]] for which the physical indexes are computed.
* @param logicalFieldIndexes The indexes of the accessed logical fields for which the physical
* indexes are computed.
   * @return The indexes of the physical fields that are accessed to forward and compute
   *         the logical fields.
*/
def getPhysicalIndexes(
tableSource: TableSource[_],
logicalFieldIndexes: Array[Int]): Array[Int] = {
// get the mapping from logical to physical positions.
// stream / batch distinction not important here
val fieldMapping = computeIndexMapping(tableSource, isStreamTable = true, None)
logicalFieldIndexes
// resolve logical indexes to physical indexes
.map(fieldMapping(_))
// resolve time indicator markers to physical indexes
.flatMap {
case TimeIndicatorTypeInfo.PROCTIME_STREAM_MARKER =>
          // proctime fields do not access a physical field
Seq()
case TimeIndicatorTypeInfo.ROWTIME_STREAM_MARKER =>
// rowtime field is computed.
// get names of fields which are accessed by the expression to compute the rowtime field.
val rowtimeAttributeDescriptor = getRowtimeAttributeDescriptor(tableSource, None)
val accessedFields = if (rowtimeAttributeDescriptor.isDefined) {
rowtimeAttributeDescriptor.get.getTimestampExtractor.getArgumentFields
} else {
throw new TableException("Computed field mapping includes a rowtime marker but the " +
"TableSource does not provide a RowtimeAttributeDescriptor. " +
"This is a bug and should be reported.")
}
// resolve field names to physical fields
resolveInputFields(accessedFields, tableSource).map(_._2)
case idx =>
Seq(idx)
}
}
/** Returns a list with all rowtime attribute names of the [[TableSource]]. */
private def getRowtimeAttributes(tableSource: TableSource[_]): Array[String] = {
tableSource match {
case r: DefinedRowtimeAttributes =>
r.getRowtimeAttributeDescriptors.asScala.map(_.getAttributeName).toArray
case _ =>
Array()
}
}
/** Returns the proctime attribute of the [[TableSource]] if it is defined. */
private def getProctimeAttribute(tableSource: TableSource[_]): Option[String] = {
tableSource match {
case p: DefinedProctimeAttribute if p.getProctimeAttribute != null =>
Some(p.getProctimeAttribute)
case _ =>
None
}
}
/**
   * Identifies, for a field name of the logical schema, the corresponding physical field in the
   * return type of a [[TableSource]].
*
* @param fieldName The logical field to look up.
* @param tableSource The table source in which to look for the field.
* @return The name, index, and type information of the physical field.
*/
private def resolveInputField(
fieldName: String,
tableSource: TableSource[_]): (String, Int, TypeInformation[_]) = {
val returnType = fromDataTypeToLegacyInfo(tableSource.getProducedDataType)
/** Look up a field by name in a type information */
def lookupField(fieldName: String, failMsg: String): (String, Int, TypeInformation[_]) = {
returnType match {
case c: CompositeType[_] =>
// get and check field index
val idx = c.getFieldIndex(fieldName)
if (idx < 0) {
throw new ValidationException(failMsg)
}
// return field name, index, and field type
(fieldName, idx, c.getTypeAt(idx))
case t: TypeInformation[_] =>
// no composite type, we return the full atomic type as field
(fieldName, 0, t)
}
}
tableSource match {
case d: DefinedFieldMapping if d.getFieldMapping != null =>
// resolve field name in field mapping
val resolvedFieldName = d.getFieldMapping.get(fieldName)
if (resolvedFieldName == null) {
throw new ValidationException(
s"Field '$fieldName' could not be resolved by the field mapping.")
}
// look up resolved field in return type
lookupField(
resolvedFieldName,
s"Table field '$fieldName' was resolved to TableSource return type field " +
s"'$resolvedFieldName', but field '$resolvedFieldName' was not found in the return " +
s"type $returnType of the TableSource. " +
s"Please verify the field mapping of the TableSource.")
case _ =>
// look up field in return type
lookupField(
fieldName,
s"Table field '$fieldName' was not found in the return type $returnType of the " +
s"TableSource.")
}
}
/**
* Identifies the physical fields in the return type [[TypeInformation]] of a [[TableSource]]
* for a list of field names of the [[TableSource]]'s [[org.apache.flink.table.api.TableSchema]].
*
* @param fieldNames The field names to look up.
* @param tableSource The table source in which to look for the field.
   * @return The name, index, and type information of each physical field.
*/
private def resolveInputFields(
fieldNames: Array[String],
tableSource: TableSource[_]): Array[(String, Int, TypeInformation[_])] = {
fieldNames.map(resolveInputField(_, tableSource))
}
}
|
fhueske/flink
|
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/sources/TableSourceUtil.scala
|
Scala
|
apache-2.0
| 18,134 |
package scala.build
import sbt._, Keys._
object TestJDeps {
val testJDepsImpl: Def.Initialize[Task[Unit]] = Def.task {
val libraryJar = (packageBin in Compile in LocalProject("library")).value
val reflectJar = (packageBin in Compile in LocalProject("reflect")).value
    // jdeps -s -P build/pack/lib/scala-{library,reflect}.jar | grep -v build/pack | perl -pe 's/.*\((.*)\)$/$1/' | sort -u
val jdepsOut = scala.sys.process.Process("jdeps", Seq("-s", "-P", libraryJar.getPath, reflectJar.getPath)).lineStream
val profilePart = ".*\\\\((.*)\\\\)$".r
val profiles = jdepsOut.collect {
case profilePart(profile) => profile
}.toSet
if (profiles != Set("compact1"))
      throw new RuntimeException(jdepsOut.mkString("Detected dependency outside of compact1:\n", "\n", ""))
}
}
|
martijnhoekstra/scala
|
project/TestJDeps.scala
|
Scala
|
apache-2.0
| 812 |
/**
* MIT License
*
* Copyright (c) 2016-2018 James Sherwood-Jones <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.jsherz.luskydive.apis
import java.time.Instant
import java.util.UUID
import akka.http.scaladsl.model.{ContentTypes, HttpEntity, StatusCodes}
import akka.http.scaladsl.server.Route
import com.jsherz.luskydive.dao._
import com.jsherz.luskydive.json.LoginJsonSupport._
import com.jsherz.luskydive.json.{LoginRequest, LoginResponse}
import com.jsherz.luskydive.services.JwtService
import org.mockito.Matchers._
import org.mockito.Mockito
import org.mockito.Mockito._
/**
* Ensures the login functions correctly.
*/
class LoginApiSpec extends BaseApiSpec {
private val url = "/login"
private var dao: AuthDao = Mockito.spy(new StubAuthDao())
private val jwtService: JwtService = new JwtService {
override def verifyJwt(token: String): Option[UUID] = ???
override def createJwt(uuid: UUID, issuedAt: Instant, expiresAt: Instant): String = "a.b.c"
}
private var route = new LoginApi(dao, jwtService).route
before {
dao = Mockito.spy(new StubAuthDao())
route = new LoginApi(dao, jwtService).route
}
"LoginApi" should {
"return success with no errors if a valid e-mail & password are given" in {
val request = LoginRequest(StubAuthDao.validEmail, StubAuthDao.validPassword)
Post(url, request) ~> route ~> check {
response.status shouldEqual StatusCodes.OK
responseAs[LoginResponse].success shouldEqual true
responseAs[LoginResponse].errors shouldBe empty
responseAs[LoginResponse].apiKey shouldBe Some("a.b.c")
verify(dao).login(anyString, anyString)
}
}
"return unsupported media type when not JSON" in {
Seq(ContentTypes.`text/xml(UTF-8)`, ContentTypes.`text/plain(UTF-8)`).foreach { contentType =>
val request = HttpEntity(contentType, "foo bar test")
Post(url, request) ~> Route.seal(route) ~> check {
response.status shouldEqual StatusCodes.UnsupportedMediaType
verify(dao, never()).login(anyString, anyString)
}
}
}
"return bad request if no e-mail or password are given" in {
val request = HttpEntity(ContentTypes.`application/json`, """{}""")
Post(url, request) ~> Route.seal(route) ~> check {
response.status shouldEqual StatusCodes.BadRequest
verify(dao, never()).login(anyString, anyString)
}
}
"return bad request if no e-mail is given" in {
val request = HttpEntity(ContentTypes.`application/json`, """{"password":"eggbeardHype"}""")
Post(url, request) ~> Route.seal(route) ~> check {
response.status shouldEqual StatusCodes.BadRequest
verify(dao, never()).login(anyString, anyString)
}
}
"return bad request if no password is given" in {
val request = HttpEntity(ContentTypes.`application/json`, """{"email":"[email protected]"}""")
Post(url, request) ~> Route.seal(route) ~> check {
response.status shouldEqual StatusCodes.BadRequest
verify(dao, never()).login(anyString, anyString)
}
}
"return method not allowed if not a post request" in {
Seq(Get, Put, Delete, Patch).foreach { method =>
method(url) ~> Route.seal(route) ~> check {
response.status shouldEqual StatusCodes.MethodNotAllowed
}
}
verify(dao, never()).login(anyString, anyString)
}
"return failed with an error if the login information is incorrect" in {
val request = LoginRequest(StubAuthDao.invalidEmail, StubAuthDao.invalidPassword)
Post(url, request) ~> route ~> check {
response.status shouldEqual StatusCodes.OK
responseAs[LoginResponse].success shouldEqual false
responseAs[LoginResponse].errors shouldBe Map(
"password" -> "error.invalidEmailPass"
)
responseAs[LoginResponse].apiKey shouldBe None
verify(dao).login(anyString, anyString)
}
}
"return failed with an appropriate error if the account is locked" in {
val request = LoginRequest(StubAuthDao.accountLockedEmail, StubAuthDao.accountLockedPassword)
Post(url, request) ~> route ~> check {
response.status shouldEqual StatusCodes.OK
responseAs[LoginResponse].success shouldEqual false
responseAs[LoginResponse].errors shouldBe Map(
"email" -> "error.accountLocked"
)
responseAs[LoginResponse].apiKey shouldBe None
verify(dao).login(anyString, anyString)
}
}
}
}
|
jSherz/lsd-members
|
backend/src/test/scala/com/jsherz/luskydive/apis/LoginApiSpec.scala
|
Scala
|
mit
| 5,659 |
package com.twitter.finagle.client
import com.twitter.finagle._
import com.twitter.finagle.param._
import com.twitter.finagle.factory.{
BindingFactory, RefcountedFactory, StatsFactoryWrapper, TimeoutFactory}
import com.twitter.finagle.filter.{ExceptionSourceFilter, MonitorFilter}
import com.twitter.finagle.loadbalancer.LoadBalancerFactory
import com.twitter.finagle.service._
import com.twitter.finagle.stack.Endpoint
import com.twitter.finagle.stack.nilStack
import com.twitter.finagle.stats.{ClientStatsReceiver, RollupStatsReceiver}
import com.twitter.finagle.tracing.{ClientDestTracingFilter, TracingFilter}
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.util.Showable
import com.twitter.util.Var
private[finagle] object StackClient {
/**
   * Canonical roles for each client-related stack module.
*/
object Role {
object LoadBalancer extends Stack.Role
object Pool extends Stack.Role
object RequestDraining extends Stack.Role
object PrepFactory extends Stack.Role
/** PrepConn is special in that it's the first role before the `Endpoint` role */
object PrepConn extends Stack.Role
}
/**
* A [[com.twitter.finagle.Stack]] representing an endpoint.
* Note that this is terminated by a [[com.twitter.finagle.service.FailingFactory]]:
* users are expected to terminate it with a concrete service factory.
*
* @see [[com.twitter.finagle.service.ExpiringService]]
* @see [[com.twitter.finagle.service.FailFastFactory]]
* @see [[com.twitter.finagle.client.DefaultPool]]
* @see [[com.twitter.finagle.service.TimeoutFilter]]
* @see [[com.twitter.finagle.service.FailureAccrualFactory]]
* @see [[com.twitter.finagle.service.StatsServiceFactory]]
* @see [[com.twitter.finagle.tracing.ClientDestTracingFilter]]
* @see [[com.twitter.finagle.filter.MonitorFilter]]
* @see [[com.twitter.finagle.filter.ExceptionSourceFilter]]
*/
def endpointStack[Req, Rep]: Stack[ServiceFactory[Req, Rep]] = {
// Ensure that we have performed global initialization.
com.twitter.finagle.Init()
val stk = new StackBuilder[ServiceFactory[Req, Rep]](nilStack[Req, Rep])
stk.push(Role.PrepConn, identity[ServiceFactory[Req, Rep]](_))
stk.push(ExpiringService.module)
stk.push(FailFastFactory.module)
stk.push(DefaultPool.module)
stk.push(TimeoutFilter.module)
stk.push(FailureAccrualFactory.module)
stk.push(StatsServiceFactory.module)
stk.push(StatsFilter.module)
stk.push(ClientDestTracingFilter.module)
stk.push(MonitorFilter.module)
stk.push(ExceptionSourceFilter.module)
stk.result
}
/**
* Creates a default finagle client [[com.twitter.finagle.Stack]].
* The stack can be configured via [[com.twitter.finagle.Stack.Param]]'s
* in the finagle package object ([[com.twitter.finagle.param]]) and specific
* params defined in the companion objects of the respective modules.
*
* @see [[com.twitter.finagle.client.StackClient#endpointStack]]
* @see [[com.twitter.finagle.loadbalancer.LoadBalancerFactory]]
   * @see [[com.twitter.finagle.factory.RefcountedFactory]]
* @see [[com.twitter.finagle.factory.TimeoutFactory]]
* @see [[com.twitter.finagle.factory.StatsFactoryWrapper]]
* @see [[com.twitter.finagle.filter.TracingFilter]]
*/
def newStack[Req, Rep]: Stack[ServiceFactory[Req, Rep]] = {
val stk = new StackBuilder(endpointStack[Req, Rep])
stk.push(LoadBalancerFactory.module)
stk.push(Role.RequestDraining, (fac: ServiceFactory[Req, Rep]) =>
new RefcountedFactory(fac))
stk.push(TimeoutFactory.module)
stk.push(StatsFactoryWrapper.module)
stk.push(TracingFilter.module)
stk.push(Role.PrepFactory, identity[ServiceFactory[Req, Rep]](_))
stk.result
}
}
/**
* A [[com.twitter.finagle.Stack]]-based client.
*/
private[finagle] abstract class StackClient[Req, Rep, In, Out](
val stack: Stack[ServiceFactory[Req, Rep]],
val params: Stack.Params
) extends Client[Req, Rep] { self =>
/**
* A convenient type alias for a client dispatcher.
*/
type Dispatcher = Transport[In, Out] => Service[Req, Rep]
/**
* Creates a new StackClient with the default stack (StackClient#newStack)
* and [[com.twitter.finagle.stats.ClientStatsReceiver]].
*/
def this() = this(
StackClient.newStack[Req, Rep],
Stack.Params.empty + Stats(ClientStatsReceiver)
)
/**
* Defines a typed [[com.twitter.finagle.Transporter]] for this client.
* Concrete StackClient implementations are expected to specify this.
*/
protected val newTransporter: Stack.Params => Transporter[In, Out]
/**
* Defines a dispatcher, a function which reconciles the stream based
* `Transport` with a Request/Response oriented `Service`.
* Together with a `Transporter`, it forms the foundation of a
* finagle client. Concrete implementations are expected to specify this.
*
* @see [[com.twitter.finagle.dispatch.GenSerialServerDispatcher]]
*/
protected val newDispatcher: Stack.Params => Dispatcher
/**
* Creates a new StackClient with `f` applied to `stack`.
*/
def transformed(f: Stack[ServiceFactory[Req, Rep]] => Stack[ServiceFactory[Req, Rep]]) =
copy(stack = f(stack))
/**
* Creates a new StackClient with `p` added to the `params`
* used to configure this StackClient's `stack`.
*/
def configured[P: Stack.Param](p: P): StackClient[Req, Rep, In, Out] =
copy(params = params+p)
/**
* A copy constructor in lieu of defining StackClient as a
* case class.
*/
def copy(
stack: Stack[ServiceFactory[Req, Rep]] = self.stack,
params: Stack.Params = self.params
): StackClient[Req, Rep, In, Out] =
new StackClient[Req, Rep, In, Out](stack, params) {
protected val newTransporter = self.newTransporter
protected val newDispatcher = self.newDispatcher
}
/**
* A stackable module that creates new `Transports` (via transporter)
* when applied.
*/
private[this] val endpointer = new Stack.Simple[ServiceFactory[Req, Rep]](Endpoint) {
val description = "Send requests over the wire"
def make(prms: Stack.Params, next: ServiceFactory[Req, Rep]) = {
val Transporter.EndpointAddr(addr) = prms[Transporter.EndpointAddr]
val transporter = newTransporter(prms)
val dispatcher = newDispatcher(prms)
ServiceFactory(() => transporter(addr) map dispatcher)
}
}
/** @inheritdoc */
def newClient(dest: Name, label0: String): ServiceFactory[Req, Rep] = {
val Stats(stats) = params[Stats]
val Label(label1) = params[Label]
// For historical reasons, we have two sources for identifying
// a client. The most recently set `label0` takes precedence.
val clientLabel = (label0, label1) match {
case ("", "") => Showable.show(dest)
case ("", l1) => l1
case (l0, l1) => l0
}
val clientStack = stack ++ (endpointer +: nilStack)
val clientParams = params +
Label(clientLabel) +
Stats(stats.scope(clientLabel))
dest match {
case Name.Bound(addr) =>
clientStack.make(clientParams + LoadBalancerFactory.Dest(addr))
case Name.Path(path) =>
val newStack: Var[Addr] => ServiceFactory[Req, Rep] =
addr => clientStack.make(clientParams + LoadBalancerFactory.Dest(addr))
new BindingFactory(path, newStack, stats.scope("interpreter"))
}
}
}
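/*
 * Hypothetical usage sketch (not part of this file): a concrete protocol client built on
 * StackClient can be re-parameterized before resolving a destination, e.g.
 *
 *   val labelled = myStackClient
 *     .configured(Label("example-client"))
 *     .configured(Stats(statsReceiver.scope("example")))
 *   val factory = labelled.newClient(dest, "example-client")
 *
 * where `myStackClient`, `statsReceiver` and `dest: Name` are assumed to already exist.
 */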
/**
* A [[com.twitter.finagle.Stack Stack]]-based client which preserves
* `Like` client semantics. This makes it appropriate for implementing rich
* clients, since the rich type can be preserved without having to drop down
* to StackClient[Req, Rep, In, Out] when making changes.
*/
private[finagle]
abstract class StackClientLike[Req, Rep, In, Out, Repr <: StackClientLike[Req, Rep, In, Out, Repr]](
client: StackClient[Req, Rep, In, Out]
) extends Client[Req, Rep] {
val stack = client.stack
protected def newInstance(client: StackClient[Req, Rep, In, Out]): Repr
/**
* Creates a new `Repr` with an underlying StackClient where `p` has been
* added to the `params` used to configure this StackClient's `stack`.
*/
def configured[P: Stack.Param](p: P): Repr =
newInstance(client.configured(p))
/**
* Creates a new `Repr` with an underlying StackClient where `f` has been
* applied to `stack`.
*/
protected def transformed(f: Stack[ServiceFactory[Req, Rep]] => Stack[ServiceFactory[Req, Rep]]): Repr =
newInstance(client.transformed(f))
/** @inheritdoc */
def newClient(dest: Name, label: String) = client.newClient(dest, label)
}
|
JustinTulloss/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/client/StackClient.scala
|
Scala
|
apache-2.0
| 8,600 |
package robco
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.functions._
object SparkSql2 {
// case class Person(firstName: String, lastName: String, gender: String)
case class CvcInfo(cvcId: String, description: String)
case class AvcInfo(avcId: String, cvcId: String)
case class AvcData(avcId: String, octetsIn: String, octetsOut: String)
def main(args: Array[String]) = {
val sparkMaster = "spark://" + args(0) + ":7077"
val hdfsRoot = "hdfs://" + args(1) + ":9000"
val startTime = System.currentTimeMillis()/1000
val conf =
new SparkConf().setMaster(sparkMaster)
.set("spark.driver.memory", "6G")
.set("spark.storage.memoryFraction", "0.6")
.set("spark.executor.memory", "6G")
.set("spark.driver.maxResultSize", "4G")
val dataset = args(2)
val sc = new SparkContext(conf)
sys.ShutdownHookThread { sc.stop() }
val hc = new org.apache.spark.sql.hive.HiveContext(sc)
import hc.implicits._
val cvcInfoRDD = sc.textFile(s"$hdfsRoot/data-schema-$dataset-cvcInfo.csv").map(_.split(",")).map(a => CvcInfo(a(0),a(1)))
val avcInfoRDD = sc.textFile(s"$hdfsRoot/data-schema-$dataset-avcInfo.csv").map(_.split(",")).map(a => AvcInfo(a(0),a(1)))
val avcDataRDD = sc.textFile(s"$hdfsRoot/data-schema-$dataset-avcData.csv").map(_.split(",")).map(a => AvcData(a(0),a(1),a(2)))
val cvcInfo = cvcInfoRDD.toDF
val avcInfo = avcInfoRDD.toDF
val avcData = avcDataRDD.toDF
cvcInfo.registerTempTable("cvcInfo")
avcInfo.registerTempTable("avcInfo")
avcData.registerTempTable("avcData")
val dataLoadTime = (System.currentTimeMillis()/1000) - startTime
println("loading data time: " + dataLoadTime)
val startTime2 = System.currentTimeMillis()/1000
val rows = hc.sql("""
SELECT ai.*, ad.*, ci.*
FROM avcInfo ai, avcData ad, cvcInfo ci
WHERE ad.avcId = ai.avcId AND
ai.cvcId = ci.cvcId
""")
val results = rows.groupBy(cvcInfo.col("cvcId")).agg(cvcInfo.col("cvcId"), sum(avcData.col("octetsOut"))).collect
val queryTime = (System.currentTimeMillis()/1000) - startTime2
println("query time: " + queryTime)
println("total time taken: " + (dataLoadTime + queryTime))
results.foreach(println)
println("number of results: " + results.length)
}
}
|
amazoncop/spark
|
src/main/scala/robco/SparkSql2.scala
|
Scala
|
apache-2.0
| 2,375 |
package rpgboss.editor.imageset.metadata
import scala.swing._
import rpgboss.editor.uibase.SwingUtils._
import rpgboss.editor.misc.TileUtils
import scala.swing.event._
import rpgboss.editor.Internationalized._
import rpgboss.editor.uibase._
import rpgboss.model._
import rpgboss.model.resource._
import java.awt.image.BufferedImage
import rpgboss.editor.StateMaster
import rpgboss.editor.imageset.selector._
import javax.imageio.ImageIO
import java.awt.geom.Line2D
import java.awt.AlphaComposite
/**
* @param autotiles If true, this list item denotes autotiles. Other two
* params are then unimportant.
*
* @param text What to display in the ListView
*
* @param tilesetIdx Which tileset index it's associated with. Only used if
* autotiles == false
*/
case class TilesetListItem(autotiles: Boolean, text: String, tilesetIdx: Int) {
override def toString() = text
}
class TilesetsMetadataPanel(sm: StateMaster)
extends BoxPanel(Orientation.Horizontal) {
// A mutable array that we update as we modify things
val autotiles =
Autotile.list(sm.getProj).map(Autotile.readFromDisk(sm.getProj, _))
val dirtyAutotileIdxs = collection.mutable.Set[Int]()
// We don't modify this array directly, but modify the contents
val tilesets =
Tileset.list(sm.getProj).map(Tileset.readFromDisk(sm.getProj, _))
val dirtyTilesetIdxs = collection.mutable.Set[Int]()
// Two variables
var autotilesSelected = true
var tilesetIdx = -1
var metadataMode = MetadataMode.default
/**
   * Returns the tile metadata, i.e. Some(TileMetadata(...)), if the tile exists.
   * tilesetIdx is always going to be valid when this is called.
   * Returns None if (xTile, yTile) is invalid.
*/
def getTileMeta(x: Int, y: Int) = {
if (autotilesSelected) {
val idx = x
if (idx < autotiles.length) {
val m = autotiles(idx).metadata
Some(TileMetadata(m.blockedDirs, m.height, m.normalizedVehicleDirs))
} else None
} else {
val tileset = tilesets(tilesetIdx)
val blockedDir = tileset.metadata.blockedDirsAry(y)(x)
val height = tileset.metadata.heightAry(y)(x)
Some(TileMetadata(blockedDir, height,
AutotileMetadata.defaultVehicleDirs))
}
}
def inBounds(xTile: Int, yTile: Int): Boolean = {
if (autotilesSelected) {
true
} else {
val tileset = tilesets(tilesetIdx)
tileset.inBounds(xTile, yTile)
}
}
/**
* User clicks this tile. Not required to do anything.
* tilesetIdx is always going to be valid when this is called
*/
def updateTileMeta(x: Int, y: Int, newMetadata: TileMetadata) = {
if (autotilesSelected) {
val idx = x
val autotile = autotiles(idx)
val newAutotileMetadata = autotile.metadata.copy(
blockedDirs = newMetadata.blockedDirs,
height = newMetadata.height,
vehicleDirs = newMetadata.vehicleDirs)
autotiles.update(idx, autotile.copy(metadata = newAutotileMetadata))
dirtyAutotileIdxs.add(idx)
} else {
val tileset = tilesets(tilesetIdx)
val m = tileset.metadata
m.blockedDirsAry(y).update(x, newMetadata.blockedDirs)
m.heightAry(y).update(x, newMetadata.height)
dirtyTilesetIdxs.add(tilesetIdx)
}
}
val tilesetListView = new ListView(
Array(TilesetListItem(true, "<html><b>*** Autotiles</b></html>", -1)) ++
tilesets.zipWithIndex.map {
case (ts, i) => TilesetListItem(false, ts.name, i)
})
val metadataPanelContainer = new BoxPanel(Orientation.Vertical) {
preferredSize = new Dimension(8 * 32 + 12, 500)
}
// Guaranteed to be called with a valid index
def updateTilesetSelection(selectAutotiles: Boolean, idx: Int) = {
// Clear old item
metadataPanelContainer.contents.clear()
if (selectAutotiles) {
def srcImg = TileUtils.getAutotileCollageImg(autotiles)
metadataPanelContainer.contents +=
new TileMetadataPanel(srcImg, this, true)
autotilesSelected = true
} else {
val t = tilesets(idx)
metadataPanelContainer.contents +=
new TileMetadataPanel(t.img, this, false)
tilesetIdx = idx
autotilesSelected = false
}
metadataPanelContainer.revalidate()
}
def save() = {
// TODO: Perhaps we should only save the actually modified ones...
(tilesets ++ autotiles).map(_.writeMetadata())
}
contents += new DesignGridPanel {
row.grid().add(leftLabel("Tilesets:"))
row.grid().add(tilesetListView)
row.grid().add(leftLabel(getMessage("Tilesets_Help")))
}
contents += metadataPanelContainer
contents += new DesignGridPanel {
val btns = enumButtons(MetadataMode)(
metadataMode,
newMode => {
metadataMode = newMode
metadataPanelContainer.repaint()
},
Nil)
new ButtonGroup(btns: _*)
row().grid().add(leftLabel("Edit mode:"))
btns.foreach { btn =>
row().grid().add(btn)
}
}
listenTo(tilesetListView.selection)
reactions += {
case ListSelectionChanged(`tilesetListView`, _, _) =>
val item = tilesetListView.selection.items.head
if (item.autotiles) {
updateTilesetSelection(true, -1)
} else {
updateTilesetSelection(false, item.tilesetIdx)
}
}
// Init selection
tilesetListView.selectIndices(0)
}
|
lefay1982/rpgboss
|
desktop/src/main/scala/rpgboss/editor/imageset/metadata/TilesetsMetadataPanel.scala
|
Scala
|
agpl-3.0
| 5,397 |
package org.apache.spark.ml.util
import org.apache.spark.Partitioner
/**
 * A [[Partitioner]] that maps an integer key directly to the partition with the same index.
 *
 * Created by meng on 10/8/14.
 */
class IdentityPartitioner(override val numPartitions: Int) extends Partitioner {
override def getPartition(key: Any): Int = key.asInstanceOf[Int]
override def equals(other: Any): Boolean = {
other match {
case p: IdentityPartitioner =>
this.numPartitions == p.numPartitions
case _ =>
false
}
}
}
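/**
 * Hypothetical usage sketch (not part of the original file): the key itself is used as the
 * partition index, so keys must be Ints in the range [0, numPartitions).
 */
object IdentityPartitionerExample {
  def main(args: Array[String]): Unit = {
    val partitioner = new IdentityPartitioner(4)
    // Each integer key maps to the partition with the same index.
    assert(partitioner.getPartition(0) == 0)
    assert(partitioner.getPartition(3) == 3)
    // Typically used with RDD.partitionBy when keys are precomputed partition or block ids.
  }
}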
|
mengxr/spark-als
|
src/main/scala/org/apache/spark/ml/util/IdentityPartitioner.scala
|
Scala
|
apache-2.0
| 448 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.util.Properties
import scala.collection.JavaConverters._
import scala.collection.Map
import org.json4s.JsonAST.{JArray, JInt, JString, JValue}
import org.json4s.JsonDSL._
import org.json4s.jackson.JsonMethods._
import org.scalatest.Assertions
import org.scalatest.exceptions.TestFailedException
import org.apache.spark._
import org.apache.spark.executor._
import org.apache.spark.metrics.ExecutorMetricType
import org.apache.spark.rdd.RDDOperationScope
import org.apache.spark.resource._
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.cluster.ExecutorInfo
import org.apache.spark.shuffle.MetadataFetchFailedException
import org.apache.spark.storage._
class JsonProtocolSuite extends SparkFunSuite {
import JsonProtocolSuite._
test("SparkListenerEvent") {
val stageSubmitted =
SparkListenerStageSubmitted(makeStageInfo(100, 200, 300, 400L, 500L), properties)
val stageCompleted = SparkListenerStageCompleted(makeStageInfo(101, 201, 301, 401L, 501L))
val taskStart = SparkListenerTaskStart(111, 0, makeTaskInfo(222L, 333, 1, 444L, false))
val taskGettingResult =
SparkListenerTaskGettingResult(makeTaskInfo(1000L, 2000, 5, 3000L, true))
val taskEnd = SparkListenerTaskEnd(1, 0, "ShuffleMapTask", Success,
makeTaskInfo(123L, 234, 67, 345L, false),
new ExecutorMetrics(Array(543L, 123456L, 12345L, 1234L, 123L, 12L, 432L,
321L, 654L, 765L, 256912L, 123456L, 123456L, 61728L, 30364L, 15182L)),
makeTaskMetrics(300L, 400L, 500L, 600L, 700, 800, hasHadoopInput = false, hasOutput = false))
val taskEndWithHadoopInput = SparkListenerTaskEnd(1, 0, "ShuffleMapTask", Success,
makeTaskInfo(123L, 234, 67, 345L, false),
new ExecutorMetrics(Array(543L, 123456L, 12345L, 1234L, 123L, 12L, 432L,
321L, 654L, 765L, 256912L, 123456L, 123456L, 61728L, 30364L, 15182L)),
makeTaskMetrics(300L, 400L, 500L, 600L, 700, 800, hasHadoopInput = true, hasOutput = false))
val taskEndWithOutput = SparkListenerTaskEnd(1, 0, "ResultTask", Success,
makeTaskInfo(123L, 234, 67, 345L, false),
new ExecutorMetrics(Array(543L, 123456L, 12345L, 1234L, 123L, 12L, 432L,
321L, 654L, 765L, 256912L, 123456L, 123456L, 61728L, 30364L, 15182L)),
makeTaskMetrics(300L, 400L, 500L, 600L, 700, 800, hasHadoopInput = true, hasOutput = true))
val jobStart = {
val stageIds = Seq[Int](1, 2, 3, 4)
val stageInfos = stageIds.map(x =>
makeStageInfo(x, x * 200, x * 300, x * 400L, x * 500L))
SparkListenerJobStart(10, jobSubmissionTime, stageInfos, properties)
}
val jobEnd = SparkListenerJobEnd(20, jobCompletionTime, JobSucceeded)
val environmentUpdate = SparkListenerEnvironmentUpdate(Map[String, Seq[(String, String)]](
"JVM Information" -> Seq(("GC speed", "9999 objects/s"), ("Java home", "Land of coffee")),
"Spark Properties" -> Seq(("Job throughput", "80000 jobs/s, regardless of job type")),
"Hadoop Properties" -> Seq(("hadoop.tmp.dir", "/usr/local/hadoop/tmp")),
"System Properties" -> Seq(("Username", "guest"), ("Password", "guest")),
"Classpath Entries" -> Seq(("Super library", "/tmp/super_library"))
))
val blockManagerAdded = SparkListenerBlockManagerAdded(1L,
BlockManagerId("Stars", "In your multitude...", 300), 500)
val blockManagerRemoved = SparkListenerBlockManagerRemoved(2L,
BlockManagerId("Scarce", "to be counted...", 100))
val unpersistRdd = SparkListenerUnpersistRDD(12345)
val logUrlMap = Map("stderr" -> "mystderr", "stdout" -> "mystdout").toMap
val attributes = Map("ContainerId" -> "ct1", "User" -> "spark").toMap
val resources = Map(ResourceUtils.GPU ->
new ResourceInformation(ResourceUtils.GPU, Array("0", "1")))
val applicationStart = SparkListenerApplicationStart("The winner of all", Some("appId"),
42L, "Garfield", Some("appAttempt"))
val applicationStartWithLogs = SparkListenerApplicationStart("The winner of all", Some("appId"),
42L, "Garfield", Some("appAttempt"), Some(logUrlMap))
val applicationEnd = SparkListenerApplicationEnd(42L)
val executorAdded = SparkListenerExecutorAdded(executorAddedTime, "exec1",
new ExecutorInfo("Hostee.awesome.com", 11, logUrlMap, attributes, resources.toMap, 4))
val executorRemoved = SparkListenerExecutorRemoved(executorRemovedTime, "exec2", "test reason")
val executorBlacklisted = SparkListenerExecutorBlacklisted(executorBlacklistedTime, "exec1", 22)
val executorUnblacklisted =
SparkListenerExecutorUnblacklisted(executorUnblacklistedTime, "exec1")
val nodeBlacklisted = SparkListenerNodeBlacklisted(nodeBlacklistedTime, "node1", 33)
val nodeUnblacklisted =
SparkListenerNodeUnblacklisted(nodeUnblacklistedTime, "node1")
val executorMetricsUpdate = {
// Use custom accum ID for determinism
val accumUpdates =
makeTaskMetrics(300L, 400L, 500L, 600L, 700, 800, hasHadoopInput = true, hasOutput = true)
.accumulators().map(AccumulatorSuite.makeInfo)
.zipWithIndex.map { case (a, i) => a.copy(id = i) }
val executorUpdates = new ExecutorMetrics(
Array(543L, 123456L, 12345L, 1234L, 123L, 12L, 432L,
321L, 654L, 765L, 256912L, 123456L, 123456L, 61728L, 30364L, 15182L, 10L, 90L, 2L, 20L))
SparkListenerExecutorMetricsUpdate("exec3", Seq((1L, 2, 3, accumUpdates)),
Map((0, 0) -> executorUpdates))
}
val blockUpdated =
SparkListenerBlockUpdated(BlockUpdatedInfo(BlockManagerId("Stars",
"In your multitude...", 300), RDDBlockId(0, 0), StorageLevel.MEMORY_ONLY, 100L, 0L))
val stageExecutorMetrics =
SparkListenerStageExecutorMetrics("1", 2, 3,
new ExecutorMetrics(Array(543L, 123456L, 12345L, 1234L, 123L, 12L, 432L,
321L, 654L, 765L, 256912L, 123456L, 123456L, 61728L, 30364L, 15182L, 10L, 90L, 2L, 20L)))
val rprofBuilder = new ResourceProfileBuilder()
val taskReq = new TaskResourceRequests().cpus(1).resource("gpu", 1)
val execReq =
new ExecutorResourceRequests().cores(2).resource("gpu", 2, "myscript")
rprofBuilder.require(taskReq).require(execReq)
val resourceProfile = rprofBuilder.build
resourceProfile.setResourceProfileId(21)
val resourceProfileAdded = SparkListenerResourceProfileAdded(resourceProfile)
testEvent(stageSubmitted, stageSubmittedJsonString)
testEvent(stageCompleted, stageCompletedJsonString)
testEvent(taskStart, taskStartJsonString)
testEvent(taskGettingResult, taskGettingResultJsonString)
testEvent(taskEnd, taskEndJsonString)
testEvent(taskEndWithHadoopInput, taskEndWithHadoopInputJsonString)
testEvent(taskEndWithOutput, taskEndWithOutputJsonString)
testEvent(jobStart, jobStartJsonString)
testEvent(jobEnd, jobEndJsonString)
testEvent(environmentUpdate, environmentUpdateJsonString)
testEvent(blockManagerAdded, blockManagerAddedJsonString)
testEvent(blockManagerRemoved, blockManagerRemovedJsonString)
testEvent(unpersistRdd, unpersistRDDJsonString)
testEvent(applicationStart, applicationStartJsonString)
testEvent(applicationStartWithLogs, applicationStartJsonWithLogUrlsString)
testEvent(applicationEnd, applicationEndJsonString)
testEvent(executorAdded, executorAddedJsonString)
testEvent(executorRemoved, executorRemovedJsonString)
testEvent(executorBlacklisted, executorBlacklistedJsonString)
testEvent(executorUnblacklisted, executorUnblacklistedJsonString)
testEvent(nodeBlacklisted, nodeBlacklistedJsonString)
testEvent(nodeUnblacklisted, nodeUnblacklistedJsonString)
testEvent(executorMetricsUpdate, executorMetricsUpdateJsonString)
testEvent(blockUpdated, blockUpdatedJsonString)
testEvent(stageExecutorMetrics, stageExecutorMetricsJsonString)
testEvent(resourceProfileAdded, resourceProfileJsonString)
}
test("Dependent Classes") {
val logUrlMap = Map("stderr" -> "mystderr", "stdout" -> "mystdout").toMap
val attributes = Map("ContainerId" -> "ct1", "User" -> "spark").toMap
testRDDInfo(makeRddInfo(2, 3, 4, 5L, 6L))
testStageInfo(makeStageInfo(10, 20, 30, 40L, 50L))
testTaskInfo(makeTaskInfo(999L, 888, 55, 777L, false))
testTaskMetrics(makeTaskMetrics(
33333L, 44444L, 55555L, 66666L, 7, 8, hasHadoopInput = false, hasOutput = false))
testBlockManagerId(BlockManagerId("Hong", "Kong", 500))
testExecutorInfo(new ExecutorInfo("host", 43, logUrlMap, attributes))
// StorageLevel
testStorageLevel(StorageLevel.NONE)
testStorageLevel(StorageLevel.DISK_ONLY)
testStorageLevel(StorageLevel.DISK_ONLY_2)
testStorageLevel(StorageLevel.MEMORY_ONLY)
testStorageLevel(StorageLevel.MEMORY_ONLY_2)
testStorageLevel(StorageLevel.MEMORY_ONLY_SER)
testStorageLevel(StorageLevel.MEMORY_ONLY_SER_2)
testStorageLevel(StorageLevel.MEMORY_AND_DISK)
testStorageLevel(StorageLevel.MEMORY_AND_DISK_2)
testStorageLevel(StorageLevel.MEMORY_AND_DISK_SER)
testStorageLevel(StorageLevel.MEMORY_AND_DISK_SER_2)
// JobResult
val exception = new Exception("Out of Memory! Please restock film.")
exception.setStackTrace(stackTrace)
val jobFailed = JobFailed(exception)
testJobResult(JobSucceeded)
testJobResult(jobFailed)
// TaskEndReason
val fetchFailed = FetchFailed(BlockManagerId("With or", "without you", 15), 17, 16L, 18, 19,
"Some exception")
val fetchMetadataFailed = new MetadataFetchFailedException(17,
19, "metadata Fetch failed exception").toTaskFailedReason
val exceptionFailure = new ExceptionFailure(exception, Seq.empty[AccumulableInfo])
testTaskEndReason(Success)
testTaskEndReason(Resubmitted)
testTaskEndReason(fetchFailed)
testTaskEndReason(fetchMetadataFailed)
testTaskEndReason(exceptionFailure)
testTaskEndReason(TaskResultLost)
testTaskEndReason(TaskKilled("test"))
testTaskEndReason(TaskCommitDenied(2, 3, 4))
testTaskEndReason(ExecutorLostFailure("100", true, Some("Induced failure")))
testTaskEndReason(UnknownReason)
// BlockId
testBlockId(RDDBlockId(1, 2))
testBlockId(ShuffleBlockId(1, 2, 3))
testBlockId(BroadcastBlockId(1L, "insert_words_of_wisdom_here"))
testBlockId(TaskResultBlockId(1L))
testBlockId(StreamBlockId(1, 2L))
}
/* ============================== *
| Backward compatibility tests |
* ============================== */
test("ExceptionFailure backward compatibility: full stack trace") {
val exceptionFailure = ExceptionFailure("To be", "or not to be", stackTrace, null, None)
val oldEvent = JsonProtocol.taskEndReasonToJson(exceptionFailure)
.removeField({ _._1 == "Full Stack Trace" })
assertEquals(exceptionFailure, JsonProtocol.taskEndReasonFromJson(oldEvent))
}
test("StageInfo backward compatibility (details, accumulables)") {
val info = makeStageInfo(1, 2, 3, 4L, 5L)
val newJson = JsonProtocol.stageInfoToJson(info)
// Fields added after 1.0.0.
assert(info.details.nonEmpty)
assert(info.accumulables.nonEmpty)
val oldJson = newJson
.removeField { case (field, _) => field == "Details" }
.removeField { case (field, _) => field == "Accumulables" }
val newInfo = JsonProtocol.stageInfoFromJson(oldJson)
assert(info.name === newInfo.name)
assert("" === newInfo.details)
assert(0 === newInfo.accumulables.size)
}
test("StageInfo resourceProfileId") {
val info = makeStageInfo(1, 2, 3, 4L, 5L, 5)
val json = JsonProtocol.stageInfoToJson(info)
// Fields added after 1.0.0.
assert(info.details.nonEmpty)
assert(info.resourceProfileId === 5)
val newInfo = JsonProtocol.stageInfoFromJson(json)
assert(info.name === newInfo.name)
assert(5 === newInfo.resourceProfileId)
}
test("InputMetrics backward compatibility") {
// InputMetrics were added after 1.0.1.
val metrics = makeTaskMetrics(1L, 2L, 3L, 4L, 5, 6, hasHadoopInput = true, hasOutput = false)
val newJson = JsonProtocol.taskMetricsToJson(metrics)
val oldJson = newJson.removeField { case (field, _) => field == "Input Metrics" }
val newMetrics = JsonProtocol.taskMetricsFromJson(oldJson)
}
test("Input/Output records backwards compatibility") {
// records read were added after 1.2
val metrics = makeTaskMetrics(1L, 2L, 3L, 4L, 5, 6,
hasHadoopInput = true, hasOutput = true, hasRecords = false)
val newJson = JsonProtocol.taskMetricsToJson(metrics)
val oldJson = newJson.removeField { case (field, _) => field == "Records Read" }
.removeField { case (field, _) => field == "Records Written" }
val newMetrics = JsonProtocol.taskMetricsFromJson(oldJson)
assert(newMetrics.inputMetrics.recordsRead == 0)
assert(newMetrics.outputMetrics.recordsWritten == 0)
}
test("Shuffle Read/Write records backwards compatibility") {
// records read were added after 1.2
val metrics = makeTaskMetrics(1L, 2L, 3L, 4L, 5, 6,
hasHadoopInput = false, hasOutput = false, hasRecords = false)
val newJson = JsonProtocol.taskMetricsToJson(metrics)
val oldJson = newJson.removeField { case (field, _) => field == "Total Records Read" }
.removeField { case (field, _) => field == "Shuffle Records Written" }
val newMetrics = JsonProtocol.taskMetricsFromJson(oldJson)
assert(newMetrics.shuffleReadMetrics.recordsRead == 0)
assert(newMetrics.shuffleWriteMetrics.recordsWritten == 0)
}
test("OutputMetrics backward compatibility") {
// OutputMetrics were added after 1.1
val metrics = makeTaskMetrics(1L, 2L, 3L, 4L, 5, 6, hasHadoopInput = false, hasOutput = true)
val newJson = JsonProtocol.taskMetricsToJson(metrics)
val oldJson = newJson.removeField { case (field, _) => field == "Output Metrics" }
val newMetrics = JsonProtocol.taskMetricsFromJson(oldJson)
}
test("BlockManager events backward compatibility") {
// SparkListenerBlockManagerAdded/Removed in Spark 1.0.0 do not have a "time" property.
val blockManagerAdded = SparkListenerBlockManagerAdded(1L,
BlockManagerId("Stars", "In your multitude...", 300), 500)
val blockManagerRemoved = SparkListenerBlockManagerRemoved(2L,
BlockManagerId("Scarce", "to be counted...", 100))
val oldBmAdded = JsonProtocol.blockManagerAddedToJson(blockManagerAdded)
.removeField({ _._1 == "Timestamp" })
val deserializedBmAdded = JsonProtocol.blockManagerAddedFromJson(oldBmAdded)
assert(SparkListenerBlockManagerAdded(-1L, blockManagerAdded.blockManagerId,
blockManagerAdded.maxMem) === deserializedBmAdded)
val oldBmRemoved = JsonProtocol.blockManagerRemovedToJson(blockManagerRemoved)
.removeField({ _._1 == "Timestamp" })
val deserializedBmRemoved = JsonProtocol.blockManagerRemovedFromJson(oldBmRemoved)
assert(SparkListenerBlockManagerRemoved(-1L, blockManagerRemoved.blockManagerId) ===
deserializedBmRemoved)
}
test("FetchFailed backwards compatibility") {
// FetchFailed in Spark 1.1.0 does not have a "Message" property.
val fetchFailed = FetchFailed(BlockManagerId("With or", "without you", 15), 17, 16L, 18, 19,
"ignored")
val oldEvent = JsonProtocol.taskEndReasonToJson(fetchFailed)
.removeField({ _._1 == "Message" })
val expectedFetchFailed = FetchFailed(BlockManagerId("With or", "without you", 15), 17, 16L,
18, 19, "Unknown reason")
assert(expectedFetchFailed === JsonProtocol.taskEndReasonFromJson(oldEvent))
}
test("ShuffleReadMetrics: Local bytes read backwards compatibility") {
// Metrics about local shuffle bytes read were added in 1.3.1.
val metrics = makeTaskMetrics(1L, 2L, 3L, 4L, 5, 6,
hasHadoopInput = false, hasOutput = false, hasRecords = false)
val newJson = JsonProtocol.taskMetricsToJson(metrics)
val oldJson = newJson.removeField { case (field, _) => field == "Local Bytes Read" }
val newMetrics = JsonProtocol.taskMetricsFromJson(oldJson)
assert(newMetrics.shuffleReadMetrics.localBytesRead == 0)
}
test("SparkListenerApplicationStart backwards compatibility") {
// SparkListenerApplicationStart in Spark 1.0.0 do not have an "appId" property.
// SparkListenerApplicationStart pre-Spark 1.4 does not have "appAttemptId".
    // SparkListenerApplicationStart pre-Spark 1.5 does not have "driverLogs".
val applicationStart = SparkListenerApplicationStart("test", None, 1L, "user", None, None)
val oldEvent = JsonProtocol.applicationStartToJson(applicationStart)
.removeField({ _._1 == "App ID" })
.removeField({ _._1 == "App Attempt ID" })
.removeField({ _._1 == "Driver Logs"})
assert(applicationStart === JsonProtocol.applicationStartFromJson(oldEvent))
}
test("ExecutorLostFailure backward compatibility") {
// ExecutorLostFailure in Spark 1.1.0 does not have an "Executor ID" property.
val executorLostFailure = ExecutorLostFailure("100", true, Some("Induced failure"))
val oldEvent = JsonProtocol.taskEndReasonToJson(executorLostFailure)
.removeField({ _._1 == "Executor ID" })
val expectedExecutorLostFailure = ExecutorLostFailure("Unknown", true, Some("Induced failure"))
assert(expectedExecutorLostFailure === JsonProtocol.taskEndReasonFromJson(oldEvent))
}
test("SparkListenerJobStart backward compatibility") {
// Prior to Spark 1.2.0, SparkListenerJobStart did not have a "Stage Infos" property.
val stageIds = Seq[Int](1, 2, 3, 4)
val stageInfos = stageIds.map(x => makeStageInfo(x, x * 200, x * 300, x * 400L, x * 500L))
val dummyStageInfos =
stageIds.map(id => new StageInfo(id, 0, "unknown", 0, Seq.empty, Seq.empty, "unknown",
resourceProfileId = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID))
val jobStart = SparkListenerJobStart(10, jobSubmissionTime, stageInfos, properties)
val oldEvent = JsonProtocol.jobStartToJson(jobStart).removeField({_._1 == "Stage Infos"})
val expectedJobStart =
SparkListenerJobStart(10, jobSubmissionTime, dummyStageInfos, properties)
assertEquals(expectedJobStart, JsonProtocol.jobStartFromJson(oldEvent))
}
test("SparkListenerJobStart and SparkListenerJobEnd backward compatibility") {
// Prior to Spark 1.3.0, SparkListenerJobStart did not have a "Submission Time" property.
// Also, SparkListenerJobEnd did not have a "Completion Time" property.
val stageIds = Seq[Int](1, 2, 3, 4)
val stageInfos = stageIds.map(x => makeStageInfo(x * 10, x * 20, x * 30, x * 40L, x * 50L))
val jobStart = SparkListenerJobStart(11, jobSubmissionTime, stageInfos, properties)
val oldStartEvent = JsonProtocol.jobStartToJson(jobStart)
.removeField({ _._1 == "Submission Time"})
val expectedJobStart = SparkListenerJobStart(11, -1, stageInfos, properties)
assertEquals(expectedJobStart, JsonProtocol.jobStartFromJson(oldStartEvent))
val jobEnd = SparkListenerJobEnd(11, jobCompletionTime, JobSucceeded)
val oldEndEvent = JsonProtocol.jobEndToJson(jobEnd)
.removeField({ _._1 == "Completion Time"})
val expectedJobEnd = SparkListenerJobEnd(11, -1, JobSucceeded)
assertEquals(expectedJobEnd, JsonProtocol.jobEndFromJson(oldEndEvent))
}
test("RDDInfo backward compatibility (scope, parent IDs, callsite)") {
// "Scope" and "Parent IDs" were introduced in Spark 1.4.0
// "Callsite" was introduced in Spark 1.6.0
val rddInfo = new RDDInfo(1, "one", 100, StorageLevel.NONE, false, Seq(1, 6, 8),
"callsite", Some(new RDDOperationScope("fable")))
val oldRddInfoJson = JsonProtocol.rddInfoToJson(rddInfo)
.removeField({ _._1 == "Parent IDs"})
.removeField({ _._1 == "Scope"})
.removeField({ _._1 == "Callsite"})
val expectedRddInfo = new RDDInfo(
1, "one", 100, StorageLevel.NONE, false, Seq.empty, "", scope = None)
assertEquals(expectedRddInfo, JsonProtocol.rddInfoFromJson(oldRddInfoJson))
}
test("StageInfo backward compatibility (parent IDs)") {
// Prior to Spark 1.4.0, StageInfo did not have the "Parent IDs" property
val stageInfo = new StageInfo(1, 1, "me-stage", 1, Seq.empty, Seq(1, 2, 3), "details",
resourceProfileId = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID)
val oldStageInfo = JsonProtocol.stageInfoToJson(stageInfo).removeField({ _._1 == "Parent IDs"})
val expectedStageInfo = new StageInfo(1, 1, "me-stage", 1, Seq.empty, Seq.empty, "details",
resourceProfileId = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID)
assertEquals(expectedStageInfo, JsonProtocol.stageInfoFromJson(oldStageInfo))
}
// `TaskCommitDenied` was added in 1.3.0 but JSON de/serialization logic was added in 1.5.1
test("TaskCommitDenied backward compatibility") {
val denied = TaskCommitDenied(1, 2, 3)
val oldDenied = JsonProtocol.taskEndReasonToJson(denied)
.removeField({ _._1 == "Job ID" })
.removeField({ _._1 == "Partition ID" })
.removeField({ _._1 == "Attempt Number" })
val expectedDenied = TaskCommitDenied(-1, -1, -1)
assertEquals(expectedDenied, JsonProtocol.taskEndReasonFromJson(oldDenied))
}
test("AccumulableInfo backward compatibility") {
// "Internal" property of AccumulableInfo was added in 1.5.1
val accumulableInfo = makeAccumulableInfo(1, internal = true, countFailedValues = true)
val accumulableInfoJson = JsonProtocol.accumulableInfoToJson(accumulableInfo)
val oldJson = accumulableInfoJson.removeField({ _._1 == "Internal" })
val oldInfo = JsonProtocol.accumulableInfoFromJson(oldJson)
assert(!oldInfo.internal)
// "Count Failed Values" property of AccumulableInfo was added in 2.0.0
val oldJson2 = accumulableInfoJson.removeField({ _._1 == "Count Failed Values" })
val oldInfo2 = JsonProtocol.accumulableInfoFromJson(oldJson2)
assert(!oldInfo2.countFailedValues)
// "Metadata" property of AccumulableInfo was added in 2.0.0
val oldJson3 = accumulableInfoJson.removeField({ _._1 == "Metadata" })
val oldInfo3 = JsonProtocol.accumulableInfoFromJson(oldJson3)
assert(oldInfo3.metadata.isEmpty)
}
test("ExceptionFailure backward compatibility: accumulator updates") {
// "Task Metrics" was replaced with "Accumulator Updates" in 2.0.0. For older event logs,
// we should still be able to fall back to constructing the accumulator updates from the
// "Task Metrics" field, if it exists.
val tm = makeTaskMetrics(1L, 2L, 3L, 4L, 5, 6, hasHadoopInput = true, hasOutput = true)
val tmJson = JsonProtocol.taskMetricsToJson(tm)
val accumUpdates = tm.accumulators().map(AccumulatorSuite.makeInfo)
val exception = new SparkException("sentimental")
val exceptionFailure = new ExceptionFailure(exception, accumUpdates)
val exceptionFailureJson = JsonProtocol.taskEndReasonToJson(exceptionFailure)
val tmFieldJson: JValue = "Task Metrics" -> tmJson
val oldExceptionFailureJson: JValue =
exceptionFailureJson.removeField { _._1 == "Accumulator Updates" }.merge(tmFieldJson)
val oldExceptionFailure =
JsonProtocol.taskEndReasonFromJson(oldExceptionFailureJson).asInstanceOf[ExceptionFailure]
assert(exceptionFailure.className === oldExceptionFailure.className)
assert(exceptionFailure.description === oldExceptionFailure.description)
assertSeqEquals[StackTraceElement](
exceptionFailure.stackTrace, oldExceptionFailure.stackTrace, assertStackTraceElementEquals)
assert(exceptionFailure.fullStackTrace === oldExceptionFailure.fullStackTrace)
assertSeqEquals[AccumulableInfo](
exceptionFailure.accumUpdates, oldExceptionFailure.accumUpdates, (x, y) => x == y)
}
test("ExecutorMetricsUpdate backward compatibility: executor metrics update") {
// executorMetricsUpdate was added in 2.4.0.
val executorMetricsUpdate = makeExecutorMetricsUpdate("1", true, true)
val oldExecutorMetricsUpdateJson =
JsonProtocol.executorMetricsUpdateToJson(executorMetricsUpdate)
.removeField( _._1 == "Executor Metrics Updated")
val expectedExecutorMetricsUpdate = makeExecutorMetricsUpdate("1", true, false)
assertEquals(expectedExecutorMetricsUpdate,
JsonProtocol.executorMetricsUpdateFromJson(oldExecutorMetricsUpdateJson))
}
test("executorMetricsFromJson backward compatibility: handle missing metrics") {
// any missing metrics should be set to 0
val executorMetrics = new ExecutorMetrics(Array(12L, 23L, 45L, 67L, 78L, 89L,
90L, 123L, 456L, 789L, 40L, 20L, 20L, 10L, 20L, 10L))
val oldExecutorMetricsJson =
JsonProtocol.executorMetricsToJson(executorMetrics)
.removeField( _._1 == "MappedPoolMemory")
val expectedExecutorMetrics = new ExecutorMetrics(Array(12L, 23L, 45L, 67L,
78L, 89L, 90L, 123L, 456L, 0L, 40L, 20L, 20L, 10L, 20L, 10L))
assertEquals(expectedExecutorMetrics,
JsonProtocol.executorMetricsFromJson(oldExecutorMetricsJson))
}
test("AccumulableInfo value de/serialization") {
import InternalAccumulator._
val blocks = Seq[(BlockId, BlockStatus)](
(TestBlockId("meebo"), BlockStatus(StorageLevel.MEMORY_ONLY, 1L, 2L)),
(TestBlockId("feebo"), BlockStatus(StorageLevel.DISK_ONLY, 3L, 4L)))
val blocksJson = JArray(blocks.toList.map { case (id, status) =>
("Block ID" -> id.toString) ~
("Status" -> JsonProtocol.blockStatusToJson(status))
})
testAccumValue(Some(RESULT_SIZE), 3L, JInt(3))
testAccumValue(Some(shuffleRead.REMOTE_BLOCKS_FETCHED), 2, JInt(2))
testAccumValue(Some(UPDATED_BLOCK_STATUSES), blocks.asJava, blocksJson)
// For anything else, we just cast the value to a string
testAccumValue(Some("anything"), blocks, JString(blocks.toString))
testAccumValue(Some("anything"), 123, JString("123"))
}
/** Create an AccumulableInfo and verify we can serialize and deserialize it. */
private def testAccumulableInfo(
name: String,
value: Option[Any],
expectedValue: Option[Any]): Unit = {
val isInternal = name.startsWith(InternalAccumulator.METRICS_PREFIX)
val accum = AccumulableInfo(
123L,
Some(name),
update = value,
value = value,
internal = isInternal,
countFailedValues = false)
val json = JsonProtocol.accumulableInfoToJson(accum)
val newAccum = JsonProtocol.accumulableInfoFromJson(json)
assert(newAccum == accum.copy(update = expectedValue, value = expectedValue))
}
test("SPARK-31923: unexpected value type of internal accumulator") {
// Because a user may use `METRICS_PREFIX` in an accumulator name, we should test unexpected
// types to make sure we don't crash.
import InternalAccumulator.METRICS_PREFIX
testAccumulableInfo(
METRICS_PREFIX + "fooString",
value = Some("foo"),
expectedValue = None)
testAccumulableInfo(
METRICS_PREFIX + "fooList",
value = Some(java.util.Arrays.asList("string")),
expectedValue = Some(java.util.Collections.emptyList())
)
val blocks = Seq(
(TestBlockId("block1"), BlockStatus(StorageLevel.MEMORY_ONLY, 1L, 2L)),
(TestBlockId("block2"), BlockStatus(StorageLevel.DISK_ONLY, 3L, 4L)))
testAccumulableInfo(
METRICS_PREFIX + "fooList",
value = Some(java.util.Arrays.asList(
"string",
blocks(0),
blocks(1))),
expectedValue = Some(blocks.asJava)
)
testAccumulableInfo(
METRICS_PREFIX + "fooSet",
value = Some(Set("foo")),
expectedValue = None)
}
test("SPARK-30936: forwards compatibility - ignore unknown fields") {
val expected = TestListenerEvent("foo", 123)
val unknownFieldsJson =
"""{
| "Event" : "org.apache.spark.util.TestListenerEvent",
| "foo" : "foo",
| "bar" : 123,
| "unknown" : "unknown"
|}""".stripMargin
assert(JsonProtocol.sparkEventFromJson(parse(unknownFieldsJson)) === expected)
}
test("SPARK-30936: backwards compatibility - set default values for missing fields") {
val expected = TestListenerEvent("foo", 0)
val unknownFieldsJson =
"""{
| "Event" : "org.apache.spark.util.TestListenerEvent",
| "foo" : "foo"
|}""".stripMargin
assert(JsonProtocol.sparkEventFromJson(parse(unknownFieldsJson)) === expected)
}
}
private[spark] object JsonProtocolSuite extends Assertions {
import InternalAccumulator._
private val jobSubmissionTime = 1421191042750L
private val jobCompletionTime = 1421191296660L
private val executorAddedTime = 1421458410000L
private val executorRemovedTime = 1421458922000L
private val executorBlacklistedTime = 1421458932000L
private val executorUnblacklistedTime = 1421458942000L
private val nodeBlacklistedTime = 1421458952000L
private val nodeUnblacklistedTime = 1421458962000L
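// Round-trip helpers: each testX method below serializes a value to JSON via JsonProtocol,
// parses it back, and asserts the result equals the original (testEvent additionally checks
// the rendered JSON against an expected JSON string).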
private def testEvent(event: SparkListenerEvent, jsonString: String): Unit = {
val actualJsonString = compact(render(JsonProtocol.sparkEventToJson(event)))
val newEvent = JsonProtocol.sparkEventFromJson(parse(actualJsonString))
assertJsonStringEquals(jsonString, actualJsonString, event.getClass.getSimpleName)
assertEquals(event, newEvent)
}
private def testRDDInfo(info: RDDInfo): Unit = {
val newInfo = JsonProtocol.rddInfoFromJson(JsonProtocol.rddInfoToJson(info))
assertEquals(info, newInfo)
}
private def testStageInfo(info: StageInfo): Unit = {
val newInfo = JsonProtocol.stageInfoFromJson(JsonProtocol.stageInfoToJson(info))
assertEquals(info, newInfo)
}
private def testStorageLevel(level: StorageLevel): Unit = {
val newLevel = JsonProtocol.storageLevelFromJson(JsonProtocol.storageLevelToJson(level))
assertEquals(level, newLevel)
}
private def testTaskMetrics(metrics: TaskMetrics): Unit = {
val newMetrics = JsonProtocol.taskMetricsFromJson(JsonProtocol.taskMetricsToJson(metrics))
assertEquals(metrics, newMetrics)
}
private def testBlockManagerId(id: BlockManagerId): Unit = {
val newId = JsonProtocol.blockManagerIdFromJson(JsonProtocol.blockManagerIdToJson(id))
assert(id === newId)
}
private def testTaskInfo(info: TaskInfo): Unit = {
val newInfo = JsonProtocol.taskInfoFromJson(JsonProtocol.taskInfoToJson(info))
assertEquals(info, newInfo)
}
private def testJobResult(result: JobResult): Unit = {
val newResult = JsonProtocol.jobResultFromJson(JsonProtocol.jobResultToJson(result))
assertEquals(result, newResult)
}
private def testTaskEndReason(reason: TaskEndReason): Unit = {
val newReason = JsonProtocol.taskEndReasonFromJson(JsonProtocol.taskEndReasonToJson(reason))
assertEquals(reason, newReason)
}
private def testBlockId(blockId: BlockId): Unit = {
val newBlockId = BlockId(blockId.toString)
assert(blockId === newBlockId)
}
private def testExecutorInfo(info: ExecutorInfo): Unit = {
val newInfo = JsonProtocol.executorInfoFromJson(JsonProtocol.executorInfoToJson(info))
assertEquals(info, newInfo)
}
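// Checks accumulator value serialization: internal metric accumulators keep their typed
// JSON form, while values of all other accumulators are serialized as strings.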
private def testAccumValue(name: Option[String], value: Any, expectedJson: JValue): Unit = {
val json = JsonProtocol.accumValueToJson(name, value)
assert(json === expectedJson)
val newValue = JsonProtocol.accumValueFromJson(name, json)
val expectedValue = if (name.exists(_.startsWith(METRICS_PREFIX))) value else value.toString
assert(newValue === expectedValue)
}
/** -------------------------------- *
| Util methods for comparing events |
* --------------------------------- */
private[spark] def assertEquals(event1: SparkListenerEvent, event2: SparkListenerEvent): Unit = {
(event1, event2) match {
case (e1: SparkListenerStageSubmitted, e2: SparkListenerStageSubmitted) =>
assert(e1.properties === e2.properties)
assertEquals(e1.stageInfo, e2.stageInfo)
case (e1: SparkListenerStageCompleted, e2: SparkListenerStageCompleted) =>
assertEquals(e1.stageInfo, e2.stageInfo)
case (e1: SparkListenerTaskStart, e2: SparkListenerTaskStart) =>
assert(e1.stageId === e2.stageId)
assertEquals(e1.taskInfo, e2.taskInfo)
case (e1: SparkListenerTaskGettingResult, e2: SparkListenerTaskGettingResult) =>
assertEquals(e1.taskInfo, e2.taskInfo)
case (e1: SparkListenerTaskEnd, e2: SparkListenerTaskEnd) =>
assert(e1.stageId === e2.stageId)
assert(e1.stageAttemptId === e2.stageAttemptId)
assert(e1.taskType === e2.taskType)
assertEquals(e1.reason, e2.reason)
assertEquals(e1.taskInfo, e2.taskInfo)
assertEquals(e1.taskExecutorMetrics, e2.taskExecutorMetrics)
assertEquals(e1.taskMetrics, e2.taskMetrics)
case (e1: SparkListenerJobStart, e2: SparkListenerJobStart) =>
assert(e1.jobId === e2.jobId)
assert(e1.properties === e2.properties)
assert(e1.stageIds === e2.stageIds)
case (e1: SparkListenerJobEnd, e2: SparkListenerJobEnd) =>
assert(e1.jobId === e2.jobId)
assertEquals(e1.jobResult, e2.jobResult)
case (e1: SparkListenerEnvironmentUpdate, e2: SparkListenerEnvironmentUpdate) =>
assertEquals(e1.environmentDetails, e2.environmentDetails)
case (e1: SparkListenerExecutorAdded, e2: SparkListenerExecutorAdded) =>
assert(e1.executorId === e2.executorId)
assertEquals(e1.executorInfo, e2.executorInfo)
case (e1: SparkListenerExecutorRemoved, e2: SparkListenerExecutorRemoved) =>
assert(e1.executorId === e2.executorId)
case (e1: SparkListenerExecutorMetricsUpdate, e2: SparkListenerExecutorMetricsUpdate) =>
assert(e1.execId === e2.execId)
assertSeqEquals[(Long, Int, Int, Seq[AccumulableInfo])](
e1.accumUpdates,
e2.accumUpdates,
(a, b) => {
val (taskId1, stageId1, stageAttemptId1, updates1) = a
val (taskId2, stageId2, stageAttemptId2, updates2) = b
assert(taskId1 === taskId2)
assert(stageId1 === stageId2)
assert(stageAttemptId1 === stageAttemptId2)
assertSeqEquals[AccumulableInfo](updates1, updates2, (a, b) => a.equals(b))
})
assertSeqEquals[((Int, Int), ExecutorMetrics)](
e1.executorUpdates.toSeq.sortBy(_._1),
e2.executorUpdates.toSeq.sortBy(_._1),
(a, b) => {
val (k1, v1) = a
val (k2, v2) = b
assert(k1 === k2)
assertEquals(v1, v2)
}
)
case (e1: SparkListenerStageExecutorMetrics, e2: SparkListenerStageExecutorMetrics) =>
assert(e1.execId === e2.execId)
assert(e1.stageId === e2.stageId)
assert(e1.stageAttemptId === e2.stageAttemptId)
assertEquals(e1.executorMetrics, e2.executorMetrics)
case (e1, e2) if e1.getClass == e2.getClass =>
assert(e1 === e2)
case _ => fail("Events don't match in types!")
}
}
private def assertEquals(info1: StageInfo, info2: StageInfo): Unit = {
assert(info1.stageId === info2.stageId)
assert(info1.name === info2.name)
assert(info1.numTasks === info2.numTasks)
assert(info1.submissionTime === info2.submissionTime)
assert(info1.completionTime === info2.completionTime)
assert(info1.rddInfos.size === info2.rddInfos.size)
(0 until info1.rddInfos.size).foreach { i =>
assertEquals(info1.rddInfos(i), info2.rddInfos(i))
}
assert(info1.accumulables === info2.accumulables)
assert(info1.details === info2.details)
}
private def assertEquals(info1: RDDInfo, info2: RDDInfo): Unit = {
assert(info1.id === info2.id)
assert(info1.name === info2.name)
assert(info1.numPartitions === info2.numPartitions)
assert(info1.numCachedPartitions === info2.numCachedPartitions)
assert(info1.memSize === info2.memSize)
assert(info1.diskSize === info2.diskSize)
assertEquals(info1.storageLevel, info2.storageLevel)
}
private def assertEquals(level1: StorageLevel, level2: StorageLevel): Unit = {
assert(level1.useDisk === level2.useDisk)
assert(level1.useMemory === level2.useMemory)
assert(level1.deserialized === level2.deserialized)
assert(level1.replication === level2.replication)
}
private def assertEquals(info1: TaskInfo, info2: TaskInfo): Unit = {
assert(info1.taskId === info2.taskId)
assert(info1.index === info2.index)
assert(info1.attemptNumber === info2.attemptNumber)
assert(info1.launchTime === info2.launchTime)
assert(info1.executorId === info2.executorId)
assert(info1.host === info2.host)
assert(info1.taskLocality === info2.taskLocality)
assert(info1.speculative === info2.speculative)
assert(info1.gettingResultTime === info2.gettingResultTime)
assert(info1.finishTime === info2.finishTime)
assert(info1.failed === info2.failed)
assert(info1.accumulables === info2.accumulables)
}
private def assertEquals(info1: ExecutorInfo, info2: ExecutorInfo): Unit = {
assert(info1.executorHost == info2.executorHost)
assert(info1.totalCores == info2.totalCores)
}
private def assertEquals(metrics1: TaskMetrics, metrics2: TaskMetrics): Unit = {
assert(metrics1.executorDeserializeTime === metrics2.executorDeserializeTime)
assert(metrics1.executorDeserializeCpuTime === metrics2.executorDeserializeCpuTime)
assert(metrics1.executorRunTime === metrics2.executorRunTime)
assert(metrics1.executorCpuTime === metrics2.executorCpuTime)
assert(metrics1.resultSize === metrics2.resultSize)
assert(metrics1.jvmGCTime === metrics2.jvmGCTime)
assert(metrics1.resultSerializationTime === metrics2.resultSerializationTime)
assert(metrics1.memoryBytesSpilled === metrics2.memoryBytesSpilled)
assert(metrics1.diskBytesSpilled === metrics2.diskBytesSpilled)
assertEquals(metrics1.shuffleReadMetrics, metrics2.shuffleReadMetrics)
assertEquals(metrics1.shuffleWriteMetrics, metrics2.shuffleWriteMetrics)
assertEquals(metrics1.inputMetrics, metrics2.inputMetrics)
assertBlocksEquals(metrics1.updatedBlockStatuses, metrics2.updatedBlockStatuses)
}
private def assertEquals(metrics1: ShuffleReadMetrics, metrics2: ShuffleReadMetrics): Unit = {
assert(metrics1.remoteBlocksFetched === metrics2.remoteBlocksFetched)
assert(metrics1.localBlocksFetched === metrics2.localBlocksFetched)
assert(metrics1.fetchWaitTime === metrics2.fetchWaitTime)
assert(metrics1.remoteBytesRead === metrics2.remoteBytesRead)
}
private def assertEquals(metrics1: ShuffleWriteMetrics, metrics2: ShuffleWriteMetrics): Unit = {
assert(metrics1.bytesWritten === metrics2.bytesWritten)
assert(metrics1.writeTime === metrics2.writeTime)
}
private def assertEquals(metrics1: InputMetrics, metrics2: InputMetrics): Unit = {
assert(metrics1.bytesRead === metrics2.bytesRead)
}
private def assertEquals(result1: JobResult, result2: JobResult): Unit = {
(result1, result2) match {
case (JobSucceeded, JobSucceeded) =>
case (r1: JobFailed, r2: JobFailed) =>
assertEquals(r1.exception, r2.exception)
case _ => fail("Job results don't match in types!")
}
}
private def assertEquals(reason1: TaskEndReason, reason2: TaskEndReason): Unit = {
(reason1, reason2) match {
case (Success, Success) =>
case (Resubmitted, Resubmitted) =>
case (r1: FetchFailed, r2: FetchFailed) =>
assert(r1.shuffleId === r2.shuffleId)
assert(r1.mapId === r2.mapId)
assert(r1.mapIndex === r2.mapIndex)
assert(r1.reduceId === r2.reduceId)
assert(r1.bmAddress === r2.bmAddress)
assert(r1.message === r2.message)
case (r1: ExceptionFailure, r2: ExceptionFailure) =>
assert(r1.className === r2.className)
assert(r1.description === r2.description)
assertSeqEquals(r1.stackTrace, r2.stackTrace, assertStackTraceElementEquals)
assert(r1.fullStackTrace === r2.fullStackTrace)
assertSeqEquals[AccumulableInfo](r1.accumUpdates, r2.accumUpdates, (a, b) => a.equals(b))
case (TaskResultLost, TaskResultLost) =>
case (r1: TaskKilled, r2: TaskKilled) =>
assert(r1.reason == r2.reason)
case (TaskCommitDenied(jobId1, partitionId1, attemptNumber1),
TaskCommitDenied(jobId2, partitionId2, attemptNumber2)) =>
assert(jobId1 === jobId2)
assert(partitionId1 === partitionId2)
assert(attemptNumber1 === attemptNumber2)
case (ExecutorLostFailure(execId1, exit1CausedByApp, reason1),
ExecutorLostFailure(execId2, exit2CausedByApp, reason2)) =>
assert(execId1 === execId2)
assert(exit1CausedByApp === exit2CausedByApp)
assert(reason1 === reason2)
case (UnknownReason, UnknownReason) =>
case _ => fail("Task end reasons don't match in types!")
}
}
private def assertEquals(
details1: Map[String, Seq[(String, String)]],
details2: Map[String, Seq[(String, String)]]): Unit = {
details1.zip(details2).foreach {
case ((key1, values1: Seq[(String, String)]), (key2, values2: Seq[(String, String)])) =>
assert(key1 === key2)
values1.zip(values2).foreach { case (v1, v2) => assert(v1 === v2) }
}
}
private def assertEquals(exception1: Exception, exception2: Exception): Unit = {
assert(exception1.getMessage === exception2.getMessage)
assertSeqEquals(
exception1.getStackTrace,
exception2.getStackTrace,
assertStackTraceElementEquals)
}
private def assertEquals(metrics1: ExecutorMetrics, metrics2: ExecutorMetrics): Unit = {
ExecutorMetricType.metricToOffset.foreach { metric =>
assert(metrics1.getMetricValue(metric._1) === metrics2.getMetricValue(metric._1))
}
}
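// Compares two JSON strings structurally; on mismatch, pretty-prints both sides before
// failing so the difference is easier to read.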
private def assertJsonStringEquals(expected: String, actual: String, metadata: String): Unit = {
val expectedJson = parse(expected)
val actualJson = parse(actual)
if (expectedJson != actualJson) {
// scalastyle:off
// This prints something useful if the JSON strings don't match
println(s"=== EXPECTED ===\n${pretty(expectedJson)}\n")
println(s"=== ACTUAL ===\n${pretty(actualJson)}\n")
// scalastyle:on
throw new TestFailedException(s"$metadata JSON did not equal", 1)
}
}
private def assertSeqEquals[T](seq1: Seq[T], seq2: Seq[T], assertEquals: (T, T) => Unit): Unit = {
assert(seq1.length === seq2.length)
seq1.zip(seq2).foreach { case (t1, t2) =>
assertEquals(t1, t2)
}
}
private def assertOptionEquals[T](
opt1: Option[T],
opt2: Option[T],
assertEquals: (T, T) => Unit): Unit = {
if (opt1.isDefined) {
assert(opt2.isDefined)
assertEquals(opt1.get, opt2.get)
} else {
assert(!opt2.isDefined)
}
}
/**
* Use different names for methods we pass in to assertSeqEquals or assertOptionEquals
*/
private def assertBlocksEquals(
blocks1: Seq[(BlockId, BlockStatus)],
blocks2: Seq[(BlockId, BlockStatus)]) = {
assertSeqEquals(blocks1, blocks2, assertBlockEquals)
}
private def assertBlockEquals(b1: (BlockId, BlockStatus), b2: (BlockId, BlockStatus)): Unit = {
assert(b1 === b2)
}
private def assertStackTraceElementEquals(ste1: StackTraceElement,
ste2: StackTraceElement): Unit = {
// This mimics the equals() method from Java 8 and earlier. Java 9 adds checks for the
// class loader and module, which would make otherwise-identical elements compare as
// unequal; we don't care about those fields here.
assert(ste1.getClassName === ste2.getClassName)
assert(ste1.getMethodName === ste2.getMethodName)
assert(ste1.getLineNumber === ste2.getLineNumber)
assert(ste1.getFileName === ste2.getFileName)
}
private def assertEquals(rp1: ResourceProfile, rp2: ResourceProfile): Unit = {
assert(rp1 === rp2)
}
/** ----------------------------------- *
| Util methods for constructing events |
* ------------------------------------ */
private val properties = {
val p = new Properties
p.setProperty("Ukraine", "Kiev")
p.setProperty("Russia", "Moscow")
p.setProperty("France", "Paris")
p.setProperty("Germany", "Berlin")
p
}
private val stackTrace = {
Array[StackTraceElement](
new StackTraceElement("Apollo", "Venus", "Mercury", 42),
new StackTraceElement("Afollo", "Vemus", "Mercurry", 420),
new StackTraceElement("Ayollo", "Vesus", "Blackberry", 4200)
)
}
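// Builds an RDDInfo whose ID, partition counts, and sizes are derived from the seed values.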
private def makeRddInfo(a: Int, b: Int, c: Int, d: Long, e: Long) = {
val r =
new RDDInfo(a, "mayor", b, StorageLevel.MEMORY_AND_DISK, false, Seq(1, 4, 7), a.toString)
r.numCachedPartitions = c
r.memSize = d
r.diskSize = e
r
}
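// Builds a StageInfo with up to four generated RDDInfos and two accumulables attached.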
private def makeStageInfo(
a: Int,
b: Int,
c: Int,
d: Long,
e: Long,
rpId: Int = ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID) = {
val rddInfos = (0 until a % 5).map { i => makeRddInfo(a + i, b + i, c + i, d + i, e + i) }
val stageInfo = new StageInfo(a, 0, "greetings", b, rddInfos, Seq(100, 200, 300), "details",
resourceProfileId = rpId)
val (acc1, acc2) = (makeAccumulableInfo(1), makeAccumulableInfo(2))
stageInfo.accumulables(acc1.id) = acc1
stageInfo.accumulables(acc2.id) = acc2
stageInfo
}
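// Builds a TaskInfo with three accumulables, one of which is marked internal.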
private def makeTaskInfo(a: Long, b: Int, c: Int, d: Long, speculative: Boolean) = {
val taskInfo = new TaskInfo(a, b, c, d, "executor", "your kind sir", TaskLocality.NODE_LOCAL,
speculative)
taskInfo.setAccumulables(
List(makeAccumulableInfo(1), makeAccumulableInfo(2), makeAccumulableInfo(3, internal = true)))
taskInfo
}
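// Builds an AccumulableInfo whose name, update, and value are all derived from the id.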
private def makeAccumulableInfo(
id: Int,
internal: Boolean = false,
countFailedValues: Boolean = false,
metadata: Option[String] = None): AccumulableInfo =
new AccumulableInfo(id, Some(s"Accumulable$id"), Some(s"delta$id"), Some(s"val$id"),
internal, countFailedValues, metadata)
/** Creates a SparkListenerExecutorMetricsUpdate event. */
private def makeExecutorMetricsUpdate(
execId: String,
includeTaskMetrics: Boolean,
includeExecutorMetrics: Boolean): SparkListenerExecutorMetricsUpdate = {
val taskMetrics =
if (includeTaskMetrics) {
Seq((1L, 1, 1, Seq(makeAccumulableInfo(1, false, false, None),
makeAccumulableInfo(2, false, false, None))))
} else {
Seq()
}
val executorMetricsUpdate: Map[(Int, Int), ExecutorMetrics] =
if (includeExecutorMetrics) {
Map((0, 0) -> new ExecutorMetrics(Array(123456L, 543L, 0L, 0L, 0L, 0L, 0L,
0L, 0L, 0L, 256912L, 123456L, 123456L, 61728L, 30364L, 15182L, 10L, 90L, 2L, 20L)))
} else {
Map.empty
}
SparkListenerExecutorMetricsUpdate(execId, taskMetrics, executorMetricsUpdate)
}
/**
* Creates a TaskMetrics object describing a task that read data from Hadoop (if hasHadoopInput is
* set to true) or read data from a shuffle otherwise.
*/
private def makeTaskMetrics(
a: Long,
b: Long,
c: Long,
d: Long,
e: Int,
f: Int,
hasHadoopInput: Boolean,
hasOutput: Boolean,
hasRecords: Boolean = true) = {
val t = TaskMetrics.registered
// Set CPU times same as wall times for testing purpose
t.setExecutorDeserializeTime(a)
t.setExecutorDeserializeCpuTime(a)
t.setExecutorRunTime(b)
t.setExecutorCpuTime(b)
t.setPeakExecutionMemory(c)
t.setResultSize(c)
t.setJvmGCTime(d)
t.setResultSerializationTime(a + b)
t.incMemoryBytesSpilled(a + c)
if (hasHadoopInput) {
val inputMetrics = t.inputMetrics
inputMetrics.setBytesRead(d + e + f)
inputMetrics.incRecordsRead(if (hasRecords) (d + e + f) / 100 else -1)
} else {
val sr = t.createTempShuffleReadMetrics()
sr.incRemoteBytesRead(b + d)
sr.incRemoteBytesReadToDisk(b)
sr.incLocalBlocksFetched(e)
sr.incFetchWaitTime(a + d)
sr.incRemoteBlocksFetched(f)
sr.incRecordsRead(if (hasRecords) (b + d) / 100 else -1)
sr.incLocalBytesRead(a + f)
t.mergeShuffleReadMetrics()
}
if (hasOutput) {
t.outputMetrics.setBytesWritten(a + b + c)
t.outputMetrics.setRecordsWritten(if (hasRecords) (a + b + c) / 100 else -1)
} else {
val sw = t.shuffleWriteMetrics
sw.incBytesWritten(a + b + c)
sw.incWriteTime(b + c + d)
sw.incRecordsWritten(if (hasRecords) (a + b + c) / 100 else -1)
}
// Make at most 5 blocks
t.setUpdatedBlockStatuses((1 to (e % 5 + 1)).map { i =>
(RDDBlockId(e % i, f % i), BlockStatus(StorageLevel.MEMORY_AND_DISK_SER_2, a % i, b % i))
}.toSeq)
t
}
/** --------------------------------------- *
| JSON string representation of each event |
* ---------------------------------------- */
private val stageSubmittedJsonString =
"""
|{
| "Event": "SparkListenerStageSubmitted",
| "Stage Info": {
| "Stage ID": 100,
| "Stage Attempt ID": 0,
| "Stage Name": "greetings",
| "Number of Tasks": 200,
| "RDD Info": [],
| "Parent IDs" : [100, 200, 300],
| "Details": "details",
| "Accumulables": [
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| }
| ],
| "Resource Profile Id" : 0
| },
| "Properties": {
| "France": "Paris",
| "Germany": "Berlin",
| "Russia": "Moscow",
| "Ukraine": "Kiev"
| }
|}
""".stripMargin
private val stageCompletedJsonString =
"""
|{
| "Event": "SparkListenerStageCompleted",
| "Stage Info": {
| "Stage ID": 101,
| "Stage Attempt ID": 0,
| "Stage Name": "greetings",
| "Number of Tasks": 201,
| "RDD Info": [
| {
| "RDD ID": 101,
| "Name": "mayor",
| "Callsite": "101",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 201,
| "Number of Cached Partitions": 301,
| "Memory Size": 401,
| "Disk Size": 501
| }
| ],
| "Parent IDs" : [100, 200, 300],
| "Details": "details",
| "Accumulables": [
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| }
| ],
| "Resource Profile Id" : 0
| }
|}
""".stripMargin
private val taskStartJsonString =
"""
|{
| "Event": "SparkListenerTaskStart",
| "Stage ID": 111,
| "Stage Attempt ID": 0,
| "Task Info": {
| "Task ID": 222,
| "Index": 333,
| "Attempt": 1,
| "Launch Time": 444,
| "Executor ID": "executor",
| "Host": "your kind sir",
| "Locality": "NODE_LOCAL",
| "Speculative": false,
| "Getting Result Time": 0,
| "Finish Time": 0,
| "Failed": false,
| "Killed": false,
| "Accumulables": [
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 3,
| "Name": "Accumulable3",
| "Update": "delta3",
| "Value": "val3",
| "Internal": true,
| "Count Failed Values": false
| }
| ]
| }
|}
""".stripMargin
private val taskGettingResultJsonString =
"""
|{
| "Event": "SparkListenerTaskGettingResult",
| "Task Info": {
| "Task ID": 1000,
| "Index": 2000,
| "Attempt": 5,
| "Launch Time": 3000,
| "Executor ID": "executor",
| "Host": "your kind sir",
| "Locality": "NODE_LOCAL",
| "Speculative": true,
| "Getting Result Time": 0,
| "Finish Time": 0,
| "Failed": false,
| "Killed": false,
| "Accumulables": [
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 3,
| "Name": "Accumulable3",
| "Update": "delta3",
| "Value": "val3",
| "Internal": true,
| "Count Failed Values": false
| }
| ]
| }
|}
""".stripMargin
private val taskEndJsonString =
"""
|{
| "Event": "SparkListenerTaskEnd",
| "Stage ID": 1,
| "Stage Attempt ID": 0,
| "Task Type": "ShuffleMapTask",
| "Task End Reason": {
| "Reason": "Success"
| },
| "Task Info": {
| "Task ID": 123,
| "Index": 234,
| "Attempt": 67,
| "Launch Time": 345,
| "Executor ID": "executor",
| "Host": "your kind sir",
| "Locality": "NODE_LOCAL",
| "Speculative": false,
| "Getting Result Time": 0,
| "Finish Time": 0,
| "Failed": false,
| "Killed": false,
| "Accumulables": [
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 3,
| "Name": "Accumulable3",
| "Update": "delta3",
| "Value": "val3",
| "Internal": true,
| "Count Failed Values": false
| }
| ]
| },
| "Task Executor Metrics" : {
| "JVMHeapMemory" : 543,
| "JVMOffHeapMemory" : 123456,
| "OnHeapExecutionMemory" : 12345,
| "OffHeapExecutionMemory" : 1234,
| "OnHeapStorageMemory" : 123,
| "OffHeapStorageMemory" : 12,
| "OnHeapUnifiedMemory" : 432,
| "OffHeapUnifiedMemory" : 321,
| "DirectPoolMemory" : 654,
| "MappedPoolMemory" : 765,
| "ProcessTreeJVMVMemory": 256912,
| "ProcessTreeJVMRSSMemory": 123456,
| "ProcessTreePythonVMemory": 123456,
| "ProcessTreePythonRSSMemory": 61728,
| "ProcessTreeOtherVMemory": 30364,
| "ProcessTreeOtherRSSMemory": 15182,
| "MinorGCCount" : 0,
| "MinorGCTime" : 0,
| "MajorGCCount" : 0,
| "MajorGCTime" : 0
| },
| "Task Metrics": {
| "Executor Deserialize Time": 300,
| "Executor Deserialize CPU Time": 300,
| "Executor Run Time": 400,
| "Executor CPU Time": 400,
| "Peak Execution Memory": 500,
| "Result Size": 500,
| "JVM GC Time": 600,
| "Result Serialization Time": 700,
| "Memory Bytes Spilled": 800,
| "Disk Bytes Spilled": 0,
| "Shuffle Read Metrics": {
| "Remote Blocks Fetched": 800,
| "Local Blocks Fetched": 700,
| "Fetch Wait Time": 900,
| "Remote Bytes Read": 1000,
| "Remote Bytes Read To Disk": 400,
| "Local Bytes Read": 1100,
| "Total Records Read": 10
| },
| "Shuffle Write Metrics": {
| "Shuffle Bytes Written": 1200,
| "Shuffle Write Time": 1500,
| "Shuffle Records Written": 12
| },
| "Input Metrics" : {
| "Bytes Read" : 0,
| "Records Read" : 0
| },
| "Output Metrics" : {
| "Bytes Written" : 0,
| "Records Written" : 0
| },
| "Updated Blocks": [
| {
| "Block ID": "rdd_0_0",
| "Status": {
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": false,
| "Replication": 2
| },
| "Memory Size": 0,
| "Disk Size": 0
| }
| }
| ]
| }
|}
""".stripMargin
private val taskEndWithHadoopInputJsonString =
"""
|{
| "Event": "SparkListenerTaskEnd",
| "Stage ID": 1,
| "Stage Attempt ID": 0,
| "Task Type": "ShuffleMapTask",
| "Task End Reason": {
| "Reason": "Success"
| },
| "Task Info": {
| "Task ID": 123,
| "Index": 234,
| "Attempt": 67,
| "Launch Time": 345,
| "Executor ID": "executor",
| "Host": "your kind sir",
| "Locality": "NODE_LOCAL",
| "Speculative": false,
| "Getting Result Time": 0,
| "Finish Time": 0,
| "Failed": false,
| "Killed": false,
| "Accumulables": [
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 3,
| "Name": "Accumulable3",
| "Update": "delta3",
| "Value": "val3",
| "Internal": true,
| "Count Failed Values": false
| }
| ]
| },
| "Task Executor Metrics" : {
| "JVMHeapMemory" : 543,
| "JVMOffHeapMemory" : 123456,
| "OnHeapExecutionMemory" : 12345,
| "OffHeapExecutionMemory" : 1234,
| "OnHeapStorageMemory" : 123,
| "OffHeapStorageMemory" : 12,
| "OnHeapUnifiedMemory" : 432,
| "OffHeapUnifiedMemory" : 321,
| "DirectPoolMemory" : 654,
| "MappedPoolMemory" : 765,
| "ProcessTreeJVMVMemory": 256912,
| "ProcessTreeJVMRSSMemory": 123456,
| "ProcessTreePythonVMemory": 123456,
| "ProcessTreePythonRSSMemory": 61728,
| "ProcessTreeOtherVMemory": 30364,
| "ProcessTreeOtherRSSMemory": 15182,
| "MinorGCCount" : 0,
| "MinorGCTime" : 0,
| "MajorGCCount" : 0,
| "MajorGCTime" : 0
| },
| "Task Metrics": {
| "Executor Deserialize Time": 300,
| "Executor Deserialize CPU Time": 300,
| "Executor Run Time": 400,
| "Executor CPU Time": 400,
| "Peak Execution Memory": 500,
| "Result Size": 500,
| "JVM GC Time": 600,
| "Result Serialization Time": 700,
| "Memory Bytes Spilled": 800,
| "Disk Bytes Spilled": 0,
| "Shuffle Read Metrics" : {
| "Remote Blocks Fetched" : 0,
| "Local Blocks Fetched" : 0,
| "Fetch Wait Time" : 0,
| "Remote Bytes Read" : 0,
| "Remote Bytes Read To Disk" : 0,
| "Local Bytes Read" : 0,
| "Total Records Read" : 0
| },
| "Shuffle Write Metrics": {
| "Shuffle Bytes Written": 1200,
| "Shuffle Write Time": 1500,
| "Shuffle Records Written": 12
| },
| "Input Metrics": {
| "Bytes Read": 2100,
| "Records Read": 21
| },
| "Output Metrics" : {
| "Bytes Written" : 0,
| "Records Written" : 0
| },
| "Updated Blocks": [
| {
| "Block ID": "rdd_0_0",
| "Status": {
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": false,
| "Replication": 2
| },
| "Memory Size": 0,
| "Disk Size": 0
| }
| }
| ]
| }
|}
""".stripMargin
private val taskEndWithOutputJsonString =
"""
|{
| "Event": "SparkListenerTaskEnd",
| "Stage ID": 1,
| "Stage Attempt ID": 0,
| "Task Type": "ResultTask",
| "Task End Reason": {
| "Reason": "Success"
| },
| "Task Info": {
| "Task ID": 123,
| "Index": 234,
| "Attempt": 67,
| "Launch Time": 345,
| "Executor ID": "executor",
| "Host": "your kind sir",
| "Locality": "NODE_LOCAL",
| "Speculative": false,
| "Getting Result Time": 0,
| "Finish Time": 0,
| "Failed": false,
| "Killed": false,
| "Accumulables": [
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 3,
| "Name": "Accumulable3",
| "Update": "delta3",
| "Value": "val3",
| "Internal": true,
| "Count Failed Values": false
| }
| ]
| },
| "Task Executor Metrics" : {
| "JVMHeapMemory" : 543,
| "JVMOffHeapMemory" : 123456,
| "OnHeapExecutionMemory" : 12345,
| "OffHeapExecutionMemory" : 1234,
| "OnHeapStorageMemory" : 123,
| "OffHeapStorageMemory" : 12,
| "OnHeapUnifiedMemory" : 432,
| "OffHeapUnifiedMemory" : 321,
| "DirectPoolMemory" : 654,
| "MappedPoolMemory" : 765,
| "ProcessTreeJVMVMemory": 256912,
| "ProcessTreeJVMRSSMemory": 123456,
| "ProcessTreePythonVMemory": 123456,
| "ProcessTreePythonRSSMemory": 61728,
| "ProcessTreeOtherVMemory": 30364,
| "ProcessTreeOtherRSSMemory": 15182,
| "MinorGCCount" : 0,
| "MinorGCTime" : 0,
| "MajorGCCount" : 0,
| "MajorGCTime" : 0
| },
| "Task Metrics": {
| "Executor Deserialize Time": 300,
| "Executor Deserialize CPU Time": 300,
| "Executor Run Time": 400,
| "Executor CPU Time": 400,
| "Peak Execution Memory": 500,
| "Result Size": 500,
| "JVM GC Time": 600,
| "Result Serialization Time": 700,
| "Memory Bytes Spilled": 800,
| "Disk Bytes Spilled": 0,
| "Shuffle Read Metrics" : {
| "Remote Blocks Fetched" : 0,
| "Local Blocks Fetched" : 0,
| "Fetch Wait Time" : 0,
| "Remote Bytes Read" : 0,
| "Remote Bytes Read To Disk" : 0,
| "Local Bytes Read" : 0,
| "Total Records Read" : 0
| },
| "Shuffle Write Metrics": {
| "Shuffle Bytes Written" : 0,
| "Shuffle Write Time" : 0,
| "Shuffle Records Written" : 0
| },
| "Input Metrics": {
| "Bytes Read": 2100,
| "Records Read": 21
| },
| "Output Metrics": {
| "Bytes Written": 1200,
| "Records Written": 12
| },
| "Updated Blocks": [
| {
| "Block ID": "rdd_0_0",
| "Status": {
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": false,
| "Replication": 2
| },
| "Memory Size": 0,
| "Disk Size": 0
| }
| }
| ]
| }
|}
""".stripMargin
private val jobStartJsonString =
"""
|{
| "Event": "SparkListenerJobStart",
| "Job ID": 10,
| "Submission Time": 1421191042750,
| "Stage Infos": [
| {
| "Stage ID": 1,
| "Stage Attempt ID": 0,
| "Stage Name": "greetings",
| "Number of Tasks": 200,
| "RDD Info": [
| {
| "RDD ID": 1,
| "Name": "mayor",
| "Callsite": "1",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 200,
| "Number of Cached Partitions": 300,
| "Memory Size": 400,
| "Disk Size": 500
| }
| ],
| "Parent IDs" : [100, 200, 300],
| "Details": "details",
| "Accumulables": [
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| }
| ],
| "Resource Profile Id" : 0
| },
| {
| "Stage ID": 2,
| "Stage Attempt ID": 0,
| "Stage Name": "greetings",
| "Number of Tasks": 400,
| "RDD Info": [
| {
| "RDD ID": 2,
| "Name": "mayor",
| "Callsite": "2",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 400,
| "Number of Cached Partitions": 600,
| "Memory Size": 800,
| "Disk Size": 1000
| },
| {
| "RDD ID": 3,
| "Name": "mayor",
| "Callsite": "3",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 401,
| "Number of Cached Partitions": 601,
| "Memory Size": 801,
| "Disk Size": 1001
| }
| ],
| "Parent IDs" : [100, 200, 300],
| "Details": "details",
| "Accumulables": [
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| }
| ],
| "Resource Profile Id" : 0
| },
| {
| "Stage ID": 3,
| "Stage Attempt ID": 0,
| "Stage Name": "greetings",
| "Number of Tasks": 600,
| "RDD Info": [
| {
| "RDD ID": 3,
| "Name": "mayor",
| "Callsite": "3",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 600,
| "Number of Cached Partitions": 900,
| "Memory Size": 1200,
| "Disk Size": 1500
| },
| {
| "RDD ID": 4,
| "Name": "mayor",
| "Callsite": "4",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 601,
| "Number of Cached Partitions": 901,
| "Memory Size": 1201,
| "Disk Size": 1501
| },
| {
| "RDD ID": 5,
| "Name": "mayor",
| "Callsite": "5",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 602,
| "Number of Cached Partitions": 902,
| "Memory Size": 1202,
| "Disk Size": 1502
| }
| ],
| "Parent IDs" : [100, 200, 300],
| "Details": "details",
| "Accumulables": [
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| }
| ],
| "Resource Profile Id" : 0
| },
| {
| "Stage ID": 4,
| "Stage Attempt ID": 0,
| "Stage Name": "greetings",
| "Number of Tasks": 800,
| "RDD Info": [
| {
| "RDD ID": 4,
| "Name": "mayor",
| "Callsite": "4",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 800,
| "Number of Cached Partitions": 1200,
| "Memory Size": 1600,
| "Disk Size": 2000
| },
| {
| "RDD ID": 5,
| "Name": "mayor",
| "Callsite": "5",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 801,
| "Number of Cached Partitions": 1201,
| "Memory Size": 1601,
| "Disk Size": 2001
| },
| {
| "RDD ID": 6,
| "Name": "mayor",
| "Callsite": "6",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 802,
| "Number of Cached Partitions": 1202,
| "Memory Size": 1602,
| "Disk Size": 2002
| },
| {
| "RDD ID": 7,
| "Name": "mayor",
| "Callsite": "7",
| "Parent IDs": [1, 4, 7],
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Barrier" : false,
| "Number of Partitions": 803,
| "Number of Cached Partitions": 1203,
| "Memory Size": 1603,
| "Disk Size": 2003
| }
| ],
| "Parent IDs" : [100, 200, 300],
| "Details": "details",
| "Accumulables": [
| {
| "ID": 2,
| "Name": "Accumulable2",
| "Update": "delta2",
| "Value": "val2",
| "Internal": false,
| "Count Failed Values": false
| },
| {
| "ID": 1,
| "Name": "Accumulable1",
| "Update": "delta1",
| "Value": "val1",
| "Internal": false,
| "Count Failed Values": false
| }
| ],
| "Resource Profile Id" : 0
| }
| ],
| "Stage IDs": [
| 1,
| 2,
| 3,
| 4
| ],
| "Properties": {
| "France": "Paris",
| "Germany": "Berlin",
| "Russia": "Moscow",
| "Ukraine": "Kiev"
| }
|}
""".stripMargin
private val jobEndJsonString =
"""
|{
| "Event": "SparkListenerJobEnd",
| "Job ID": 20,
| "Completion Time": 1421191296660,
| "Job Result": {
| "Result": "JobSucceeded"
| }
|}
""".stripMargin
private val environmentUpdateJsonString =
"""
|{
| "Event": "SparkListenerEnvironmentUpdate",
| "JVM Information": {
| "GC speed": "9999 objects/s",
| "Java home": "Land of coffee"
| },
| "Spark Properties": {
| "Job throughput": "80000 jobs/s, regardless of job type"
| },
| "Hadoop Properties": {
| "hadoop.tmp.dir": "/usr/local/hadoop/tmp"
| },
| "System Properties": {
| "Username": "guest",
| "Password": "guest"
| },
| "Classpath Entries": {
| "Super library": "/tmp/super_library"
| }
|}
""".stripMargin
private val blockManagerAddedJsonString =
"""
|{
| "Event": "SparkListenerBlockManagerAdded",
| "Block Manager ID": {
| "Executor ID": "Stars",
| "Host": "In your multitude...",
| "Port": 300
| },
| "Maximum Memory": 500,
| "Timestamp": 1
|}
""".stripMargin
private val blockManagerRemovedJsonString =
"""
|{
| "Event": "SparkListenerBlockManagerRemoved",
| "Block Manager ID": {
| "Executor ID": "Scarce",
| "Host": "to be counted...",
| "Port": 100
| },
| "Timestamp": 2
|}
""".stripMargin
private val unpersistRDDJsonString =
"""
|{
| "Event": "SparkListenerUnpersistRDD",
| "RDD ID": 12345
|}
""".stripMargin
private val applicationStartJsonString =
"""
|{
| "Event": "SparkListenerApplicationStart",
| "App Name": "The winner of all",
| "App ID": "appId",
| "Timestamp": 42,
| "User": "Garfield",
| "App Attempt ID": "appAttempt"
|}
""".stripMargin
private val applicationStartJsonWithLogUrlsString =
"""
|{
| "Event": "SparkListenerApplicationStart",
| "App Name": "The winner of all",
| "App ID": "appId",
| "Timestamp": 42,
| "User": "Garfield",
| "App Attempt ID": "appAttempt",
| "Driver Logs" : {
| "stderr" : "mystderr",
| "stdout" : "mystdout"
| }
|}
""".stripMargin
private val applicationEndJsonString =
"""
|{
| "Event": "SparkListenerApplicationEnd",
| "Timestamp": 42
|}
""".stripMargin
private val executorAddedJsonString =
s"""
|{
| "Event": "SparkListenerExecutorAdded",
| "Timestamp": ${executorAddedTime},
| "Executor ID": "exec1",
| "Executor Info": {
| "Host": "Hostee.awesome.com",
| "Total Cores": 11,
| "Log Urls" : {
| "stderr" : "mystderr",
| "stdout" : "mystdout"
| },
| "Attributes" : {
| "ContainerId" : "ct1",
| "User" : "spark"
| },
| "Resources" : {
| "gpu" : {
| "name" : "gpu",
| "addresses" : [ "0", "1" ]
| }
| },
| "Resource Profile Id": 4
| }
|}
""".stripMargin
private val executorRemovedJsonString =
s"""
|{
| "Event": "SparkListenerExecutorRemoved",
| "Timestamp": ${executorRemovedTime},
| "Executor ID": "exec2",
| "Removed Reason": "test reason"
|}
""".stripMargin
private val executorMetricsUpdateJsonString =
s"""
|{
| "Event": "SparkListenerExecutorMetricsUpdate",
| "Executor ID": "exec3",
| "Metrics Updated": [
| {
| "Task ID": 1,
| "Stage ID": 2,
| "Stage Attempt ID": 3,
| "Accumulator Updates": [
| {
| "ID": 0,
| "Name": "$EXECUTOR_DESERIALIZE_TIME",
| "Update": 300,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 1,
| "Name": "$EXECUTOR_DESERIALIZE_CPU_TIME",
| "Update": 300,
| "Internal": true,
| "Count Failed Values": true
| },
|
| {
| "ID": 2,
| "Name": "$EXECUTOR_RUN_TIME",
| "Update": 400,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 3,
| "Name": "$EXECUTOR_CPU_TIME",
| "Update": 400,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 4,
| "Name": "$RESULT_SIZE",
| "Update": 500,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 5,
| "Name": "$JVM_GC_TIME",
| "Update": 600,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 6,
| "Name": "$RESULT_SERIALIZATION_TIME",
| "Update": 700,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 7,
| "Name": "$MEMORY_BYTES_SPILLED",
| "Update": 800,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 8,
| "Name": "$DISK_BYTES_SPILLED",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 9,
| "Name": "$PEAK_EXECUTION_MEMORY",
| "Update": 500,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 10,
| "Name": "$UPDATED_BLOCK_STATUSES",
| "Update": [
| {
| "Block ID": "rdd_0_0",
| "Status": {
| "Storage Level": {
| "Use Disk": true,
| "Use Memory": true,
| "Deserialized": false,
| "Replication": 2
| },
| "Memory Size": 0,
| "Disk Size": 0
| }
| }
| ],
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 11,
| "Name": "${shuffleRead.REMOTE_BLOCKS_FETCHED}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 12,
| "Name": "${shuffleRead.LOCAL_BLOCKS_FETCHED}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 13,
| "Name": "${shuffleRead.REMOTE_BYTES_READ}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 14,
| "Name": "${shuffleRead.REMOTE_BYTES_READ_TO_DISK}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 15,
| "Name": "${shuffleRead.LOCAL_BYTES_READ}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 16,
| "Name": "${shuffleRead.FETCH_WAIT_TIME}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 17,
| "Name": "${shuffleRead.RECORDS_READ}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 18,
| "Name": "${shuffleWrite.BYTES_WRITTEN}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 19,
| "Name": "${shuffleWrite.RECORDS_WRITTEN}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 20,
| "Name": "${shuffleWrite.WRITE_TIME}",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 21,
| "Name": "${input.BYTES_READ}",
| "Update": 2100,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 22,
| "Name": "${input.RECORDS_READ}",
| "Update": 21,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 23,
| "Name": "${output.BYTES_WRITTEN}",
| "Update": 1200,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 24,
| "Name": "${output.RECORDS_WRITTEN}",
| "Update": 12,
| "Internal": true,
| "Count Failed Values": true
| },
| {
| "ID": 25,
| "Name": "$TEST_ACCUM",
| "Update": 0,
| "Internal": true,
| "Count Failed Values": true
| }
| ]
| }
| ],
| "Executor Metrics Updated" : [
| {
| "Stage ID" : 0,
| "Stage Attempt ID" : 0,
| "Executor Metrics" : {
| "JVMHeapMemory" : 543,
| "JVMOffHeapMemory" : 123456,
| "OnHeapExecutionMemory" : 12345,
| "OffHeapExecutionMemory" : 1234,
| "OnHeapStorageMemory" : 123,
| "OffHeapStorageMemory" : 12,
| "OnHeapUnifiedMemory" : 432,
| "OffHeapUnifiedMemory" : 321,
| "DirectPoolMemory" : 654,
| "MappedPoolMemory" : 765,
| "ProcessTreeJVMVMemory": 256912,
| "ProcessTreeJVMRSSMemory": 123456,
| "ProcessTreePythonVMemory": 123456,
| "ProcessTreePythonRSSMemory": 61728,
| "ProcessTreeOtherVMemory": 30364,
| "ProcessTreeOtherRSSMemory": 15182,
| "MinorGCCount": 10,
| "MinorGCTime": 90,
| "MajorGCCount": 2,
| "MajorGCTime": 20
| }
| }
| ]
|}
""".stripMargin
private val stageExecutorMetricsJsonString =
"""
|{
| "Event": "SparkListenerStageExecutorMetrics",
| "Executor ID": "1",
| "Stage ID": 2,
| "Stage Attempt ID": 3,
| "Executor Metrics" : {
| "JVMHeapMemory" : 543,
| "JVMOffHeapMemory" : 123456,
| "OnHeapExecutionMemory" : 12345,
| "OffHeapExecutionMemory" : 1234,
| "OnHeapStorageMemory" : 123,
| "OffHeapStorageMemory" : 12,
| "OnHeapUnifiedMemory" : 432,
| "OffHeapUnifiedMemory" : 321,
| "DirectPoolMemory" : 654,
| "MappedPoolMemory" : 765,
| "ProcessTreeJVMVMemory": 256912,
| "ProcessTreeJVMRSSMemory": 123456,
| "ProcessTreePythonVMemory": 123456,
| "ProcessTreePythonRSSMemory": 61728,
| "ProcessTreeOtherVMemory": 30364,
| "ProcessTreeOtherRSSMemory": 15182,
| "MinorGCCount": 10,
| "MinorGCTime": 90,
| "MajorGCCount": 2,
| "MajorGCTime": 20
| }
|}
""".stripMargin
private val blockUpdatedJsonString =
"""
|{
| "Event": "SparkListenerBlockUpdated",
| "Block Updated Info": {
| "Block Manager ID": {
| "Executor ID": "Stars",
| "Host": "In your multitude...",
| "Port": 300
| },
| "Block ID": "rdd_0_0",
| "Storage Level": {
| "Use Disk": false,
| "Use Memory": true,
| "Deserialized": true,
| "Replication": 1
| },
| "Memory Size": 100,
| "Disk Size": 0
| }
|}
""".stripMargin
private val executorBlacklistedJsonString =
s"""
|{
| "Event" : "org.apache.spark.scheduler.SparkListenerExecutorBlacklisted",
| "time" : ${executorBlacklistedTime},
| "executorId" : "exec1",
| "taskFailures" : 22
|}
""".stripMargin
private val executorUnblacklistedJsonString =
s"""
|{
| "Event" : "org.apache.spark.scheduler.SparkListenerExecutorUnblacklisted",
| "time" : ${executorUnblacklistedTime},
| "executorId" : "exec1"
|}
""".stripMargin
private val nodeBlacklistedJsonString =
s"""
|{
| "Event" : "org.apache.spark.scheduler.SparkListenerNodeBlacklisted",
| "time" : ${nodeBlacklistedTime},
| "hostId" : "node1",
| "executorFailures" : 33
|}
""".stripMargin
private val nodeUnblacklistedJsonString =
s"""
|{
| "Event" : "org.apache.spark.scheduler.SparkListenerNodeUnblacklisted",
| "time" : ${nodeUnblacklistedTime},
| "hostId" : "node1"
|}
""".stripMargin
private val resourceProfileJsonString =
"""
|{
| "Event":"SparkListenerResourceProfileAdded",
| "Resource Profile Id":21,
| "Executor Resource Requests":{
| "cores" : {
| "Resource Name":"cores",
| "Amount":2,
| "Discovery Script":"",
| "Vendor":""
| },
| "gpu":{
| "Resource Name":"gpu",
| "Amount":2,
| "Discovery Script":"myscript",
| "Vendor":""
| }
| },
| "Task Resource Requests":{
| "cpus":{
| "Resource Name":"cpus",
| "Amount":1.0
| },
| "gpu":{
| "Resource Name":"gpu",
| "Amount":1.0
| }
| }
|}
""".stripMargin
}
case class TestListenerEvent(foo: String, bar: Int) extends SparkListenerEvent
ConeyLiu/spark | core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala | Scala | apache-2.0 | 91,637
package com.jarhart.scat
import org.scalatest.FreeSpec
import org.scalatest.prop._
import shapeless._
class StackPrimitivesSpec extends FreeSpec with PropertyChecks with ArbitraryStacks {
import StackPrimitives._
"dup duplicates the top value on the stack" in {
forAll { (x: Int, stack: HList) =>
assert(
dup.run(x :: stack)._2 === x :: x :: stack
)
}
}
"swap swaps the top two values on the stack" in {
forAll { (x: Int, y: String, stack: HList) =>
assert(
swap.run(x :: y :: stack)._2 === y :: x :: stack
)
}
}
}
jarhart/scat | src/test/scala/com/jarhart/scat/StackPrimitivesSpec.scala | Scala | mit | 587
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.integrationtest
import scala.collection.JavaConverters._
import io.fabric8.kubernetes.api.model.Pod
import org.scalatest.concurrent.Eventually
import org.scalatest.matchers.should.Matchers._
import org.apache.spark.{SparkFunSuite, TestUtils}
import org.apache.spark.launcher.SparkLauncher
private[spark] trait BasicTestsSuite { k8sSuite: KubernetesSuite =>
import BasicTestsSuite._
import KubernetesSuite.k8sTestTag
import KubernetesSuite.{TIMEOUT, INTERVAL}
test("Run SparkPi with no resources", k8sTestTag) {
runSparkPiAndVerifyCompletion()
}
test("Run SparkPi with no resources & statefulset allocation", k8sTestTag) {
sparkAppConf.set("spark.kubernetes.allocation.pods.allocator", "statefulset")
runSparkPiAndVerifyCompletion()
// Verify there is no dangling statefulset
// This depends on the garbage collection happening inside of K8s so give it some time.
Eventually.eventually(TIMEOUT, INTERVAL) {
val sets = kubernetesTestComponents.kubernetesClient.apps().statefulSets().list().getItems
val scalaSets = sets.asScala
scalaSets.size shouldBe (0)
}
}
test("Run SparkPi with a very long application name.", k8sTestTag) {
sparkAppConf.set("spark.app.name", "long" * 40)
runSparkPiAndVerifyCompletion()
}
test("Use SparkLauncher.NO_RESOURCE", k8sTestTag) {
sparkAppConf.setJars(Seq(containerLocalSparkDistroExamplesJar))
runSparkPiAndVerifyCompletion(
appResource = SparkLauncher.NO_RESOURCE)
}
test("Run SparkPi with a master URL without a scheme.", k8sTestTag) {
val url = kubernetesTestComponents.kubernetesClient.getMasterUrl
val k8sMasterUrl = if (url.getPort < 0) {
s"k8s://${url.getHost}"
} else {
s"k8s://${url.getHost}:${url.getPort}"
}
sparkAppConf.set("spark.master", k8sMasterUrl)
runSparkPiAndVerifyCompletion()
}
test("Run SparkPi with an argument.", k8sTestTag) {
// This additional configuration with snappy is for SPARK-26995
sparkAppConf
.set("spark.io.compression.codec", "snappy")
runSparkPiAndVerifyCompletion(appArgs = Array("5"))
}
test("Run SparkPi with custom labels, annotations, and environment variables.", k8sTestTag) {
sparkAppConf
.set("spark.kubernetes.driver.label.label1", "label1-value")
.set("spark.kubernetes.driver.label.label2", "label2-value")
.set("spark.kubernetes.driver.annotation.annotation1", "annotation1-value")
.set("spark.kubernetes.driver.annotation.annotation2", "annotation2-value")
.set("spark.kubernetes.driverEnv.ENV1", "VALUE1")
.set("spark.kubernetes.driverEnv.ENV2", "VALUE2")
.set("spark.kubernetes.executor.label.label1", "label1-value")
.set("spark.kubernetes.executor.label.label2", "label2-value")
.set("spark.kubernetes.executor.annotation.annotation1", "annotation1-value")
.set("spark.kubernetes.executor.annotation.annotation2", "annotation2-value")
.set("spark.executorEnv.ENV1", "VALUE1")
.set("spark.executorEnv.ENV2", "VALUE2")
runSparkPiAndVerifyCompletion(
driverPodChecker = (driverPod: Pod) => {
doBasicDriverPodCheck(driverPod)
checkCustomSettings(driverPod)
},
executorPodChecker = (executorPod: Pod) => {
doBasicExecutorPodCheck(executorPod)
checkCustomSettings(executorPod)
})
}
test("All pods have the same service account by default", k8sTestTag) {
runSparkPiAndVerifyCompletion(
executorPodChecker = (executorPod: Pod) => {
doExecutorServiceAccountCheck(executorPod, kubernetesTestComponents.serviceAccountName)
})
}
test("Run extraJVMOptions check on driver", k8sTestTag) {
sparkAppConf
.set("spark.driver.extraJavaOptions", "-Dspark.test.foo=spark.test.bar")
runSparkJVMCheckAndVerifyCompletion(
expectedJVMValue = Seq("(spark.test.foo,spark.test.bar)"))
}
test("Run SparkRemoteFileTest using a remote data file", k8sTestTag) {
assert(sys.props.contains("spark.test.home"), "spark.test.home is not set!")
TestUtils.withHttpServer(sys.props("spark.test.home")) { baseURL =>
sparkAppConf
.set("spark.files", baseURL.toString + REMOTE_PAGE_RANK_DATA_FILE)
runSparkRemoteCheckAndVerifyCompletion(appArgs = Array(REMOTE_PAGE_RANK_FILE_NAME))
}
}
}
private[spark] object BasicTestsSuite extends SparkFunSuite {
val SPARK_PAGE_RANK_MAIN_CLASS: String = "org.apache.spark.examples.SparkPageRank"
val CONTAINER_LOCAL_FILE_DOWNLOAD_PATH = "/var/spark-data/spark-files"
val CONTAINER_LOCAL_DOWNLOADED_PAGE_RANK_DATA_FILE =
s"$CONTAINER_LOCAL_FILE_DOWNLOAD_PATH/pagerank_data.txt"
val REMOTE_PAGE_RANK_DATA_FILE = getTestResourcePath("pagerank_data.txt")
val REMOTE_PAGE_RANK_FILE_NAME = "pagerank_data.txt"
}
|
WeichenXu123/spark
|
resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/BasicTestsSuite.scala
|
Scala
|
apache-2.0
| 5,636 |
package eva4s
package recombining
import language.higherKinds
import scala.util.Random
import scalaz.Functor
import scalaz.Length
import scalaz.Unzip
import scalaz.Zip
/** One point crossover randomly selects a crossover point and interchanges the two parents at this
* point to produce two new children.
*/
case class OnePointCrossover[F[_],A](implicit val fitness: Fitness[F[A]], F: Functor[F], L: Length[F], U: Unzip[F], Z: Zip[F])
extends CrossoverRecombinator[F[A]] {
override def recombine(g1: F[A], g2: F[A]): GenomeP[F[A]] = {
val size = L.length(g1)
val point = Random.nextInt(size - 1) + 1
var current = 0
val cs = Z.zipWith(g1,g2) { (gene1, gene2) ⇒
val p = if (current < point) (gene1,gene2) else (gene2,gene1)
current += 1
p
}
U.unzip(cs)
}
}
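// Editor's note: the sketch below is illustrative and not part of the original
// library. It restates the one-point crossover described in the scaladoc above
// on plain Lists, without the scalaz type classes, so the effect of `recombine`
// is visible for a concrete crossover point.
object OnePointCrossoverSketch {
  def crossover[A](g1: List[A], g2: List[A], point: Int): (List[A], List[A]) = {
    // child 1 takes the prefix of g1 and the suffix of g2; child 2 is the mirror image
    val child1 = g1.take(point) ++ g2.drop(point)
    val child2 = g2.take(point) ++ g1.drop(point)
    (child1, child2)
  }
  // crossover(List(1, 2, 3, 4), List(5, 6, 7, 8), 2) == (List(1, 2, 7, 8), List(5, 6, 3, 4))
}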
|
wookietreiber/eva4s-old
|
core/main/scala/recombining/OnePointCrossover.scala
|
Scala
|
gpl-3.0
| 820 |
/*
* Copyright 2022 Typelevel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.typelevel.sbt.kernel
import scala.util.Try
import scala.sys.process._
private[sbt] object GitHelper {
/**
* Returns a list of strictly previous releases (i.e. ignores tags on HEAD).
* @param fromHead
* if `true`, only tags reachable from HEAD's history. If `false`, all tags in the repo.
*/
def previousReleases(fromHead: Boolean = false, strict: Boolean = true): List[V] =
Try {
val merged = if (fromHead) " --merged HEAD" else ""
// --no-contains omits tags on HEAD
val noContains = if (strict) " --no-contains HEAD" else ""
s"git -c versionsort.suffix=- tag$noContains$merged --sort=-v:refname" // reverse
.!!
.split("\\n")
.toList
.map(_.trim)
.collect { case V.Tag(version) => version }
}.getOrElse(List.empty)
def getTagOrHash(tags: Seq[String], hash: Option[String]): Option[String] =
tags.collectFirst { case v @ V.Tag(_) => v }.orElse(hash)
}
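// Editor's note (hypothetical usage, not in the original file; it assumes the
// V.Tag extractor from the same package matches tags such as "v1.2.3"):
//   GitHelper.previousReleases(fromHead = true)            // releases reachable from HEAD, newest first
//   GitHelper.getTagOrHash(Seq("v1.2.3"), Some("abc1234")) // Some("v1.2.3"); falls back to the hash when no tag matches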
|
typelevel/sbt-typelevel
|
kernel/src/main/scala/org/typelevel/sbt/kernel/GitHelper.scala
|
Scala
|
apache-2.0
| 1,559 |
package com.ereisman.esurient.examples
import org.apache.hadoop.conf.Configuration
import org.apache.log4j.Logger
import com.ereisman.esurient.EsurientConstants._
object EsurientDefaultTask {
val LOG = Logger.getLogger(this.getClass())
}
class EsurientDefaultTask extends com.ereisman.esurient.EsurientTask {
import com.ereisman.esurient.examples.EsurientDefaultTask._
override def execute: Unit = {
// the unique task id you can use to assign work deterministically to each task
val taskId = context.getConfiguration.getInt(ES_THIS_TASK_ID, ES_ERROR_CODE)
// the total # of tasks launched in this job
val totalTasks = context.getConfiguration.getInt(ES_TASK_COUNT, ES_ERROR_CODE)
// the Hadoop Configuration for this job full of useful metadata, values from cmd line args etc.
val conf = context.getConfiguration
// do some hello world stuff and bail out
LOG.info("Hi, This is EsurientDefaultTask #" + taskId + " of " + totalTasks)
if (taskId == 7) {
LOG.warn("You better watch out, I'm Task ID #7")
}
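    // Editor's note (illustrative sketch, not part of the original example): one
    // common way to use taskId/totalTasks for deterministic work assignment is a
    // simple modulo partition over a shared list of work items, e.g.
    //   val workItems = (1 to 100).toList
    //   val myItems   = workItems.filter(item => item % totalTasks == taskId % totalTasks)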
}
}
|
initialcontext/esurient
|
src/main/scala/com/ereisman/esurient/examples/EsurientDefaultTask.scala
|
Scala
|
apache-2.0
| 1,072 |
package models.street
import models.utils.MyPostgresDriver.simple._
import play.api.Play.current
case class StreetEdgeStreetNode(streetEdgeStreetNodeId: Int, streetEdgeId: Int, streetNodeId: Int)
class StreetEdgeStreetNodeTable(tag: Tag) extends Table[StreetEdgeStreetNode](tag, Some("sidewalk"), "street_edge_parent_edge") {
def streetEdgeStreetNodeId = column[Int]("street_edge_street_node_id", O.PrimaryKey)
def streetEdgeId = column[Int]("street_edge_id")
def streetNodeId = column[Int]("street_node_id")
def * = (streetEdgeStreetNodeId, streetEdgeId, streetNodeId) <> ((StreetEdgeStreetNode.apply _).tupled, StreetEdgeStreetNode.unapply)
}
object StreetEdgeStreetNodeTable {
val db = play.api.db.slick.DB
val streetEdgeStreetNodes = TableQuery[StreetEdgeStreetNodeTable]
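  // Editor's note (hypothetical usage, assuming the Slick 2.x implicit-session API
  // that MyPostgresDriver.simple._ suggests): fetching the node ids attached to one
  // street edge with the query defined above, e.g.
  //   db.withSession { implicit session =>
  //     streetEdgeStreetNodes.filter(_.streetEdgeId === edgeId).map(_.streetNodeId).list
  //   }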
}
|
danZzyy/SidewalkWebpage
|
sidewalk-webpage/app/models/street/StreetEdgeStreetNodeTable.scala
|
Scala
|
mit
| 796 |
package com.github.simonedeponti.play26lettuce
import play.api.Configuration
import play.api.cache.SyncCacheApi
/** Dependency-injection provider for [[play.api.cache.SyncCacheApi]]
*
* @param configuration The application configuration
* @param name The cache name (if not provided uses "default")
*/
class SyncWrapperProvider(val configuration: Configuration, val name: String = "default") extends BaseClientProvider[SyncCacheApi] {
lazy val get: SyncCacheApi = {
new SyncWrapper(getLettuceApi(name), configuration)(ec)
}
}
|
simonedeponti/play26-lettuce
|
src/main/scala/com/github/simonedeponti/play26lettuce/SyncWrapperProvider.scala
|
Scala
|
bsd-3-clause
| 547 |
package org.bitcoins.core.protocol.tlv
import org.bitcoins.testkitcore.gen.LnMessageGen
import org.bitcoins.testkitcore.util.BitcoinSUnitTest
import scodec.bits._
import scala.util.Try
class LnMessageTest extends BitcoinSUnitTest {
"LnMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.lnMessage) { msg =>
assert(LnMessage(msg.bytes) == msg)
}
}
"UnknownMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.unknownMessage) { unknown =>
assert(LnMessage(unknown.bytes) == unknown)
}
}
"InitMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.initMessage) { initMessage =>
assert(LnMessage(initMessage.bytes) == initMessage)
assert(LnMessageFactory(InitTLV)(initMessage.bytes) == initMessage)
}
}
"InitMessage" must "parse correctly" in {
assert(Try(LnMessage(
"001000022200000302aaa2012006226e46111a0b59caaf126043eb5bbf28c34f3a5e332a1fc7b2b73cf188910f")).isSuccess)
}
/** @see https://github.com/lightningnetwork/lightning-rfc/blob/master/01-messaging.md#appendix-c-message-extension */
"InitMessage" must "pass static test vectors" in {
assert(Try(LnMessageFactory(InitTLV)(hex"001000000000")).isSuccess)
assert(
Try(LnMessageFactory(InitTLV)(hex"00100000000001012a030104")).isSuccess)
assert(Try(LnMessageFactory(InitTLV)(hex"00100000000001")).isFailure)
assert(Try(LnMessageFactory(InitTLV)(hex"00100000000002012a")).isFailure)
assert(
Try(LnMessageFactory(InitTLV)(hex"001000000000010101010102")).isFailure)
}
"ErrorMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.errorMessage) { error =>
assert(LnMessage(error.bytes) == error)
}
}
"PingMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.pingMessage) { ping =>
assert(LnMessage(ping.bytes) == ping)
}
}
"PongMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.pongMessage) { pong =>
assert(LnMessage(pong.bytes) == pong)
}
}
"PongMessage" must "parse correctly" in {
assert(
LnMessage("001300020000") == LnMessage(
PongTLV.forIgnored(ByteVector.fromValidHex("0000"))))
}
"DLCOfferMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.dlcOfferMessage) { dlcOffer =>
assert(LnMessage(dlcOffer.bytes) == dlcOffer)
}
}
"DLCAcceptMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.dlcAcceptMessage) { dlcAccept =>
assert(LnMessage(dlcAccept.bytes) == dlcAccept)
}
}
"DLCSignMessage" must "have serialization symmetry" in {
forAll(LnMessageGen.dlcSignMessage) { dlcSign =>
assert(LnMessage(dlcSign.bytes) == dlcSign)
}
}
}
|
bitcoin-s/bitcoin-s
|
core-test/src/test/scala/org/bitcoins/core/protocol/tlv/LnMessageTest.scala
|
Scala
|
mit
| 2,769 |
package net.gree.aurora.scala.domain.clustergroup
import java.util.{List => JList}
import net.gree.aurora.domain.clustergroup.{ClusterGroup => JClusterGroup}
import net.gree.aurora.scala.domain.cluster.{Cluster, ClusterId}
import org.sisioh.dddbase.utils.{Try => JTry}
import scala.collection.JavaConverters._
import scala.util.{Failure, Success, Try}
private[domain]
class ClusterGroupImpl
(val underlying: JClusterGroup) extends ClusterGroup {
val identity = ClusterGroupId(underlying.getIdentity.getValue)
def resolveCluster(shardId: ClusterId): Try[Cluster] = {
val result = underlying.resolveCluster(shardId)
result match {
case success: JTry.Success[_] =>
Success(Cluster(success.get().asInstanceOf[Cluster]))
case failure: JTry[_] =>
Failure(failure.getCause)
}
}
def clusters: Try[Seq[Cluster]] = {
if (underlying.getClusters.isSuccess) {
val clusters = underlying.getClusters.get()
Success(clusters.asScala.map(Cluster(_)))
} else {
Failure(underlying.getClusters.getCause)
}
}
}
|
gree/aurora
|
aurora-scala/src/main/scala/net/gree/aurora/scala/domain/clustergroup/ClusterGroupImpl.scala
|
Scala
|
mit
| 1,075 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite       **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013--2015, LAMP/EPFL **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/      **
** /____/\___/_/ |_/____/_/ | |__/ /____/                              **
**                          |/____/                                    **
\*                                                                      */
package org.scalajs.testsuite.scalalib
import scala.language.implicitConversions
import scala.reflect._
import scala.collection.mutable.ArrayBuilder
import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.Platform._
class ArrayBuilderTest {
@noinline
def erase(x: Any): Any = x
@inline
def makeNoInline[T: ClassTag](): ArrayBuilder[T] = {
@noinline def ct = implicitly[ClassTag[T]]
ArrayBuilder.make[T]()(ct)
}
@inline
def zerosInline[T: ClassTag](length: Int): Array[T] =
Array.fill(length)(null.asInstanceOf[T])
@noinline
def zerosNoInline[T: ClassTag](length: Int): Array[T] =
Array.fill(length)(null.asInstanceOf[T])
@noinline def someInt: Int = 53
@noinline def someChar: Char = 'S'
@noinline def someBoolean: Boolean = false
@noinline def someString: String = "world"
@Test def Int_normal_case_inline(): Unit = {
val b = ArrayBuilder.make[Int]()
b += 42
b += someInt
val a = b.result()
assertSame(classOf[Array[Int]], a.getClass)
assertEquals(2, a.length)
assertTrue(erase(a(0)).isInstanceOf[Int])
assertEquals(42, erase(a(0)))
assertEquals(53, erase(a(1)))
}
@Test def Int_normal_case_noinline(): Unit = {
val b = makeNoInline[Int]()
b += 42
b += someInt
val a = b.result()
assertSame(classOf[Array[Int]], a.getClass)
assertEquals(2, a.length)
assertTrue(erase(a(0)).isInstanceOf[Int])
assertEquals(42, erase(a(0)))
assertEquals(53, erase(a(1)))
}
@Test def Int_zeros_inline(): Unit = {
val a = zerosInline[Int](3)
assertSame(classOf[Array[Int]], a.getClass)
assertEquals(3, a.length)
assertTrue(erase(a(0)).isInstanceOf[Int])
assertEquals(0, erase(a(0)))
}
@Test def Int_zeros_noinline(): Unit = {
val a = zerosNoInline[Int](3)
assertSame(classOf[Array[Int]], a.getClass)
assertEquals(3, a.length)
assertTrue(erase(a(0)).isInstanceOf[Int])
assertEquals(0, erase(a(0)))
}
@Test def Char_normal_case_inline(): Unit = {
val b = ArrayBuilder.make[Char]()
b += 'A'
b += someChar
val a = b.result()
assertSame(classOf[Array[Char]], a.getClass)
assertEquals(2, a.length)
assertTrue(erase(a(0)).isInstanceOf[Char])
assertEquals('A', erase(a(0)))
assertEquals('S', erase(a(1)))
}
@Test def Char_normal_case_noinline(): Unit = {
val b = makeNoInline[Char]()
b += 'A'
b += someChar
val a = b.result()
assertSame(classOf[Array[Char]], a.getClass)
assertEquals(2, a.length)
assertTrue(erase(a(0)).isInstanceOf[Char])
assertEquals('A', erase(a(0)))
assertEquals('S', erase(a(1)))
}
@Test def Char_zeros_inline(): Unit = {
val a = zerosInline[Char](3)
assertSame(classOf[Array[Char]], a.getClass)
assertEquals(3, a.length)
assertTrue(erase(a(0)).isInstanceOf[Char])
    assertEquals('\0', erase(a(0)))
}
@Test def Char_zeros_noinline(): Unit = {
val a = zerosNoInline[Char](3)
assertSame(classOf[Array[Char]], a.getClass)
assertEquals(3, a.length)
assertTrue(erase(a(0)).isInstanceOf[Char])
    assertEquals('\0', erase(a(0)))
}
@Test def Boolean_normal_case_inline(): Unit = {
val b = ArrayBuilder.make[Boolean]()
b += true
b += someBoolean
val a = b.result()
assertSame(classOf[Array[Boolean]], a.getClass)
assertEquals(2, a.length)
assertTrue(erase(a(0)).isInstanceOf[Boolean])
assertEquals(true, erase(a(0)))
assertEquals(false, erase(a(1)))
}
@Test def Boolean_normal_case_noinline(): Unit = {
val b = makeNoInline[Boolean]()
b += true
b += someBoolean
val a = b.result()
assertSame(classOf[Array[Boolean]], a.getClass)
assertEquals(2, a.length)
assertTrue(erase(a(0)).isInstanceOf[Boolean])
assertEquals(true, erase(a(0)))
assertEquals(false, erase(a(1)))
}
@Test def Boolean_zeros_inline(): Unit = {
val a = zerosInline[Boolean](3)
assertSame(classOf[Array[Boolean]], a.getClass)
assertEquals(3, a.length)
assertTrue(erase(a(0)).isInstanceOf[Boolean])
assertEquals(false, erase(a(0)))
}
@Test def Boolean_zeros_noinline(): Unit = {
val a = zerosNoInline[Boolean](3)
assertSame(classOf[Array[Boolean]], a.getClass)
assertEquals(3, a.length)
assertTrue(erase(a(0)).isInstanceOf[Boolean])
assertEquals(false, erase(a(0)))
}
@Test def Unit_normal_case_inline(): Unit = {
val b = ArrayBuilder.make[Unit]()
b += ()
val a = b.result()
assertSame(classOf[Array[Unit]], a.getClass)
assertEquals(1, a.length)
assertTrue(erase(a(0)).isInstanceOf[Unit])
assertEquals((), erase(a(0)))
}
@Test def Unit_normal_case_noinline(): Unit = {
val b = makeNoInline[Unit]()
b += ()
val a = b.result()
assertSame(classOf[Array[Unit]], a.getClass)
assertEquals(1, a.length)
assertTrue(erase(a(0)).isInstanceOf[Unit])
assertEquals((), erase(a(0)))
}
@Test def Unit_zeros_inline(): Unit = {
val a = zerosInline[Unit](3)
assertSame(classOf[Array[Unit]], a.getClass)
assertEquals(3, a.length)
if (!executingInJVM) {
assertTrue(erase(a(0)).isInstanceOf[Unit])
assertTrue(() == erase(a(0)))
} else {
assertFalse(erase(a(0)).isInstanceOf[Unit])
assertFalse(() == erase(a(0)))
}
}
@Test def Unit_zeros_noinline(): Unit = {
val a = zerosNoInline[Unit](3)
assertSame(classOf[Array[Unit]], a.getClass)
assertEquals(3, a.length)
if (!executingInJVM) {
assertTrue(erase(a(0)).isInstanceOf[Unit])
assertTrue(() == erase(a(0)))
} else {
assertFalse(erase(a(0)).isInstanceOf[Unit])
assertFalse(() == erase(a(0)))
}
}
@Test def String_normal_case_inline(): Unit = {
val b = ArrayBuilder.make[String]()
b += "hello"
b += someString
val a = b.result()
assertSame(classOf[Array[String]], a.getClass)
assertEquals(2, a.length)
assertTrue(erase(a(0)).isInstanceOf[String])
assertEquals("hello", erase(a(0)))
assertEquals("world", erase(a(1)))
}
@Test def String_normal_case_noinline(): Unit = {
val b = makeNoInline[String]()
b += "hello"
b += someString
val a = b.result()
assertSame(classOf[Array[String]], a.getClass)
assertEquals(2, a.length)
assertTrue(erase(a(0)).isInstanceOf[String])
assertEquals("hello", erase(a(0)))
assertEquals("world", erase(a(1)))
}
@Test def String_zeros_inline(): Unit = {
val a = zerosInline[String](3)
assertSame(classOf[Array[String]], a.getClass)
assertEquals(3, a.length)
assertEquals(null, erase(a(0)))
}
@Test def String_zeros_noinline(): Unit = {
val a = zerosNoInline[String](3)
assertSame(classOf[Array[String]], a.getClass)
assertEquals(3, a.length)
assertEquals(null, erase(a(0)))
}
@Test def Nothing_and_Null(): Unit = {
assertSame(classOf[Array[Nothing]], ArrayBuilder.make[Nothing]().result().getClass)
assertSame(classOf[Array[Null]], ArrayBuilder.make[Null]().result().getClass)
assertSame(classOf[Array[Nothing]], makeNoInline[Nothing]().result().getClass)
assertSame(classOf[Array[Null]], makeNoInline[Null]().result().getClass)
}
}
|
mdedetrich/scala-js
|
test-suite/shared/src/test/scala/org/scalajs/testsuite/scalalib/ArrayBuilderTest.scala
|
Scala
|
bsd-3-clause
| 7,758 |
/*
* Copyright 2015 Magnus Madsen.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.umonitor.runtime.connector
import dk.umonitor.runtime.Event
import dk.umonitor.runtime.connector.Action.{Connect, Disconnect, _}
import dk.umonitor.util.PropertyMap
object SshConnector {
val DefaultPort = 22
def steps: List[Action] = List(
Connect,
ReadLine(Match.Prefix("SSH-2")),
WriteLine("SSH-2.0-OpenSSH_5.1p1 Debian-5"),
Disconnect
)
def connect(name: String, host: String, port: Option[Int] = None, opts: PropertyMap = PropertyMap.empty): Event =
TcpConnector.run(name, host, port.getOrElse(DefaultPort), steps, opts)
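  // Editor's note (hypothetical usage): probing a host on the default port, e.g.
  //   SshConnector.connect("ssh-check", "example.org")
  // yields an Event describing whether the banner exchange defined in `steps` succeeded.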
}
|
magnus-madsen/umonitor
|
src/dk/umonitor/runtime/connector/SshConnector.scala
|
Scala
|
apache-2.0
| 1,171 |
package spark.broadcast
import java.io._
import java.net._
import java.util.{BitSet, Comparator, Random, Timer, TimerTask, UUID}
import java.util.concurrent.atomic.AtomicInteger
import scala.collection.mutable.{ListBuffer, Map, Set}
import scala.math
import spark._
class BitTorrentBroadcast[T](@transient var value_ : T, isLocal: Boolean)
extends Broadcast[T] with Logging with Serializable {
def value = value_
BitTorrentBroadcast.synchronized {
BitTorrentBroadcast.values.put(uuid, 0, value_)
}
@transient var arrayOfBlocks: Array[BroadcastBlock] = null
@transient var hasBlocksBitVector: BitSet = null
@transient var numCopiesSent: Array[Int] = null
@transient var totalBytes = -1
@transient var totalBlocks = -1
@transient var hasBlocks = new AtomicInteger(0)
// CHANGED: BlockSize in the Broadcast object is expected to change over time
@transient var blockSize = Broadcast.BlockSize
// Used ONLY by Master to track how many unique blocks have been sent out
@transient var sentBlocks = new AtomicInteger(0)
@transient var listenPortLock = new Object
@transient var guidePortLock = new Object
@transient var totalBlocksLock = new Object
@transient var listOfSources = ListBuffer[SourceInfo]()
@transient var serveMR: ServeMultipleRequests = null
// Used only in Master
@transient var guideMR: GuideMultipleRequests = null
// Used only in Workers
@transient var ttGuide: TalkToGuide = null
@transient var rxSpeeds = new SpeedTracker
@transient var txSpeeds = new SpeedTracker
@transient var hostAddress = Utils.localIpAddress
@transient var listenPort = -1
@transient var guidePort = -1
@transient var hasCopyInHDFS = false
@transient var stopBroadcast = false
// Must call this after all the variables have been created/initialized
if (!isLocal) {
sendBroadcast
}
def sendBroadcast(): Unit = {
logInfo("Local host address: " + hostAddress)
// Store a persistent copy in HDFS
// TODO: Turned OFF for now. Related to persistence
// val out = new ObjectOutputStream(BroadcastCH.openFileForWriting(uuid))
// out.writeObject(value_)
// out.close()
// FIXME: Fix this at some point
hasCopyInHDFS = true
// Create a variableInfo object and store it in valueInfos
var variableInfo = Broadcast.blockifyObject(value_)
// Prepare the value being broadcasted
// TODO: Refactoring and clean-up required here
arrayOfBlocks = variableInfo.arrayOfBlocks
totalBytes = variableInfo.totalBytes
totalBlocks = variableInfo.totalBlocks
hasBlocks.set(variableInfo.totalBlocks)
// Guide has all the blocks
hasBlocksBitVector = new BitSet(totalBlocks)
hasBlocksBitVector.set(0, totalBlocks)
// Guide still hasn't sent any block
numCopiesSent = new Array[Int](totalBlocks)
guideMR = new GuideMultipleRequests
guideMR.setDaemon(true)
guideMR.start()
logInfo("GuideMultipleRequests started...")
// Must always come AFTER guideMR is created
while (guidePort == -1) {
guidePortLock.synchronized {
guidePortLock.wait
}
}
serveMR = new ServeMultipleRequests
serveMR.setDaemon(true)
serveMR.start()
logInfo("ServeMultipleRequests started...")
// Must always come AFTER serveMR is created
while (listenPort == -1) {
listenPortLock.synchronized {
listenPortLock.wait
}
}
// Must always come AFTER listenPort is created
val masterSource =
SourceInfo(hostAddress, listenPort, totalBlocks, totalBytes, blockSize)
hasBlocksBitVector.synchronized {
masterSource.hasBlocksBitVector = hasBlocksBitVector
}
// In the beginning, this is the only known source to Guide
listOfSources += masterSource
// Register with the Tracker
registerBroadcast(uuid,
SourceInfo(hostAddress, guidePort, totalBlocks, totalBytes, blockSize))
}
private def readObject(in: ObjectInputStream): Unit = {
in.defaultReadObject
BitTorrentBroadcast.synchronized {
val cachedVal = BitTorrentBroadcast.values.get(uuid, 0)
if (cachedVal != null) {
value_ = cachedVal.asInstanceOf[T]
} else {
// Only the first worker in a node can ever be inside this 'else'
initializeWorkerVariables
logInfo("Local host address: " + hostAddress)
// Start local ServeMultipleRequests thread first
serveMR = new ServeMultipleRequests
serveMR.setDaemon(true)
serveMR.start()
logInfo("ServeMultipleRequests started...")
val start = System.nanoTime
val receptionSucceeded = receiveBroadcast(uuid)
// If does not succeed, then get from HDFS copy
if (receptionSucceeded) {
value_ = Broadcast.unBlockifyObject[T](arrayOfBlocks, totalBytes, totalBlocks)
BitTorrentBroadcast.values.put(uuid, 0, value_)
} else {
// TODO: This part won't work, cause HDFS writing is turned OFF
val fileIn = new ObjectInputStream(DfsBroadcast.openFileForReading(uuid))
value_ = fileIn.readObject.asInstanceOf[T]
BitTorrentBroadcast.values.put(uuid, 0, value_)
fileIn.close()
}
val time = (System.nanoTime - start) / 1e9
logInfo("Reading Broadcasted variable " + uuid + " took " + time + " s")
}
}
}
// Initialize variables in the worker node. Master sends everything as 0/null
private def initializeWorkerVariables: Unit = {
arrayOfBlocks = null
hasBlocksBitVector = null
numCopiesSent = null
totalBytes = -1
totalBlocks = -1
hasBlocks = new AtomicInteger(0)
blockSize = -1
listenPortLock = new Object
totalBlocksLock = new Object
serveMR = null
ttGuide = null
rxSpeeds = new SpeedTracker
txSpeeds = new SpeedTracker
hostAddress = Utils.localIpAddress
listenPort = -1
listOfSources = ListBuffer[SourceInfo]()
stopBroadcast = false
}
private def registerBroadcast(uuid: UUID, gInfo: SourceInfo): Unit = {
val socket = new Socket(Broadcast.MasterHostAddress,
Broadcast.MasterTrackerPort)
val oosST = new ObjectOutputStream(socket.getOutputStream)
oosST.flush()
val oisST = new ObjectInputStream(socket.getInputStream)
// Send messageType/intention
oosST.writeObject(Broadcast.REGISTER_BROADCAST_TRACKER)
oosST.flush()
// Send UUID of this broadcast
oosST.writeObject(uuid)
oosST.flush()
// Send this tracker's information
oosST.writeObject(gInfo)
oosST.flush()
// Receive ACK and throw it away
oisST.readObject.asInstanceOf[Int]
// Shut stuff down
oisST.close()
oosST.close()
socket.close()
}
private def unregisterBroadcast(uuid: UUID): Unit = {
val socket = new Socket(Broadcast.MasterHostAddress,
Broadcast.MasterTrackerPort)
val oosST = new ObjectOutputStream(socket.getOutputStream)
oosST.flush()
val oisST = new ObjectInputStream(socket.getInputStream)
// Send messageType/intention
oosST.writeObject(Broadcast.UNREGISTER_BROADCAST_TRACKER)
oosST.flush()
// Send UUID of this broadcast
oosST.writeObject(uuid)
oosST.flush()
// Receive ACK and throw it away
oisST.readObject.asInstanceOf[Int]
// Shut stuff down
oisST.close()
oosST.close()
socket.close()
}
private def getLocalSourceInfo: SourceInfo = {
// Wait till hostName and listenPort are OK
while (listenPort == -1) {
listenPortLock.synchronized {
listenPortLock.wait
}
}
// Wait till totalBlocks and totalBytes are OK
while (totalBlocks == -1) {
totalBlocksLock.synchronized {
totalBlocksLock.wait
}
}
var localSourceInfo = SourceInfo(
hostAddress, listenPort, totalBlocks, totalBytes, blockSize)
localSourceInfo.hasBlocks = hasBlocks.get
hasBlocksBitVector.synchronized {
localSourceInfo.hasBlocksBitVector = hasBlocksBitVector
}
return localSourceInfo
}
// Add new SourceInfo to the listOfSources. Update if it exists already.
// TODO: Optimizing just by OR-ing the BitVectors was BAD for performance
private def addToListOfSources(newSourceInfo: SourceInfo): Unit = {
listOfSources.synchronized {
if (listOfSources.contains(newSourceInfo)) {
listOfSources = listOfSources - newSourceInfo
}
listOfSources += newSourceInfo
}
}
private def addToListOfSources(newSourceInfos: ListBuffer[SourceInfo]): Unit = {
newSourceInfos.foreach { newSourceInfo =>
addToListOfSources(newSourceInfo)
}
}
class TalkToGuide(gInfo: SourceInfo)
extends Thread with Logging {
override def run: Unit = {
      // Keep exchanging information until all blocks have been received
while (hasBlocks.get < totalBlocks) {
talkOnce
Thread.sleep(BitTorrentBroadcast.ranGen.nextInt(
Broadcast.MaxKnockInterval - Broadcast.MinKnockInterval) +
Broadcast.MinKnockInterval)
}
// Talk one more time to let the Guide know of reception completion
talkOnce
}
// Connect to Guide and send this worker's information
private def talkOnce: Unit = {
var clientSocketToGuide: Socket = null
var oosGuide: ObjectOutputStream = null
var oisGuide: ObjectInputStream = null
clientSocketToGuide = new Socket(gInfo.hostAddress, gInfo.listenPort)
oosGuide = new ObjectOutputStream(clientSocketToGuide.getOutputStream)
oosGuide.flush()
oisGuide = new ObjectInputStream(clientSocketToGuide.getInputStream)
// Send local information
oosGuide.writeObject(getLocalSourceInfo)
oosGuide.flush()
// Receive source information from Guide
var suitableSources =
oisGuide.readObject.asInstanceOf[ListBuffer[SourceInfo]]
logInfo("Received suitableSources from Master " + suitableSources)
addToListOfSources(suitableSources)
oisGuide.close()
oosGuide.close()
clientSocketToGuide.close()
}
}
def getGuideInfo(variableUUID: UUID): SourceInfo = {
var clientSocketToTracker: Socket = null
var oosTracker: ObjectOutputStream = null
var oisTracker: ObjectInputStream = null
var gInfo: SourceInfo = SourceInfo("", SourceInfo.TxOverGoToHDFS)
var retriesLeft = Broadcast.MaxRetryCount
do {
try {
// Connect to the tracker to find out GuideInfo
clientSocketToTracker =
new Socket(Broadcast.MasterHostAddress, Broadcast.MasterTrackerPort)
oosTracker =
new ObjectOutputStream(clientSocketToTracker.getOutputStream)
oosTracker.flush()
oisTracker =
new ObjectInputStream(clientSocketToTracker.getInputStream)
// Send messageType/intention
oosTracker.writeObject(Broadcast.FIND_BROADCAST_TRACKER)
oosTracker.flush()
// Send UUID and receive GuideInfo
oosTracker.writeObject(uuid)
oosTracker.flush()
gInfo = oisTracker.readObject.asInstanceOf[SourceInfo]
} catch {
case e: Exception => {
logInfo("getGuideInfo had a " + e)
}
} finally {
if (oisTracker != null) {
oisTracker.close()
}
if (oosTracker != null) {
oosTracker.close()
}
if (clientSocketToTracker != null) {
clientSocketToTracker.close()
}
}
Thread.sleep(BitTorrentBroadcast.ranGen.nextInt(
Broadcast.MaxKnockInterval - Broadcast.MinKnockInterval) +
Broadcast.MinKnockInterval)
retriesLeft -= 1
} while (retriesLeft > 0 && gInfo.listenPort == SourceInfo.TxNotStartedRetry)
logInfo("Got this guidePort from Tracker: " + gInfo.listenPort)
return gInfo
}
def receiveBroadcast(variableUUID: UUID): Boolean = {
val gInfo = getGuideInfo(variableUUID)
if (gInfo.listenPort == SourceInfo.TxOverGoToHDFS ||
gInfo.listenPort == SourceInfo.TxNotStartedRetry) {
// TODO: SourceInfo.TxNotStartedRetry is not really in use because we go
// to HDFS anyway when receiveBroadcast returns false
return false
}
// Wait until hostAddress and listenPort are created by the
// ServeMultipleRequests thread
while (listenPort == -1) {
listenPortLock.synchronized {
listenPortLock.wait
}
}
// Setup initial states of variables
totalBlocks = gInfo.totalBlocks
arrayOfBlocks = new Array[BroadcastBlock](totalBlocks)
hasBlocksBitVector = new BitSet(totalBlocks)
numCopiesSent = new Array[Int](totalBlocks)
totalBlocksLock.synchronized {
totalBlocksLock.notifyAll
}
totalBytes = gInfo.totalBytes
blockSize = gInfo.blockSize
// Start ttGuide to periodically talk to the Guide
var ttGuide = new TalkToGuide(gInfo)
ttGuide.setDaemon(true)
ttGuide.start()
logInfo("TalkToGuide started...")
// Start pController to run TalkToPeer threads
var pcController = new PeerChatterController
pcController.setDaemon(true)
pcController.start()
logInfo("PeerChatterController started...")
// FIXME: Must fix this. This might never break if broadcast fails.
// We should be able to break and send false. Also need to kill threads
while (hasBlocks.get < totalBlocks) {
Thread.sleep(Broadcast.MaxKnockInterval)
}
return true
}
class PeerChatterController
extends Thread with Logging {
private var peersNowTalking = ListBuffer[SourceInfo]()
// TODO: There is a possible bug with blocksInRequestBitVector when a
// certain bit is NOT unset upon failure resulting in an infinite loop.
private var blocksInRequestBitVector = new BitSet(totalBlocks)
override def run: Unit = {
var threadPool = Utils.newDaemonFixedThreadPool(Broadcast.MaxRxSlots)
while (hasBlocks.get < totalBlocks) {
var numThreadsToCreate =
math.min(listOfSources.size, Broadcast.MaxRxSlots) -
threadPool.getActiveCount
while (hasBlocks.get < totalBlocks && numThreadsToCreate > 0) {
var peerToTalkTo = pickPeerToTalkToRandom
if (peerToTalkTo != null)
logInfo("Peer chosen: " + peerToTalkTo + " with " + peerToTalkTo.hasBlocksBitVector)
else
logInfo("No peer chosen...")
if (peerToTalkTo != null) {
threadPool.execute(new TalkToPeer(peerToTalkTo))
// Add to peersNowTalking. Remove in the thread. We have to do this
// ASAP, otherwise pickPeerToTalkTo picks the same peer more than once
peersNowTalking.synchronized {
peersNowTalking += peerToTalkTo
}
}
numThreadsToCreate = numThreadsToCreate - 1
}
// Sleep for a while before starting some more threads
Thread.sleep(Broadcast.MinKnockInterval)
}
// Shutdown the thread pool
threadPool.shutdown()
}
// Right now picking the one that has the most blocks this peer wants
// Also picking peer randomly if no one has anything interesting
private def pickPeerToTalkToRandom: SourceInfo = {
var curPeer: SourceInfo = null
var curMax = 0
logInfo("Picking peers to talk to...")
// Find peers that are not connected right now
var peersNotInUse = ListBuffer[SourceInfo]()
listOfSources.synchronized {
peersNowTalking.synchronized {
peersNotInUse = listOfSources -- peersNowTalking
}
}
// Select the peer that has the most blocks that this receiver does not
peersNotInUse.foreach { eachSource =>
var tempHasBlocksBitVector: BitSet = null
hasBlocksBitVector.synchronized {
tempHasBlocksBitVector = hasBlocksBitVector.clone.asInstanceOf[BitSet]
}
tempHasBlocksBitVector.flip(0, tempHasBlocksBitVector.size)
tempHasBlocksBitVector.and(eachSource.hasBlocksBitVector)
if (tempHasBlocksBitVector.cardinality > curMax) {
curPeer = eachSource
curMax = tempHasBlocksBitVector.cardinality
}
}
// TODO: Always pick randomly or randomly pick randomly?
// Now always picking randomly
if (curPeer == null && peersNotInUse.size > 0) {
// Pick uniformly the i'th required peer
var i = BitTorrentBroadcast.ranGen.nextInt(peersNotInUse.size)
var peerIter = peersNotInUse.iterator
curPeer = peerIter.next
while (i > 0) {
curPeer = peerIter.next
i = i - 1
}
}
return curPeer
}
// Picking peer with the weight of rare blocks it has
private def pickPeerToTalkToRarestFirst: SourceInfo = {
// Find peers that are not connected right now
var peersNotInUse = ListBuffer[SourceInfo]()
listOfSources.synchronized {
peersNowTalking.synchronized {
peersNotInUse = listOfSources -- peersNowTalking
}
}
// Count the number of copies of each block in the neighborhood
var numCopiesPerBlock = Array.tabulate [Int](totalBlocks)(_ => 0)
listOfSources.synchronized {
listOfSources.foreach { eachSource =>
for (i <- 0 until totalBlocks) {
numCopiesPerBlock(i) +=
( if (eachSource.hasBlocksBitVector.get(i)) 1 else 0 )
}
}
}
// TODO: A block is rare if there are at most 2 copies of that block
// TODO: This CONSTANT could be a function of the neighborhood size
var rareBlocksIndices = ListBuffer[Int]()
for (i <- 0 until totalBlocks) {
if (numCopiesPerBlock(i) > 0 && numCopiesPerBlock(i) <= 2) {
rareBlocksIndices += i
}
}
// Find peers with rare blocks
var peersWithRareBlocks = ListBuffer[(SourceInfo, Int)]()
var totalRareBlocks = 0
peersNotInUse.foreach { eachPeer =>
var hasRareBlocks = 0
rareBlocksIndices.foreach { rareBlock =>
if (eachPeer.hasBlocksBitVector.get(rareBlock)) {
hasRareBlocks += 1
}
}
if (hasRareBlocks > 0) {
peersWithRareBlocks += ((eachPeer, hasRareBlocks))
}
totalRareBlocks += hasRareBlocks
}
// Select a peer from peersWithRareBlocks based on weight calculated from
// unique rare blocks
var selectedPeerToTalkTo: SourceInfo = null
if (peersWithRareBlocks.size > 0) {
// Sort the peers based on how many rare blocks they have
        peersWithRareBlocks = peersWithRareBlocks.sortBy(_._2)
var randomNumber = BitTorrentBroadcast.ranGen.nextDouble
var tempSum = 0.0
var i = 0
do {
tempSum += (1.0 * peersWithRareBlocks(i)._2 / totalRareBlocks)
if (tempSum >= randomNumber) {
selectedPeerToTalkTo = peersWithRareBlocks(i)._1
}
i += 1
} while (i < peersWithRareBlocks.size && selectedPeerToTalkTo == null)
}
if (selectedPeerToTalkTo == null) {
selectedPeerToTalkTo = pickPeerToTalkToRandom
}
return selectedPeerToTalkTo
}
class TalkToPeer(peerToTalkTo: SourceInfo)
extends Thread with Logging {
private var peerSocketToSource: Socket = null
private var oosSource: ObjectOutputStream = null
private var oisSource: ObjectInputStream = null
override def run: Unit = {
// TODO: There is a possible bug here regarding blocksInRequestBitVector
var blockToAskFor = -1
// Setup the timeout mechanism
var timeOutTask = new TimerTask {
override def run: Unit = {
cleanUpConnections()
}
}
var timeOutTimer = new Timer
timeOutTimer.schedule(timeOutTask, Broadcast.MaxKnockInterval)
logInfo("TalkToPeer started... => " + peerToTalkTo)
try {
// Connect to the source
peerSocketToSource =
new Socket(peerToTalkTo.hostAddress, peerToTalkTo.listenPort)
oosSource =
new ObjectOutputStream(peerSocketToSource.getOutputStream)
oosSource.flush()
oisSource =
new ObjectInputStream(peerSocketToSource.getInputStream)
// Receive latest SourceInfo from peerToTalkTo
var newPeerToTalkTo = oisSource.readObject.asInstanceOf[SourceInfo]
// Update listOfSources
addToListOfSources(newPeerToTalkTo)
// Turn the timer OFF, if the sender responds before timeout
timeOutTimer.cancel
// Send the latest SourceInfo
oosSource.writeObject(getLocalSourceInfo)
oosSource.flush()
var keepReceiving = true
while (hasBlocks.get < totalBlocks && keepReceiving) {
blockToAskFor =
pickBlockRandom(newPeerToTalkTo.hasBlocksBitVector)
// No block to request
if (blockToAskFor < 0) {
// Nothing to receive from newPeerToTalkTo
keepReceiving = false
} else {
// Let other threads know that blockToAskFor is being requested
blocksInRequestBitVector.synchronized {
blocksInRequestBitVector.set(blockToAskFor)
}
// Start with sending the blockID
oosSource.writeObject(blockToAskFor)
oosSource.flush()
// CHANGED: Master might send some other block than the one
// requested to ensure fast spreading of all blocks.
val recvStartTime = System.currentTimeMillis
val bcBlock = oisSource.readObject.asInstanceOf[BroadcastBlock]
val receptionTime = (System.currentTimeMillis - recvStartTime)
logInfo("Received block: " + bcBlock.blockID + " from " + peerToTalkTo + " in " + receptionTime + " millis.")
if (!hasBlocksBitVector.get(bcBlock.blockID)) {
arrayOfBlocks(bcBlock.blockID) = bcBlock
// Update the hasBlocksBitVector first
hasBlocksBitVector.synchronized {
hasBlocksBitVector.set(bcBlock.blockID)
hasBlocks.getAndIncrement
}
rxSpeeds.addDataPoint(peerToTalkTo, receptionTime)
// Some block(may NOT be blockToAskFor) has arrived.
// In any case, blockToAskFor is not in request any more
blocksInRequestBitVector.synchronized {
blocksInRequestBitVector.set(blockToAskFor, false)
}
// Reset blockToAskFor to -1. Else it will be considered missing
blockToAskFor = -1
}
// Send the latest SourceInfo
oosSource.writeObject(getLocalSourceInfo)
oosSource.flush()
}
}
} catch {
// EOFException is expected to happen because sender can break
// connection due to timeout
case eofe: java.io.EOFException => { }
case e: Exception => {
          logInfo("TalkToPeer had a " + e)
// FIXME: Remove 'newPeerToTalkTo' from listOfSources
// We probably should have the following in some form, but not
// really here. This exception can happen if the sender just breaks connection
// listOfSources.synchronized {
// logInfo("Exception in TalkToPeer. Removing source: " + peerToTalkTo)
// listOfSources = listOfSources - peerToTalkTo
// }
}
} finally {
// blockToAskFor != -1 => there was an exception
if (blockToAskFor != -1) {
blocksInRequestBitVector.synchronized {
blocksInRequestBitVector.set(blockToAskFor, false)
}
}
cleanUpConnections()
}
}
    // Right now it uniformly picks a block that this peer does not have
private def pickBlockRandom(txHasBlocksBitVector: BitSet): Int = {
var needBlocksBitVector: BitSet = null
// Blocks already present
hasBlocksBitVector.synchronized {
needBlocksBitVector = hasBlocksBitVector.clone.asInstanceOf[BitSet]
}
// Include blocks already in transmission ONLY IF
// BitTorrentBroadcast.EndGameFraction has NOT been achieved
if ((1.0 * hasBlocks.get / totalBlocks) < Broadcast.EndGameFraction) {
blocksInRequestBitVector.synchronized {
needBlocksBitVector.or(blocksInRequestBitVector)
}
}
// Find blocks that are neither here nor in transit
needBlocksBitVector.flip(0, needBlocksBitVector.size)
// Blocks that should/can be requested
needBlocksBitVector.and(txHasBlocksBitVector)
if (needBlocksBitVector.cardinality == 0) {
return -1
} else {
// Pick uniformly the i'th required block
var i = BitTorrentBroadcast.ranGen.nextInt(needBlocksBitVector.cardinality)
var pickedBlockIndex = needBlocksBitVector.nextSetBit(0)
while (i > 0) {
pickedBlockIndex =
needBlocksBitVector.nextSetBit(pickedBlockIndex + 1)
i -= 1
}
return pickedBlockIndex
}
}
// Pick the block that seems to be the rarest across sources
private def pickBlockRarestFirst(txHasBlocksBitVector: BitSet): Int = {
var needBlocksBitVector: BitSet = null
// Blocks already present
hasBlocksBitVector.synchronized {
needBlocksBitVector = hasBlocksBitVector.clone.asInstanceOf[BitSet]
}
// Include blocks already in transmission ONLY IF
// BitTorrentBroadcast.EndGameFraction has NOT been achieved
if ((1.0 * hasBlocks.get / totalBlocks) < Broadcast.EndGameFraction) {
blocksInRequestBitVector.synchronized {
needBlocksBitVector.or(blocksInRequestBitVector)
}
}
// Find blocks that are neither here nor in transit
needBlocksBitVector.flip(0, needBlocksBitVector.size)
// Blocks that should/can be requested
needBlocksBitVector.and(txHasBlocksBitVector)
if (needBlocksBitVector.cardinality == 0) {
return -1
} else {
// Count the number of copies for each block across all sources
var numCopiesPerBlock = Array.tabulate [Int](totalBlocks)(_ => 0)
listOfSources.synchronized {
listOfSources.foreach { eachSource =>
for (i <- 0 until totalBlocks) {
numCopiesPerBlock(i) +=
( if (eachSource.hasBlocksBitVector.get(i)) 1 else 0 )
}
}
}
// Find the minimum
var minVal = Integer.MAX_VALUE
for (i <- 0 until totalBlocks) {
if (numCopiesPerBlock(i) > 0 && numCopiesPerBlock(i) < minVal) {
minVal = numCopiesPerBlock(i)
}
}
// Find the blocks with the least copies that this peer does not have
var minBlocksIndices = ListBuffer[Int]()
for (i <- 0 until totalBlocks) {
if (needBlocksBitVector.get(i) && numCopiesPerBlock(i) == minVal) {
minBlocksIndices += i
}
}
// Now select a random index from minBlocksIndices
if (minBlocksIndices.size == 0) {
return -1
} else {
// Pick uniformly the i'th index
var i = BitTorrentBroadcast.ranGen.nextInt(minBlocksIndices.size)
return minBlocksIndices(i)
}
}
}
private def cleanUpConnections(): Unit = {
if (oisSource != null) {
oisSource.close()
}
if (oosSource != null) {
oosSource.close()
}
if (peerSocketToSource != null) {
peerSocketToSource.close()
}
// Delete from peersNowTalking
peersNowTalking.synchronized {
peersNowTalking = peersNowTalking - peerToTalkTo
}
}
}
}
class GuideMultipleRequests
extends Thread with Logging {
// Keep track of sources that have completed reception
private var setOfCompletedSources = Set[SourceInfo]()
override def run: Unit = {
var threadPool = Utils.newDaemonCachedThreadPool()
var serverSocket: ServerSocket = null
serverSocket = new ServerSocket(0)
guidePort = serverSocket.getLocalPort
logInfo("GuideMultipleRequests => " + serverSocket + " " + guidePort)
guidePortLock.synchronized {
guidePortLock.notifyAll
}
try {
// Don't stop until there is a copy in HDFS
while (!stopBroadcast || !hasCopyInHDFS) {
var clientSocket: Socket = null
try {
serverSocket.setSoTimeout(Broadcast.ServerSocketTimeout)
clientSocket = serverSocket.accept()
} catch {
case e: Exception => {
logInfo("GuideMultipleRequests Timeout.")
// Stop broadcast if at least one worker has connected and
            // everyone connected so far is done. Comparing with
// listOfSources.size - 1, because it includes the Guide itself
if (listOfSources.size > 1 &&
setOfCompletedSources.size == listOfSources.size - 1) {
stopBroadcast = true
}
}
}
if (clientSocket != null) {
logInfo("Guide: Accepted new client connection:" + clientSocket)
try {
threadPool.execute(new GuideSingleRequest(clientSocket))
} catch {
// In failure, close the socket here; else, thread will close it
case ioe: IOException => {
clientSocket.close()
}
}
}
}
// Shutdown the thread pool
threadPool.shutdown()
logInfo("Sending stopBroadcast notifications...")
sendStopBroadcastNotifications
unregisterBroadcast(uuid)
} finally {
if (serverSocket != null) {
logInfo("GuideMultipleRequests now stopping...")
serverSocket.close()
}
}
}
private def sendStopBroadcastNotifications: Unit = {
listOfSources.synchronized {
listOfSources.foreach { sourceInfo =>
var guideSocketToSource: Socket = null
var gosSource: ObjectOutputStream = null
var gisSource: ObjectInputStream = null
try {
// Connect to the source
guideSocketToSource =
new Socket(sourceInfo.hostAddress, sourceInfo.listenPort)
gosSource =
new ObjectOutputStream(guideSocketToSource.getOutputStream)
gosSource.flush()
gisSource =
new ObjectInputStream(guideSocketToSource.getInputStream)
// Throw away whatever comes in
gisSource.readObject.asInstanceOf[SourceInfo]
// Send stopBroadcast signal. listenPort = SourceInfo.StopBroadcast
gosSource.writeObject(SourceInfo("", SourceInfo.StopBroadcast))
gosSource.flush()
} catch {
case e: Exception => {
logInfo("sendStopBroadcastNotifications had a " + e)
}
} finally {
if (gisSource != null) {
gisSource.close()
}
if (gosSource != null) {
gosSource.close()
}
if (guideSocketToSource != null) {
guideSocketToSource.close()
}
}
}
}
}
class GuideSingleRequest(val clientSocket: Socket)
extends Thread with Logging {
private val oos = new ObjectOutputStream(clientSocket.getOutputStream)
oos.flush()
private val ois = new ObjectInputStream(clientSocket.getInputStream)
private var sourceInfo: SourceInfo = null
private var selectedSources: ListBuffer[SourceInfo] = null
override def run: Unit = {
try {
logInfo("new GuideSingleRequest is running")
// Connecting worker is sending in its information
sourceInfo = ois.readObject.asInstanceOf[SourceInfo]
// Select a suitable source and send it back to the worker
selectedSources = selectSuitableSources(sourceInfo)
logInfo("Sending selectedSources:" + selectedSources)
oos.writeObject(selectedSources)
oos.flush()
// Add this source to the listOfSources
addToListOfSources(sourceInfo)
} catch {
case e: Exception => {
// Assuming exception caused by receiver failure: remove
if (listOfSources != null) {
listOfSources.synchronized {
listOfSources = listOfSources - sourceInfo
}
}
}
} finally {
ois.close()
oos.close()
clientSocket.close()
}
}
// Randomly select some sources to send back
private def selectSuitableSources(skipSourceInfo: SourceInfo): ListBuffer[SourceInfo] = {
var selectedSources = ListBuffer[SourceInfo]()
// If skipSourceInfo.hasBlocksBitVector has all bits set to 'true'
// then add skipSourceInfo to setOfCompletedSources. Return blank.
if (skipSourceInfo.hasBlocks == totalBlocks) {
setOfCompletedSources.synchronized {
setOfCompletedSources += skipSourceInfo
}
return selectedSources
}
listOfSources.synchronized {
if (listOfSources.size <= Broadcast.MaxPeersInGuideResponse) {
selectedSources = listOfSources.clone
} else {
var picksLeft = Broadcast.MaxPeersInGuideResponse
var alreadyPicked = new BitSet(listOfSources.size)
while (picksLeft > 0) {
var i = -1
do {
i = BitTorrentBroadcast.ranGen.nextInt(listOfSources.size)
} while (alreadyPicked.get(i))
var peerIter = listOfSources.iterator
var curPeer = peerIter.next
// Set the BitSet before i is decremented
alreadyPicked.set(i)
while (i > 0) {
curPeer = peerIter.next
i = i - 1
}
selectedSources += curPeer
picksLeft = picksLeft - 1
}
}
}
// Remove the receiving source (if present)
selectedSources = selectedSources - skipSourceInfo
return selectedSources
}
}
}
class ServeMultipleRequests
extends Thread with Logging {
// Server at most Broadcast.MaxTxSlots peers
var threadPool = Utils.newDaemonFixedThreadPool(Broadcast.MaxTxSlots)
override def run: Unit = {
var serverSocket = new ServerSocket(0)
listenPort = serverSocket.getLocalPort
logInfo("ServeMultipleRequests started with " + serverSocket)
listenPortLock.synchronized {
listenPortLock.notifyAll
}
try {
while (!stopBroadcast) {
var clientSocket: Socket = null
try {
serverSocket.setSoTimeout(Broadcast.ServerSocketTimeout)
clientSocket = serverSocket.accept()
} catch {
case e: Exception => {
logInfo("ServeMultipleRequests Timeout.")
}
}
if (clientSocket != null) {
logInfo("Serve: Accepted new client connection:" + clientSocket)
try {
threadPool.execute(new ServeSingleRequest(clientSocket))
} catch {
// In failure, close socket here; else, the thread will close it
case ioe: IOException => {
clientSocket.close()
}
}
}
}
} finally {
if (serverSocket != null) {
logInfo("ServeMultipleRequests now stopping...")
serverSocket.close()
}
}
// Shutdown the thread pool
threadPool.shutdown()
}
class ServeSingleRequest(val clientSocket: Socket)
extends Thread with Logging {
private val oos = new ObjectOutputStream(clientSocket.getOutputStream)
oos.flush()
private val ois = new ObjectInputStream(clientSocket.getInputStream)
logInfo("new ServeSingleRequest is running")
override def run: Unit = {
try {
// Send latest local SourceInfo to the receiver
// In the case of receiver timeout and connection close, this will
// throw a java.net.SocketException: Broken pipe
oos.writeObject(getLocalSourceInfo)
oos.flush()
// Receive latest SourceInfo from the receiver
var rxSourceInfo = ois.readObject.asInstanceOf[SourceInfo]
if (rxSourceInfo.listenPort == SourceInfo.StopBroadcast) {
stopBroadcast = true
} else {
// Carry on
addToListOfSources(rxSourceInfo)
}
val startTime = System.currentTimeMillis
var curTime = startTime
var keepSending = true
var numBlocksToSend = Broadcast.MaxChatBlocks
while (!stopBroadcast && keepSending && numBlocksToSend > 0) {
// Receive which block to send
var blockToSend = ois.readObject.asInstanceOf[Int]
// If it is master AND at least one copy of each block has not been
// sent out already, MODIFY blockToSend
if (BitTorrentBroadcast.isMaster && sentBlocks.get < totalBlocks) {
blockToSend = sentBlocks.getAndIncrement
}
// Send the block
sendBlock(blockToSend)
rxSourceInfo.hasBlocksBitVector.set(blockToSend)
numBlocksToSend -= 1
// Receive latest SourceInfo from the receiver
rxSourceInfo = ois.readObject.asInstanceOf[SourceInfo]
// logInfo("rxSourceInfo: " + rxSourceInfo + " with " + rxSourceInfo.hasBlocksBitVector)
addToListOfSources(rxSourceInfo)
curTime = System.currentTimeMillis
// Revoke sending only if there is anyone waiting in the queue
if (curTime - startTime >= Broadcast.MaxChatTime &&
threadPool.getQueue.size > 0) {
keepSending = false
}
}
} catch {
// If something went wrong, e.g., the worker at the other end died etc.
// then close everything up
// Exception can happen if the receiver stops receiving
case e: Exception => {
logInfo("ServeSingleRequest had a " + e)
}
} finally {
logInfo("ServeSingleRequest is closing streams and sockets")
ois.close()
// TODO: The following line causes a "java.net.SocketException: Socket closed"
oos.close()
clientSocket.close()
}
}
private def sendBlock(blockToSend: Int): Unit = {
try {
oos.writeObject(arrayOfBlocks(blockToSend))
oos.flush()
} catch {
case e: Exception => {
logInfo("sendBlock had a " + e)
}
}
logInfo("Sent block: " + blockToSend + " to " + clientSocket)
}
}
}
}
class BitTorrentBroadcastFactory
extends BroadcastFactory {
def initialize(isMaster: Boolean) = {
BitTorrentBroadcast.initialize(isMaster)
}
def newBroadcast[T](value_ : T, isLocal: Boolean) =
new BitTorrentBroadcast[T](value_, isLocal)
}
private object BitTorrentBroadcast
extends Logging {
val values = SparkEnv.get.cache.newKeySpace()
var valueToGuideMap = Map[UUID, SourceInfo]()
// Random number generator
var ranGen = new Random
private var initialized = false
private var isMaster_ = false
private var trackMV: TrackMultipleValues = null
def initialize(isMaster__ : Boolean): Unit = {
synchronized {
if (!initialized) {
isMaster_ = isMaster__
if (isMaster) {
trackMV = new TrackMultipleValues
trackMV.setDaemon(true)
trackMV.start()
        // TODO: Logging the following line prevents the Spark framework ID from being
        // logged, because it calls logInfo before log4j is initialized
logInfo("TrackMultipleValues started...")
}
// Initialize DfsBroadcast to be used for broadcast variable persistence
// TODO: Think about persistence
DfsBroadcast.initialize
initialized = true
}
}
}
def isMaster = isMaster_
class TrackMultipleValues
extends Thread with Logging {
override def run: Unit = {
var threadPool = Utils.newDaemonCachedThreadPool()
var serverSocket: ServerSocket = null
serverSocket = new ServerSocket(Broadcast.MasterTrackerPort)
logInfo("TrackMultipleValues" + serverSocket)
try {
while (true) {
var clientSocket: Socket = null
try {
serverSocket.setSoTimeout(Broadcast.TrackerSocketTimeout)
clientSocket = serverSocket.accept()
} catch {
case e: Exception => {
logInfo("TrackMultipleValues Timeout. Stopping listening...")
}
}
if (clientSocket != null) {
try {
threadPool.execute(new Thread {
override def run: Unit = {
val oos = new ObjectOutputStream(clientSocket.getOutputStream)
oos.flush()
val ois = new ObjectInputStream(clientSocket.getInputStream)
try {
// First, read message type
val messageType = ois.readObject.asInstanceOf[Int]
if (messageType == Broadcast.REGISTER_BROADCAST_TRACKER) {
// Receive UUID
val uuid = ois.readObject.asInstanceOf[UUID]
// Receive hostAddress and listenPort
val gInfo = ois.readObject.asInstanceOf[SourceInfo]
// Add to the map
valueToGuideMap.synchronized {
valueToGuideMap += (uuid -> gInfo)
}
logInfo ("New broadcast registered with TrackMultipleValues " + uuid + " " + valueToGuideMap)
// Send dummy ACK
oos.writeObject(-1)
oos.flush()
} else if (messageType == Broadcast.UNREGISTER_BROADCAST_TRACKER) {
// Receive UUID
val uuid = ois.readObject.asInstanceOf[UUID]
// Remove from the map
valueToGuideMap.synchronized {
valueToGuideMap(uuid) = SourceInfo("", SourceInfo.TxOverGoToHDFS)
logInfo("Value unregistered from the Tracker " + valueToGuideMap)
}
logInfo ("Broadcast unregistered from TrackMultipleValues " + uuid + " " + valueToGuideMap)
// Send dummy ACK
oos.writeObject(-1)
oos.flush()
} else if (messageType == Broadcast.FIND_BROADCAST_TRACKER) {
// Receive UUID
val uuid = ois.readObject.asInstanceOf[UUID]
var gInfo =
if (valueToGuideMap.contains(uuid)) valueToGuideMap(uuid)
else SourceInfo("", SourceInfo.TxNotStartedRetry)
logInfo("TrackMultipleValues: Got new request: " + clientSocket + " for " + uuid + " : " + gInfo.listenPort)
// Send reply back
oos.writeObject(gInfo)
oos.flush()
} else if (messageType == Broadcast.GET_UPDATED_SHARE) {
// TODO: Not implemented
} else {
throw new SparkException("Undefined messageType at TrackMultipleValues")
}
} catch {
case e: Exception => {
logInfo("TrackMultipleValues had a " + e)
}
} finally {
ois.close()
oos.close()
clientSocket.close()
}
}
})
} catch {
// In failure, close socket here; else, client thread will close
case ioe: IOException => {
clientSocket.close()
}
}
}
}
} finally {
serverSocket.close()
}
// Shutdown the thread pool
threadPool.shutdown()
}
}
}
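// Editor's note: the object below is an illustrative sketch added for clarity and
// is not part of the original file. It reduces the "rarest first" block selection
// used by pickBlockRarestFirst above to plain Scala collections: given the blocks
// each known peer advertises, pick a still-needed block with the fewest copies.
object RarestFirstSketch {
  def rarestNeededBlock(have: Set[Int], peers: Seq[Set[Int]]): Option[Int] = {
    // count how many peers hold each block
    val copies = peers.flatten.groupBy(identity).mapValues(_.size)
    // keep only blocks we do not have yet
    val candidates = copies.filterKeys(block => !have.contains(block))
    if (candidates.isEmpty) None
    else Some(candidates.minBy(_._2)._1)
  }
  // rarestNeededBlock(Set(0), Seq(Set(0, 1), Set(0, 1, 2), Set(1))) == Some(2)
}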
|
javelinjs/spark
|
core/src/main/scala/spark/broadcast/BitTorrentBroadcast.scala
|
Scala
|
bsd-3-clause
| 45,047 |
/*
*************************************************************************************
* Copyright 2014 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.reports
import net.liftweb.common._
import net.liftweb.json._
/**
 * Define the level of compliance reporting:
 *
 * - full-compliance: process all reports, including success reports (historical Rudder way)
 * - changes-only: only process repaired and error reports
 * - reports-disabled: no reports are expected or processed
 */
sealed trait ComplianceModeName {
val name : String
}
case object FullCompliance extends ComplianceModeName {
val name = "full-compliance"
}
case object ChangesOnly extends ComplianceModeName {
val name = "changes-only"
}
case object ReportsDisabled extends ComplianceModeName {
val name = "reports-disabled"
}
object ComplianceModeName {
val allModes : List[ComplianceModeName] = FullCompliance :: ChangesOnly :: ReportsDisabled :: Nil
def parse (value : String) : Box[ComplianceModeName] = {
allModes.find { _.name == value } match {
case None =>
        Failure(s"Unable to parse the compliance mode name '${value}'. Was expecting ${allModes.map(_.name).mkString("'", "' or '", "'")}.")
case Some(mode) =>
Full(mode)
}
}
}
sealed trait ComplianceMode {
def mode: ComplianceModeName
def heartbeatPeriod : Int
val name = mode.name
}
case class GlobalComplianceMode (
mode : ComplianceModeName
, heartbeatPeriod : Int
) extends ComplianceMode
case class NodeComplianceMode (
mode : ComplianceModeName
, heartbeatPeriod : Int
, overrideGlobal : Boolean
) extends ComplianceMode
trait ComplianceModeService {
def getGlobalComplianceMode : Box[GlobalComplianceMode]
}
class ComplianceModeServiceImpl (
readComplianceMode : () => Box[String]
, readHeartbeatFreq : () => Box[Int]
) extends ComplianceModeService {
def getGlobalComplianceMode : Box[GlobalComplianceMode] = {
for {
modeName <- readComplianceMode()
mode <- ComplianceModeName.parse(modeName)
heartbeat <- readHeartbeatFreq()
} yield {
GlobalComplianceMode(
mode
, heartbeat
)
}
}
}
|
armeniaca/rudder
|
rudder-core/src/main/scala/com/normation/rudder/reports/ComplianceMode.scala
|
Scala
|
gpl-3.0
| 3,677 |
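A short sketch of how the pieces in ComplianceMode.scala fit together: ComplianceModeName.parse turns a stored string back into a mode (or a Failure listing the accepted names), and ComplianceModeServiceImpl only needs two reader callbacks. The two readers below are stand-ins invented for the example; in Rudder they would come from the configuration repository.

import net.liftweb.common.{Box, Full}
import com.normation.rudder.reports._

object ComplianceModeSketch extends App {
  // Stand-in readers for the example.
  val readMode: () => Box[String]   = () => Full("changes-only")
  val readHeartbeat: () => Box[Int] = () => Full(10)

  val service = new ComplianceModeServiceImpl(readMode, readHeartbeat)

  service.getGlobalComplianceMode match {
    case Full(GlobalComplianceMode(ChangesOnly, heartbeat)) =>
      println(s"changes-only mode, heartbeat every $heartbeat runs")
    case other =>
      println(s"unexpected: $other")
  }

  // Unknown names fail with the message built in parse():
  // ComplianceModeName.parse("whatever") => Failure("Unable to parse the compliance mode name 'whatever'. ...")
}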
/*
* Copyright 2014 Intelix Pty Ltd
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package common.actors
import akka.actor.{ActorRefFactory, Props}
import com.typesafe.config.Config
trait ActorObjWithConfig extends ActorObj {
def props(implicit config: Config): Props
def start(implicit f: ActorRefFactory, config: Config) = f.actorOf(props, id)
}
|
mglukh/ehub
|
modules/core/src/main/scala/common/actors/ActorObjWithConfig.scala
|
Scala
|
apache-2.0
| 872 |
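ActorObj itself is not part of this entry; judging from the way start names the actor, it presumably exposes an id. A hedged sketch of a concrete companion under that assumption follows (PingActor, the id value, and the boot object are illustrative only).

import akka.actor.{Actor, ActorSystem, Props}
import com.typesafe.config.{Config, ConfigFactory}
import common.actors.ActorObjWithConfig

class PingActor extends Actor {
  def receive = { case msg => sender() ! msg }
}

// Assumes ActorObj declares `def id: String`, which is what `start` uses as the actor name.
object PingActor extends ActorObjWithConfig {
  override val id = "ping"
  override def props(implicit config: Config): Props = Props(new PingActor)
}

object PingBootSketch extends App {
  implicit val system: ActorSystem = ActorSystem("sketch")   // an ActorSystem is an ActorRefFactory
  implicit val config: Config = ConfigFactory.load()
  val ref = PingActor.start   // resolves both implicits and calls f.actorOf(props, id)
}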
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.hmrcemailrenderer.templates.awrs
import uk.gov.hmrc.hmrcemailrenderer.domain.{MessagePriority, MessageTemplate}
import uk.gov.hmrc.hmrcemailrenderer.templates.ServiceIdentifier.AlcoholWholesalerRegistrationScheme
import uk.gov.hmrc.hmrcemailrenderer.templates.FromAddress.govUkTeamAddress
object AwrsTemplates {
val templates = Seq(
MessageTemplate.create(
templateId = "awrs_notification_template_reg_change",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS registration change",
plainTemplate = txt.awrsNotificationEmailRegChange.f,
htmlTemplate = html.awrsNotificationEmailRegChange.f),
MessageTemplate.create(
templateId = "awrs_notification_template_app_change",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS application change",
plainTemplate = txt.awrsNotificationEmailAppChange.f,
htmlTemplate = html.awrsNotificationEmailAppChange.f),
MessageTemplate.create(
templateId = "awrs_notification_template_comfirmation_api4",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS application confirmation",
plainTemplate = txt.awrsNotificationConfirmationAPI4.f,
htmlTemplate = html.awrsNotificationConfirmationAPI4.f),
MessageTemplate.create(
templateId = "awrs_notification_template_comfirmation_api4_new_business",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS application confirmation",
plainTemplate = txt.awrsNotificationConfirmationAPI4NewBusiness.f,
htmlTemplate = html.awrsNotificationConfirmationAPI4NewBusiness.f),
MessageTemplate.create(
templateId = "awrs_notification_template_comfirmation_api6",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS amendment confirmation",
plainTemplate = txt.awrsNotificationConfirmationAPI6.f,
htmlTemplate = html.awrsNotificationConfirmationAPI6.f),
MessageTemplate.create(
templateId = "awrs_notification_template_comfirmation_api6_pending",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS amendment confirmation",
plainTemplate = txt.awrsNotificationConfirmationAPI6Pending.f,
htmlTemplate = html.awrsNotificationConfirmationAPI6Pending.f),
MessageTemplate.create(
templateId = "awrs_notification_template_comfirmation_api6_new_business",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS amendment confirmation",
plainTemplate = txt.awrsNotificationConfirmationAPI6NewBusiness.f,
htmlTemplate = html.awrsNotificationConfirmationAPI6NewBusiness.f),
MessageTemplate.create(
templateId = "awrs_notification_template_comfirmation_api6_new_business_pending",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS amendment confirmation",
plainTemplate = txt.awrsNotificationConfirmationAPI6NewBusinessPending.f,
htmlTemplate = html.awrsNotificationConfirmationAPI6NewBusinessPending.f),
MessageTemplate.create(
templateId = "awrs_email_verification",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "Verify your email address",
plainTemplate = txt.awrsEmailVerification.f,
htmlTemplate = html.awrsEmailVerification.f,
priority = Some(MessagePriority.Urgent)),
MessageTemplate.create(
templateId = "awrs_notification_template_cancellation_api10",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS cancellation request",
plainTemplate = txt.awrsNotificationCancellationAPI10.f,
htmlTemplate = html.awrsNotificationCancellationAPI10.f),
MessageTemplate.create(
templateId = "awrs_notification_template_withdrawn_api8",
fromAddress = govUkTeamAddress,
service = AlcoholWholesalerRegistrationScheme,
subject = "AWRS withdrawal request",
plainTemplate = txt.awrsNotificationWithdrawalAPI8.f,
htmlTemplate = html.awrsNotificationWithdrawalAPI8.f)
)
}
|
saurabharora80/hmrc-email-renderer
|
app/uk/gov/hmrc/hmrcemailrenderer/templates/awrs/AwrsTemplates.scala
|
Scala
|
apache-2.0
| 5,022 |
object Versions {
val ScalaVer = "2.11.6"
val JDKVer = "1.7"
val TypesafeConfigVer = "1.2.1"
val ScalaTestVer = "2.1.7"
val ScalaCheckVer = "1.11.4"
val JunitVer = "4.10"
val AbideCoreVer = "0.1-SNAPSHOT"
val AbideExtraVer = "0.1-SNAPSHOT"
val WartremoverVer = "0.10"
val LogbackVer = "1.1.2"
}
|
codejitsu/flower
|
project/Versions.scala
|
Scala
|
apache-2.0
| 377 |
package org.jetbrains.plugins.scala.format
import com.intellij.psi.{PsiClass, PsiElement, PsiMethod}
import org.jetbrains.plugins.scala.extensions.{&&, ContainingClass, PsiClassExt, PsiReferenceEx}
import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral
import org.jetbrains.plugins.scala.lang.psi.api.expr.{MethodInvocation, ScExpression}
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunction
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject
/**
* Pavel Fatin
*/
object PrintStringParser extends StringParser {
def parse(element: PsiElement): Option[Seq[StringPart]] = {
extractPrintCall(element).map(p => FormattedStringParser.parseFormatCall(p._1, p._2))
}
def extractPrintCall(element: PsiElement): Option[(ScLiteral, Seq[ScExpression])] = Some(element) collect {
// printf("%d", 1)
case MethodInvocation(PsiReferenceEx.resolve((f: ScFunction) &&
ContainingClass(owner: ScObject)), Seq(literal: ScLiteral, args @ _*))
if literal.isString && isPrintfMethod(owner.qualifiedName, f.name) =>
(literal, args)
// System.out.printf("%d", 1)
case MethodInvocation(PsiReferenceEx.resolve((f: PsiMethod) &&
ContainingClass(owner: PsiClass)), Seq(literal: ScLiteral, args @ _*))
if literal.isString && isPrintStreamPrintfMethod(owner.qualifiedName, f.getName) =>
(literal, args)
}
private def isPrintStreamPrintfMethod(holder: String, method: String) =
holder == "java.io.PrintStream" && method == "printf"
private def isPrintfMethod(holder: String, method: String) =
holder == "scala.Predef" && method == "printf"
}
|
ilinum/intellij-scala
|
src/org/jetbrains/plugins/scala/format/PrintStringParser.scala
|
Scala
|
apache-2.0
| 1,657 |
package org.photon.login
import com.typesafe.config.ConfigFactory
import java.io.File
import org.photon.common.components._
trait LoginServerComponent extends AnyRef
with ConfigurationComponent
with ServiceManagerComponent
with ExecutorComponentImpl
with DatabaseComponentImpl
with UserRepositoryComponentImpl
with UserAuthenticationComponentImpl
with NetworkComponentImpl
with RealmManagerComponentImpl
with HandlerComponentImpl
object LoginServer extends LoginServerComponent with BootableComponent {
lazy val config = sys.props.get("photon.config")
.map { file => ConfigFactory.parseFile(new File(file)) }
.getOrElse(ConfigFactory.empty())
.withFallback(ConfigFactory.load())
lazy val databaseUrl = config.getString("photon.database.url")
lazy val databaseDriver = config.getString("photon.database.driver")
def main(args: Array[String]) = boot()
}
|
Emudofus/Photon
|
login/main/src/org/photon/login/LoginServer.scala
|
Scala
|
mit
| 876 |
/*
Copyright (c) 2013-2016 Karol M. Stasiak
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package io.github.karols.units.internal
object SingleUnits {
import Bools._
import Strings._
import language.{higherKinds, implicitConversions}
/** Represents a defined unit raised to some non-zero power. */
trait TSingleUnit {
type Name <: TString
// type Get[V<:TSingleUnit] = If[V#Name === Name,
// P1,
// _0,
// Nat]
type Equals[That<:TSingleUnit] <: TBool
type LessEqualGreater[That<:TSingleUnit, Less<:Result, Equal<:Result, Greater<:Result, Result] <: Result
}
trait ASingleUnit[N<:TString] extends TSingleUnit {
type Name = N
type Equals[That<:TSingleUnit] = N === That#Name
type LessEqualGreater[That<:TSingleUnit, Less<:Result, Equal<:Result, Greater<:Result, Result] =
(N===That#Name)#DoIf[
Equal,
(N#LessOrEqual[That#Name])#DoIf2[
Less,
Greater,
Result],
Result]
}
type ====[X<:TSingleUnit,Y<:TSingleUnit] = X#Equals[Y]
}
|
KarolS/units
|
units/src/main/scala/io/github/karols/units/internal/SingleUnits.scala
|
Scala
|
mit
| 1,973 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import java.io.IOException
import scala.reflect.ClassTag
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark._
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.deploy.SparkHadoopUtil
private[spark] class CheckpointRDDPartition(val index: Int) extends Partition {}
/**
 * This RDD represents an RDD checkpoint file (similar to HadoopRDD).
*/
private[spark]
class CheckpointRDD[T: ClassTag](sc: SparkContext, val checkpointPath: String)
extends RDD[T](sc, Nil) {
val broadcastedConf = sc.broadcast(new SerializableWritable(sc.hadoopConfiguration))
@transient val fs = new Path(checkpointPath).getFileSystem(sc.hadoopConfiguration)
override def getPartitions: Array[Partition] = {
val cpath = new Path(checkpointPath)
val numPartitions =
// listStatus can throw exception if path does not exist.
if (fs.exists(cpath)) {
val dirContents = fs.listStatus(cpath).map(_.getPath)
val partitionFiles = dirContents.filter(_.getName.startsWith("part-")).map(_.toString).sorted
val numPart = partitionFiles.size
if (numPart > 0 && (! partitionFiles(0).endsWith(CheckpointRDD.splitIdToFile(0)) ||
! partitionFiles(numPart-1).endsWith(CheckpointRDD.splitIdToFile(numPart-1)))) {
throw new SparkException("Invalid checkpoint directory: " + checkpointPath)
}
numPart
} else 0
Array.tabulate(numPartitions)(i => new CheckpointRDDPartition(i))
}
checkpointData = Some(new RDDCheckpointData[T](this))
checkpointData.get.cpFile = Some(checkpointPath)
override def getPreferredLocations(split: Partition): Seq[String] = {
val status = fs.getFileStatus(new Path(checkpointPath,
CheckpointRDD.splitIdToFile(split.index)))
val locations = fs.getFileBlockLocations(status, 0, status.getLen)
locations.headOption.toList.flatMap(_.getHosts).filter(_ != "localhost")
}
override def compute(split: Partition, context: TaskContext): Iterator[T] = {
val file = new Path(checkpointPath, CheckpointRDD.splitIdToFile(split.index))
CheckpointRDD.readFromFile(file, broadcastedConf, context)
}
override def checkpoint() {
// Do nothing. CheckpointRDD should not be checkpointed.
}
}
private[spark] object CheckpointRDD extends Logging {
def splitIdToFile(splitId: Int): String = {
"part-%05d".format(splitId)
}
def writeToFile[T: ClassTag](
path: String,
broadcastedConf: Broadcast[SerializableWritable[Configuration]],
blockSize: Int = -1
)(ctx: TaskContext, iterator: Iterator[T]) {
val env = SparkEnv.get
val outputDir = new Path(path)
val fs = outputDir.getFileSystem(broadcastedConf.value.value)
val finalOutputName = splitIdToFile(ctx.partitionId)
val finalOutputPath = new Path(outputDir, finalOutputName)
val tempOutputPath =
new Path(outputDir, "." + finalOutputName + "-attempt-" + ctx.attemptNumber)
if (fs.exists(tempOutputPath)) {
throw new IOException("Checkpoint failed: temporary path " +
tempOutputPath + " already exists")
}
val bufferSize = env.conf.getInt("spark.buffer.size", 65536)
val fileOutputStream = if (blockSize < 0) {
fs.create(tempOutputPath, false, bufferSize)
} else {
// This is mainly for testing purpose
fs.create(tempOutputPath, false, bufferSize, fs.getDefaultReplication, blockSize)
}
val serializer = env.serializer.newInstance()
val serializeStream = serializer.serializeStream(fileOutputStream)
serializeStream.writeAll(iterator)
serializeStream.close()
if (!fs.rename(tempOutputPath, finalOutputPath)) {
if (!fs.exists(finalOutputPath)) {
logInfo("Deleting tempOutputPath " + tempOutputPath)
fs.delete(tempOutputPath, false)
throw new IOException("Checkpoint failed: failed to save output of task: "
+ ctx.attemptNumber + " and final output path does not exist")
} else {
// Some other copy of this task must've finished before us and renamed it
logInfo("Final output path " + finalOutputPath + " already exists; not overwriting it")
fs.delete(tempOutputPath, false)
}
}
}
def readFromFile[T](
path: Path,
broadcastedConf: Broadcast[SerializableWritable[Configuration]],
context: TaskContext
): Iterator[T] = {
val env = SparkEnv.get
val fs = path.getFileSystem(broadcastedConf.value.value)
val bufferSize = env.conf.getInt("spark.buffer.size", 65536)
val fileInputStream = fs.open(path, bufferSize)
val serializer = env.serializer.newInstance()
val deserializeStream = serializer.deserializeStream(fileInputStream)
// Register an on-task-completion callback to close the input stream.
context.addTaskCompletionListener(context => deserializeStream.close())
deserializeStream.asIterator.asInstanceOf[Iterator[T]]
}
  // Test whether CheckpointRDD generates the expected number of partitions despite
// each split file having multiple blocks. This needs to be run on a
// cluster (mesos or standalone) using HDFS.
def main(args: Array[String]) {
import org.apache.spark._
val Array(cluster, hdfsPath) = args
val env = SparkEnv.get
val sc = new SparkContext(cluster, "CheckpointRDD Test")
val rdd = sc.makeRDD(1 to 10, 10).flatMap(x => 1 to 10000)
val path = new Path(hdfsPath, "temp")
val conf = SparkHadoopUtil.get.newConfiguration(new SparkConf())
val fs = path.getFileSystem(conf)
val broadcastedConf = sc.broadcast(new SerializableWritable(conf))
sc.runJob(rdd, CheckpointRDD.writeToFile[Int](path.toString, broadcastedConf, 1024) _)
val cpRDD = new CheckpointRDD[Int](sc, path.toString)
assert(cpRDD.partitions.length == rdd.partitions.length, "Number of partitions is not the same")
assert(cpRDD.collect.toList == rdd.collect.toList, "Data of partitions not the same")
fs.delete(path, true)
}
}
|
Dax1n/spark-core
|
core/src/main/scala/org/apache/spark/rdd/CheckpointRDD.scala
|
Scala
|
apache-2.0
| 6,806 |
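CheckpointRDD is internal plumbing: user code never constructs it directly; it is materialized once an RDD marked for checkpointing has been computed by an action, at which point writeToFile persists each partition and the lineage is truncated. A minimal driver-side sketch of that flow using only the public SparkContext/RDD API (the checkpoint directory path is illustrative; on a real cluster it should be an HDFS path):

import org.apache.spark.{SparkConf, SparkContext}

object CheckpointSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("checkpoint-sketch").setMaster("local[2]"))
    sc.setCheckpointDir("/tmp/checkpoints")   // where the part-NNNNN files are written

    val rdd = sc.parallelize(1 to 1000, 4).map(_ * 2)
    rdd.checkpoint()   // only marks the RDD; nothing is written yet
    rdd.count()        // the first action computes the RDD and then saves its partitions

    // After the action, reads of this RDD go through the checkpoint files instead of the lineage.
    println(rdd.isCheckpointed)   // true
    sc.stop()
  }
}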
package com.sksamuel.elastic4s
import com.sksamuel.elastic4s.ElasticDate.{ElasticDateMathShow, TimestampElasticDateShow, UnparsedElasticDateShow}
import com.sksamuel.elastic4s.ext.OptionImplicits._
import java.time.LocalDate
import java.time.format.DateTimeFormatter
import scala.language.implicitConversions
abstract class TimeUnit(val symbol: String)
case object Years extends TimeUnit("y")
case object Months extends TimeUnit("M")
case object Weeks extends TimeUnit("w")
case object Days extends TimeUnit("d")
case object Hours extends TimeUnit("h")
case object Minutes extends TimeUnit("m")
case object Seconds extends TimeUnit("s")
case class Adjustment(value: Int, unit: TimeUnit)
/**
* Implementations of this trait are used to represent dates and date math
* in elasticsearch requests.
*
* see https://www.elastic.co/guide/en/elasticsearch/reference/current/common-options.html#date-math
*
* There are three types of elastic date you can create.
*
* The first is just a wrapped string with no extra help: UnparsedElasticDate("mydate||/d")
*
* The second is a wrapped timestamp: TimestampElasticDate(1113232321L)
*
 * The third and most useful is the ElasticDateMath which allows you to programmatically add
* or subtract values, as well as add a rounding, and it will create the appropriate date string for you.
* For example, ElasticDate.now.minus(3, Months).add(1, Days).rounding(Weeks)
*/
trait ElasticDate {
def show: String
}
case class ElasticDateMath(base: String, adjustments: Seq[Adjustment] = Nil, rounding: Option[TimeUnit] = None)
extends ElasticDate {
override def show: String = ElasticDateMathShow.show(this)
def add(value: Int, unit: TimeUnit): ElasticDateMath = copy(adjustments = adjustments :+ Adjustment(value, unit))
def minus(value: Int, unit: TimeUnit): ElasticDateMath = subtract(value, unit)
def subtract(value: Int, unit: TimeUnit): ElasticDateMath = add(-value, unit)
def rounding(unit: TimeUnit): ElasticDateMath = copy(rounding = unit.some)
}
case class UnparsedElasticDate(value: String) extends ElasticDate {
override def show: String = UnparsedElasticDateShow.show(this)
}
case class TimestampElasticDate(timestamp: Long) extends ElasticDate {
override def show: String = TimestampElasticDateShow.show(this)
}
object ElasticDate {
implicit object UnparsedElasticDateShow extends Show[UnparsedElasticDate] {
override def show(t: UnparsedElasticDate): String = t.value
}
implicit object TimestampElasticDateShow extends Show[TimestampElasticDate] {
override def show(t: TimestampElasticDate): String = t.timestamp.toString
}
implicit object ElasticDateMathShow extends Show[ElasticDateMath] {
override def show(t: ElasticDateMath): String =
(t.base match {
case "now" => "now"
case date => s"$date||"
}) + t.adjustments
.map { adj =>
val plus = if (adj.value < 0) "" else "+"
s"$plus${adj.value}${adj.unit.symbol}"
}
.mkString("") + t.rounding.fold("")(unit => s"/${unit.symbol}")
}
implicit def fromTimestamp(timestamp: Long): TimestampElasticDate = TimestampElasticDate(timestamp)
implicit def stringToDate(str: String): ElasticDate = UnparsedElasticDate(str)
def apply(str: String): ElasticDateMath = ElasticDateMath(str)
def now: ElasticDateMath = ElasticDateMath("now")
def apply(date: LocalDate): ElasticDateMath = ElasticDateMath(date.format(DateTimeFormatter.ISO_LOCAL_DATE))
}
|
sksamuel/elastic4s
|
elastic4s-domain/src/main/scala/com/sksamuel/elastic4s/ElasticDate.scala
|
Scala
|
apache-2.0
| 3,592 |
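A few concrete renderings of the date-math builder described in the scaladoc above, derived directly from the Show instances in this file (the expected outputs are shown as comments):

import com.sksamuel.elastic4s.{Days, ElasticDate, Months, TimestampElasticDate, UnparsedElasticDate, Weeks}

object ElasticDateSketch extends App {
  // "now"-based math; negative adjustments render without a plus sign.
  println(ElasticDate.now.minus(3, Months).add(1, Days).rounding(Weeks).show) // now-3M+1d/w

  // Anchored dates get the "||" separator before the math part.
  println(ElasticDate("2021-01-01").add(1, Days).show)                        // 2021-01-01||+1d

  // Timestamps and pre-built strings pass straight through.
  println(TimestampElasticDate(1113232321L).show)                             // 1113232321
  println(UnparsedElasticDate("2021-01-01||/M").show)                         // 2021-01-01||/M
}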
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.commons.shared.unstable.util
import java.nio.file.Paths
import io.gatling.commons.shared.unstable.util.PathHelper._
import org.scalatest.flatspec.AnyFlatSpecLike
import org.scalatest.matchers.should.Matchers
class PathHelperSpec extends AnyFlatSpecLike with Matchers {
private val root = Paths.get("foo")
"ancestor" should "throw an IllegalArgumentException when ancestor rank is negative" in {
an[IllegalArgumentException] should be thrownBy root.ancestor(-1)
}
it should "throw an IllegalArgumentException when asked rank > nb of parents" in {
an[IllegalArgumentException] should be thrownBy (root / "bar").ancestor(3)
}
it should "get the parent of rank n otherwise" in {
(root / "foo" / "bar").ancestor(1) shouldBe (root / "foo")
}
"extension" should "return an empty String when the specified path has no extension" in {
root.extension shouldBe ""
}
it should "return the file extension if the specified path has one" in {
(root / "foo.json").extension shouldBe "json"
}
"hasExtension" should "return true if the file has one of the specified extension, ignoring case" in {
(root / "foo.json").hasExtension("json") shouldBe true
(root / "foo.json").hasExtension("JSON") shouldBe true
}
it should "return false if the file has none of the specified extensions" in {
(root / "foo.json").hasExtension("sql") shouldBe false
}
"stripExtension" should "not modify the path if it has no extension" in {
root.stripExtension shouldBe "foo"
}
it should "remove the file extension if the specified path has one" in {
Paths.get("foo.json").stripExtension shouldBe "foo"
}
}
|
gatling/gatling
|
gatling-commons-shared-unstable/src/test/scala/io/gatling/commons/shared/unstable/util/PathHelperSpec.scala
|
Scala
|
apache-2.0
| 2,297 |
package com.fuscus.seien.infra.core
import play.api.Play.current
import play.api.libs.concurrent.Akka
import scala.concurrent.ExecutionContext
/**
 * Custom execution contexts exposed as implicit vals.
*
* Created by watawuwu on 2014/09/11.
*/
object AppContext {
implicit val jobExecutionContext: ExecutionContext = Akka.system.dispatchers.lookup("context.job")
implicit val expensiveOnlineExecutionContext: ExecutionContext = Akka.system.dispatchers.lookup("context.expensive-online")
}
|
watawuwu/seien-backend
|
modules/infra/app/com/fuscus/seien/infra/core/AppContext.scala
|
Scala
|
mit
| 481 |
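A sketch of how one of these dispatchers is typically pulled in at a call site; it assumes a context.job dispatcher is configured in the Play application's .conf, as the lookup above requires.

import scala.concurrent.Future
import com.fuscus.seien.infra.core.AppContext.jobExecutionContext

object JobSketch {
  // The imported implicit ExecutionContext routes this Future onto the "context.job"
  // dispatcher instead of Play's default pool.
  def runJob(): Future[Int] = Future {
    // ... long-running work ...
    42
  }
}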