code         stringlengths    5 to 1M
repo_name    stringlengths    5 to 109
path         stringlengths    6 to 208
language     stringclasses    1 value
license      stringclasses    15 values
size         int64            5 to 1M
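Each record below pairs one Scala source file (the code column) with the repository, path, language, license, and byte size it came from. Purely as an illustration of this schema, and not part of the original listing, the sketch below shows how such records could be iterated; the JSON Lines export and the file name "scala_files.jsonl" are assumptions, not something stated here.

    import json

    # Illustrative sketch only: assumes the rows were exported as JSON Lines,
    # one record per line, with the columns listed in the schema above:
    # code, repo_name, path, language, license, size.
    with open("scala_files.jsonl", encoding="utf-8") as f:
        for line in f:
            row = json.loads(line)
            # language is a single class ("Scala"); license is one of 15 values
            print(row["repo_name"], row["path"], row["license"], len(row["code"]))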
package com.edropple.velvetrope.user

import roles.Role

/**
 * Goes beyond the basic RoleOwner to define a consistent interface for
 * adding and removing roles. This is separate from RoleOwner in order to
 * allow for "User" and "MutableUser" being separate concepts (a pattern
 * I personally prefer).
 *
 * @author eropple
 * @since initial release
 */
trait MutableRoleOwner extends RoleOwner {
  /**
   * Adds a role to this user.
   * @param role The role to add to this user.
   */
  def addRole(role: Role)

  /**
   * Removes a role from this user.
   * @param role The role to remove from this user.
   * @return True if a role was actually removed from the user, false otherwise.
   */
  def removeRole(role: Role): Boolean
}
eropple/velvetrope
src/main/scala/com/edropple/velvetrope/user/MutableRoleOwner.scala
Scala
mit
762
package net.opentsdb.kafka.consumer.modules

import com.google.inject.{Singleton, Provides, AbstractModule}
import net.codingwell.scalaguice.ScalaModule
import java.util.Properties
import com.google.inject.name.{Named, Names}
import java.util.concurrent.{LinkedBlockingQueue, ArrayBlockingQueue}
import org.hbase.async.HBaseClient
import net.opentsdb.core.TSDB

class ConsumerModule(props: Properties) extends AbstractModule with ScalaModule {

  def configure() {
    Names.bindProperties(binder(), props)
  }

  @Provides @Singleton
  def provideProperties: Properties = props

  @Provides @Singleton
  def provideHBaseClient(@Named("hbase.zookeeper") zk: String): HBaseClient = {
    val client = new HBaseClient(zk)
    client.setFlushInterval(500)
    client
  }

  @Provides @Singleton
  def provideTsdbClient(@Named("tsdb.table") tsdbTable: String,
                        @Named("tsdb.uidtable") uidTable: String,
                        client: HBaseClient): TSDB =
    new TSDB(client, tsdbTable, uidTable)
}
nikore/opentsdb-contrib
kafka-consumer/src/main/scala/net/opentsdb/kafka/consumer/modules/ConsumerModule.scala
Scala
apache-2.0
966
/** * Copyright (c) 2002-2012 "Neo Technology," * Network Engine for Objects in Lund AB [http://neotechnology.com] * * This file is part of Neo4j. * * Neo4j is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.neo4j.cypher.internal.executionplan.builders import org.neo4j.cypher.internal.executionplan.PartiallySolvedQuery import org.junit.Test import org.junit.Assert._ import org.neo4j.cypher.internal.mutation.DeleteEntityAction import org.neo4j.cypher.internal.commands.expressions.Identifier class DeleteAndPropertySetBuilderTest extends BuilderTest { val builder = new UpdateActionBuilder(null) @Test def does_not_offer_to_solve_done_queries() { val q = PartiallySolvedQuery(). copy(updates = Seq(Solved(DeleteEntityAction(Identifier("x"))))) assertFalse("Should not be able to build on this", builder.canWorkWith(plan(q))) } @Test def offers_to_solve_queries() { val q = PartiallySolvedQuery(). copy(updates = Seq(Unsolved(DeleteEntityAction(Identifier("x"))))) val pipe = createPipe(nodes = Seq("x")) val executionPlan = plan(pipe, q) assertTrue("Should accept this", builder.canWorkWith(executionPlan)) val resultPlan = builder(executionPlan) val resultQ = resultPlan.query assert(resultQ === q.copy(updates = q.updates.map(_.solve))) assertTrue("Execution plan should contain transaction", resultPlan.containsTransaction) } @Test def does_not_offer_to_delete_something_not_yet_there() { val q = PartiallySolvedQuery(). copy(updates = Seq(Unsolved(DeleteEntityAction(Identifier("x"))))) val executionPlan = plan(q) assertFalse("Should not accept this", builder.canWorkWith(executionPlan)) } }
dksaputra/community
cypher/src/test/scala/org/neo4j/cypher/internal/executionplan/builders/DeleteAndPropertySetBuilderTest.scala
Scala
gpl-3.0
2,301
/* * Copyright (C) 2016 Typesafe Inc. <http://www.typesafe.com> */ package akka.persistence.cassandra.journal import scala.concurrent._ import java.lang.{ Long => JLong } import akka.actor.ActorLogging import akka.persistence.PersistentRepr import akka.stream.ActorMaterializer import akka.persistence.cassandra.listenableFutureToFuture trait CassandraRecovery extends ActorLogging { this: CassandraJournal => import config._ import context.dispatcher override def asyncReplayMessages( persistenceId: String, fromSequenceNr: Long, toSequenceNr: Long, max: Long )(replayCallback: (PersistentRepr) => Unit): Future[Unit] = queries .eventsByPersistenceId( persistenceId, fromSequenceNr, toSequenceNr, max, replayMaxResultSize, None, "asyncReplayMessages", someReadConsistency, someReadRetryPolicy ) .runForeach(replayCallback) .map(_ => ()) override def asyncReadHighestSequenceNr(persistenceId: String, fromSequenceNr: Long): Future[Long] = asyncHighestDeletedSequenceNumber(persistenceId).flatMap { h => asyncFindHighestSequenceNr(persistenceId, math.max(fromSequenceNr, h)) } private def asyncFindHighestSequenceNr(persistenceId: String, fromSequenceNr: Long): Future[Long] = { def find(currentPnr: Long, currentSnr: Long): Future[Long] = { // if every message has been deleted and thus no sequence_nr the driver gives us back 0 for "null" :( val boundSelectHighestSequenceNr = preparedSelectHighestSequenceNr.map(_.bind(persistenceId, currentPnr: JLong)) boundSelectHighestSequenceNr.flatMap(session.selectResultSet) .map { rs => Option(rs.one()).map { row => (row.getBool("used"), row.getLong("sequence_nr")) } } .flatMap { // never been to this partition case None => Future.successful(currentSnr) // don't currently explicitly set false case Some((false, _)) => Future.successful(currentSnr) // everything deleted in this partition, move to the next case Some((true, 0)) => find(currentPnr + 1, currentSnr) case Some((_, nextHighest)) => find(currentPnr + 1, nextHighest) } } find(partitionNr(fromSequenceNr), fromSequenceNr) } def asyncHighestDeletedSequenceNumber(persistenceId: String): Future[Long] = { val boundSelectDeletedTo = preparedSelectDeletedTo.map(_.bind(persistenceId)) boundSelectDeletedTo.flatMap(session.selectResultSet) .map(r => Option(r.one()).map(_.getLong("deleted_to")).getOrElse(0)) } }
kpbochenek/akka-persistence-cassandra
src/main/scala/akka/persistence/cassandra/journal/CassandraRecovery.scala
Scala
apache-2.0
2,699
package org.jetbrains.plugins.scala package base import java.io.File import com.intellij.openapi.module.Module import com.intellij.openapi.project.Project import com.intellij.openapi.projectRoots.{JavaSdk, Sdk} import com.intellij.openapi.roots._ import com.intellij.openapi.roots.libraries.Library import com.intellij.openapi.util.io.FileUtil import com.intellij.openapi.vfs.impl.VirtualFilePointerManagerImpl import com.intellij.openapi.vfs.newvfs.impl.VfsRootAccess import com.intellij.openapi.vfs.pointers.VirtualFilePointerManager import com.intellij.openapi.vfs.{JarFileSystem, LocalFileSystem, VfsUtil, VirtualFile} import com.intellij.testFramework.PsiTestUtil import org.jetbrains.plugins.scala.extensions._ import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.SyntheticClasses import org.jetbrains.plugins.scala.project._ import org.jetbrains.plugins.scala.project.template.Artifact import org.jetbrains.plugins.scala.util.TestUtils import org.jetbrains.plugins.scala.util.TestUtils.ScalaSdkVersion import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer /** * Nikolay.Tropin * 5/29/13 */ class ScalaLibraryLoader(project: Project, module: Module, rootPath: String, isIncludeScalazLibrary: Boolean = false, isIncludeReflectLibrary: Boolean = false, isIncludeSprayLibrary: Boolean = false, javaSdk: Option[Sdk] = None) { private val addedLibraries = ArrayBuffer[Library]() def loadScala(libVersion: TestUtils.ScalaSdkVersion) { initScalaComponents() addSyntheticClasses() VfsRootAccess.allowRootAccess(TestUtils.getTestDataPath) if (rootPath != null) { FileUtil.createIfDoesntExist(new File(rootPath)) val testDataRoot: VirtualFile = LocalFileSystem.getInstance.refreshAndFindFileByPath(rootPath) assert(testDataRoot != null) PsiTestUtil.addSourceRoot(module, testDataRoot) } addScalaSdk(module, libVersion, isIncludeReflectLibrary) if (isIncludeScalazLibrary) addLibrary(module, "scalaz", TestUtils.getMockScalazLib(libVersion)) if (isIncludeSprayLibrary) addLibrary(module, "spray", TestUtils.getMockSprayLib(libVersion)) javaSdk.foreach { sdk => val rootModel = ModuleRootManager.getInstance(module).getModifiableModel rootModel.setSdk(sdk) inWriteAction(rootModel.commit()) } } def initScalaComponents(): Unit = { ScalaLoader.loadScala() } def addSyntheticClasses(): Unit = { val syntheticClasses: SyntheticClasses = project.getComponent(classOf[SyntheticClasses]) if (!syntheticClasses.isClassesRegistered) { syntheticClasses.registerClasses() } } def clean() { if (rootPath != null) { val testDataRoot: VirtualFile = LocalFileSystem.getInstance.refreshAndFindFileByPath(rootPath) PsiTestUtil.removeSourceRoot(module, testDataRoot) } inWriteAction { addedLibraries.foreach(module.detach) } } def addScalaSdk(module: Module, sdkVersion: ScalaSdkVersion, loadReflect: Boolean) = { val compilerPath = TestUtils.getScalaCompilerPath(sdkVersion) val libraryPath = TestUtils.getScalaLibraryPath(sdkVersion) val reflectPath = TestUtils.getScalaReflectPath(sdkVersion) val scalaSdkJars = Seq(libraryPath, compilerPath) ++ (if (loadReflect) Seq(reflectPath) else Seq.empty) val classRoots = scalaSdkJars.map(path => JarFileSystem.getInstance.refreshAndFindFileByPath(path + "!/")).asJava val scalaLibrarySrc = TestUtils.getScalaLibrarySrc(sdkVersion) val srcsRoots = Seq(JarFileSystem.getInstance.refreshAndFindFileByPath(scalaLibrarySrc + "!/")).asJava val scalaSdkLib = PsiTestUtil.addProjectLibrary(module, "scala-sdk", classRoots, srcsRoots) val languageLevel = Artifact.ScalaCompiler.versionOf(new 
File(compilerPath)) .flatMap(ScalaLanguageLevel.from).getOrElse(ScalaLanguageLevel.Default) inWriteAction { scalaSdkLib.convertToScalaSdkWith(languageLevel, scalaSdkJars.map(new File(_))) module.attach(scalaSdkLib) addedLibraries += scalaSdkLib } VirtualFilePointerManager.getInstance.asInstanceOf[VirtualFilePointerManagerImpl].storePointers() } private def addLibrary(module: Module, libraryName: String, mockLib: String): Unit = { if (module.libraries.exists(_.getName == libraryName)) return VfsRootAccess.allowRootAccess(mockLib) val rootModel = ModuleRootManager.getInstance(module).getModifiableModel val libraryTable = rootModel.getModuleLibraryTable val library = libraryTable.createLibrary(libraryName) val libModel = library.getModifiableModel val libRoot: File = new File(mockLib) assert(libRoot.exists) libModel.addRoot(VfsUtil.getUrlForLibraryRoot(libRoot), OrderRootType.CLASSES) inWriteAction { libModel.commit() rootModel.commit() } VirtualFilePointerManager.getInstance.asInstanceOf[VirtualFilePointerManagerImpl].storePointers() } } object ScalaLibraryLoader { def getSdkNone: Option[Sdk] = None def withMockJdk(project: Project, module: Module, rootPath: String, isIncludeScalazLibrary: Boolean = false, isIncludeReflectLibrary: Boolean = false, isIncludeSprayLibrary: Boolean = false): ScalaLibraryLoader = { val mockJdk = TestUtils.getDefaultJdk VfsRootAccess.allowRootAccess(mockJdk) val javaSdk = Some(JavaSdk.getInstance.createJdk("java sdk", mockJdk, false)) new ScalaLibraryLoader(project, module, rootPath, isIncludeScalazLibrary, isIncludeReflectLibrary, isIncludeSprayLibrary, javaSdk) } }
advancedxy/intellij-scala
test/org/jetbrains/plugins/scala/base/ScalaLibraryLoader.scala
Scala
apache-2.0
5,615
/** This file is part of Nexus, which is Copyright 2012 Johannes Åman Pohjola. * * Nexus is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, version 3. * * Nexus is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Nexus. If not, see <http://www.gnu.org/licenses/>. */ package nexus /** A SemiMove represents a move, with the information about said move that is available * to the player in the moment he/she commits to the move. */ sealed abstract class SemiMove { /** Get the squares upon which new tiles are placed by this move. * @param b board on which the move is played. * @return the squares upon which new tiles are placed by this move. */ def touches(b:Board) : List[Square] /** Check if move is played on a start tile. * @param b board on which the move is played. * @return true iff this move is played on a start square. */ def onStart(b:Board) : Boolean = (this touches b) exists {_.start} /** This move including a randomly selected tile draw which may result from it. * @param b board on which the move is played. * @return the move. */ def toMove(g:Game) : Move def score(b:Board,l:Language,rs:Int) : Int = 0 } case object SPass extends SemiMove { override def touches(b:Board) : List[Square] = Nil override def toMove(g:Game) : Move = Move(SPass,g.racks(g.currentPlayer),Nil) } case class SSwap(tiles: List[DrawTile]) extends SemiMove { override def touches(b:Board) : List[Square] = Nil override def toMove(g:Game) : Move = Move(this, (Multiset.fromTraversable(g.racks(g.currentPlayer)) -- tiles).get.toList, g.bag.random(tiles length)._1 ) } case class SPlay(horizontal : Boolean, coordinate : (Int,Int), tiles : List[Tile]) extends SemiMove { override def touches(b:Board) : List[Square] = { val line = { if(horizontal) b.horizontalLine(coordinate._2).drop(coordinate._1) else b.verticalLine(coordinate._1).drop(coordinate._2)} line.filter{_._2.isEmpty}.take(tiles length).map{_._1} } private def touchesTilesAux(b:Board) : List[Tile] = { b tile coordinate match { case None => if(tiles isEmpty) Nil else SPlay(horizontal, if(horizontal) (coordinate._1 + 1, coordinate._2) else (coordinate._1, coordinate._2 + 1), tiles tail) touchesTilesAux b case Some(t) => t::(SPlay(horizontal, if(horizontal) (coordinate._1 + 1, coordinate._2) else (coordinate._1, coordinate._2 + 1), tiles) touchesTilesAux b) } } def touchesTiles(b:Board) : List[Tile] = { val previousCoord = { if(horizontal) (coordinate._1 - 1, coordinate._2) else (coordinate._1, coordinate._2 -1)} if((b tile previousCoord) isDefined) SPlay(horizontal,previousCoord, tiles) touchesTiles b else this touchesTilesAux b } def touchesCoords(b:Board) : List[(Int,Int)] = { (tiles,b square coordinate, b tile coordinate) match { case (Nil,_,_) => Nil case (_,None,_) => Nil case (_::r,Some(_),None) => coordinate :: (SPlay(horizontal, if(horizontal) (coordinate._1 +1,coordinate._2) else (coordinate._1, coordinate._2 + 1), r) touchesCoords b) case (l,_,Some(_)) => SPlay(horizontal, if(horizontal) (coordinate._1 +1,coordinate._2) else (coordinate._1, coordinate._2 + 1), l) touchesCoords b } } override def toMove(g:Game) : Move = Move(this, (Multiset.fromTraversable(g.racks(g.currentPlayer)) -- (tiles map (_ 
toDrawTile))).get.toList, g.bag.random(tiles length)._1 ) private def hull(b:Board) = { if(horizontal) { b.horizontalLine(coordinate._2).drop(coordinate._1 - 1).take((tiles length) + (2 min (coordinate._1 +1))) ++ b.horizontalLine(coordinate._2 + 1).drop(coordinate._1).take(tiles length) ++ b.horizontalLine(coordinate._2 - 1).drop(coordinate._1).take(tiles length) } else { b.verticalLine(coordinate._1).drop(coordinate._2 - 1).take((tiles length) + (2 min (coordinate._2 + 1))) ++ b.verticalLine(coordinate._1 + 1).drop(coordinate._2).take(tiles length) ++ b.verticalLine(coordinate._1 - 1).drop(coordinate._2).take(tiles length) } } /** Check if move connects to already played tiles. * @param b board on which the move is played. * @return true iff this move is connected to already played tiles. */ def hooks(b:Board) : Boolean = { hull(b) exists {_._2 isDefined} } def valid(b:Board) = { if(hooks(b)) (touches(b).length == tiles.length) && b.tile(coordinate).isEmpty else onStart(b) && (tiles.length > 1) && (touches(b).length == tiles.length) && b.tile(coordinate).isEmpty } override def score(b:Board,l:Language,rs:Int) : Int = { val hookscores = (tiles zip (this touchesCoords b)).foldRight(0){ case ((t,c),n) => SPlay(!horizontal,c,t::Nil) touchesTiles b match { case Nil => n case tiles2 => { val oldtilescore = tiles2.foldRight(0){case (t,n) => n + (l.tilevalue(t))} val newtilescore = b.square(c).get.tilebonus * l.tilevalue(t) val multiplier = b.square(c).get.wordbonus n + ((oldtilescore+newtilescore)*multiplier) } } } val ttiles = touchesTiles(b) if((tiles.length > 1) || !(touchesTiles(b).isEmpty)) { val tsquares = touches(b) val newtilescore = (tiles zip tsquares).foldRight(0){case ((t,s),n) => n + (s.tilebonus * l.tilevalue(t))} val oldtilescore = ttiles.foldRight(0){case (t,n) => n + (l tilevalue t)} val multiplier = tsquares.foldRight(1){case (s,n) => n * s.wordbonus} val bingo = if(tiles.size == rs) 50 else 0 ((newtilescore+oldtilescore)*multiplier) + bingo + hookscores } else hookscores } } object SemiMove { def swap(tiles: List[DrawTile]) = SSwap(tiles sortWith (_ < _)) }
Sen045/nexus
src/nexus/semimove.scala
Scala
gpl-3.0
6,272
/* sbt -- Simple Build Tool * Copyright 2009 Mark Harrah */ package xsbt import java.io.{ BufferedReader, BufferedWriter, InputStreamReader, OutputStreamWriter } import java.net.{ InetAddress, ServerSocket, Socket } import scala.util.control.NonFatal object IPC { private val portMin = 1025 private val portMax = 65536 private val loopback = InetAddress.getByName(null) // loopback def client[T](port: Int)(f: IPC => T): T = ipc(new Socket(loopback, port))(f) def pullServer[T](f: Server => T): T = { val server = makeServer try { f(new Server(server)) } finally { server.close() } } def unmanagedServer: Server = new Server(makeServer) def makeServer: ServerSocket = { val random = new java.util.Random def nextPort = random.nextInt(portMax - portMin + 1) + portMin def createServer(attempts: Int): ServerSocket = if (attempts > 0) try { new ServerSocket(nextPort, 1, loopback) } catch { case NonFatal(e) => createServer(attempts - 1) } else sys.error("Could not connect to socket: maximum attempts exceeded") createServer(10) } def server[T](f: IPC => Option[T]): T = serverImpl(makeServer, f) def server[T](port: Int)(f: IPC => Option[T]): T = serverImpl(new ServerSocket(port, 1, loopback), f) private def serverImpl[T](server: ServerSocket, f: IPC => Option[T]): T = { def listen(): T = { ipc(server.accept())(f) match { case Some(done) => done case None => listen() } } try { listen() } finally { server.close() } } private def ipc[T](s: Socket)(f: IPC => T): T = try { f(new IPC(s)) } finally { s.close() } final class Server private[IPC] (s: ServerSocket) { def port = s.getLocalPort def close() = s.close() def isClosed: Boolean = s.isClosed def connection[T](f: IPC => T): T = IPC.ipc(s.accept())(f) } } final class IPC private (s: Socket) { def port = s.getLocalPort private val in = new BufferedReader(new InputStreamReader(s.getInputStream)) private val out = new BufferedWriter(new OutputStreamWriter(s.getOutputStream)) def send(s: String) = { out.write(s); out.newLine(); out.flush() } def receive: String = in.readLine() }
Duhemm/sbt
main-command/src/main/scala/xsbt/IPC.scala
Scala
bsd-3-clause
2,230
package com.github.tototoshi.play2.auth.test

import play.api.test._
import play.api.mvc.Cookie
import com.github.tototoshi.play2.auth.AuthConfig
import play.api.libs.Crypto
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

trait Helpers {

  implicit class AuthFakeRequest[A](fakeRequest: FakeRequest[A]) {

    def withLoggedIn(implicit config: AuthConfig): config.Id => FakeRequest[A] = { id =>
      val token = Await.result(config.idContainer.startNewSession(id, config.sessionTimeoutInSeconds)(fakeRequest, global), 10.seconds)
      fakeRequest.withHeaders("PLAY2_AUTH_TEST_TOKEN" -> token)
    }
  }
}

object Helpers extends Helpers
tototoshi/play2-auth
test/src/main/scala/com/github/tototoshi/play2/auth/test/Helpers.scala
Scala
apache-2.0
717
package net.flatmap.cobra import akka.actor.{Actor, ActorLogging, ActorRef, Props, Terminated} import net.flatmap.collaboration.{Annotations, Document, Server} import scala.util.{Failure, Success} object SnippetServer { def props(env: Map[String,String]) = Props(classOf[SnippetServer],env) val services: Map[Mode,LanguageService] = Map.empty } /** * Created by martin on 10.05.16. */ class SnippetServer(env: Map[String,String]) extends Actor with ActorLogging { def receive = { case InitDoc(id,content,mode) => val service = SnippetServer.services.get(mode).map { ls => val src = context.actorOf(ls.props(env),mode.name) src ! ResetSnippet(id,content,0) src } context.become(initialized(mode, new Server(Document(content)), Set(sender) ++ service,Map.empty)) context.watch(sender) } def initialized(mode: Mode, server: Server[Char], listeners: Set[ActorRef], annotations: Map[String,Annotations]): Receive = { case Terminated(listener) => log.debug("client disconnected") context.become(initialized(mode,server,listeners - listener,annotations)) case InitDoc(id,content,mode) => log.debug("client connected") context.watch(sender) context.become(initialized(mode,server,listeners + sender,annotations)) if (server.revision > 0) { sender ! CombinedRemoteEdit(id, server.getCombinedHistory, server.revision) annotations.foreach { case (aid,as) => sender ! RemoteAnnotations(id,aid,as) } } case Annotate(id,aid,as,rev) => context.become(initialized(mode,server,listeners,annotations + (aid -> as))) (listeners - sender).foreach(_ ! RemoteAnnotations(id,aid,as)) case Edit(id,op,rev) => log.debug("applying edit") server.applyOperation(op,rev) match { case Success(op) => context.become(initialized(mode,server,listeners,annotations.map { case (aid,as) => aid -> server.transformAnnotation(server.revision - 1, as).get })) sender ! AcknowledgeEdit(id) (listeners - sender).foreach(_ ! RemoteEdit(id,op)) case Failure(e) => log.error(e,"could not apply operation") sender ! ResetSnippet(id,server.text.mkString,server.revision) } case msg: SnippetMessage => (listeners - sender).foreach(_ ! msg) case msg: RequestInfo => (listeners - sender).foreach(_ ! msg) case msg: Information => (listeners - sender).foreach(_ ! msg) } }
flatmap/cobra
modules/cobra-server/src/main/scala/net/flatmap/cobra/SnippetServer.scala
Scala
lgpl-3.0
2,537
package masterleague4s package net import io.circe.Decoder import fs2._ import data._ import matryoshka.data.Fix import shapeless.tag.@@ import spinoco.fs2.http._ import spinoco.protocol.http.Uri //import spinoco.protocol.http.header.Authorization import data.Serialized._ import codec.CirceSupport._ import codec.FDecoders._ import fs2.util.Catchable import cats.implicits._ import authorization.Token //import spinoco.protocol.http.header.value.HttpCredentials.OAuth2BearerToken object UnfoldApiResult { type StreamRunnable[F[_], A] = ClientRunnable[F, Stream[F, A]] type RunnableApiStream[F[_], U, A] = StreamRunnable[F, APIResultF[A, U]] type RunnableResult[F[_], A] = Fix[({ type l[a] = RunnableApiStream[F, a, A] })#l] def unfoldApiResult[F[_]: Catchable, A: Decoder](uri: Uri @@ A, sleep: Stream[F, Unit], token: Option[Token]): RunnableResult[F, A] = { implicit val bodyDecoder = circeDecoder[UriApiResult[A]](decodeAPICall) import spinoco.protocol.http.header.value.ContentType import spinoco.protocol.http.header.value.MediaType def urimap(uriresult: UriApiResult[A]): APIResultF[A, RunnableResult[F, A]] = uriresult.bimap(id => id, uri => unfoldApiResult(uri, sleep, token)) def streammap(str: Stream[F, UriApiResult[A]]): Stream[F, APIResultF[A, RunnableResult[F, A]]] = str.map(urimap) val r: HttpRequest[F] = token.foldLeft(HttpRequest.get[F](uri))((req, tok) => req.appendHeader(authorization.Auth.authheader(tok))) val one: HttpClient[F] => Stream[F, UriApiResult[A]] = (client: HttpClient[F]) => for { response <- (sleep >> client.request(r)) status = response.header.status body <- { if (status.isSuccess) Stream.eval(response.bodyAs[UriApiResult[A]]).map(_.require) else { val textbody = Stream.eval(response.withContentType(ContentType(MediaType.`text/plain`, None, None)).bodyAsString) textbody.map(body => { val error = s"Unexpected network response: status ${status.code} - ${status.longDescription} CONTENTS: $body" throw new Exception(error) }) } } } yield body val lifted = ClientRunnable.lift(one) type Unfix[X] = ClientRunnable[F, Stream[F, APIResultF[A, X]]] Fix[Unfix](ClientRunnable.instances.map(lifted)(streammap)) } def linearizeApiResult[F[_]: Catchable, A: Decoder]( uri: Uri @@ A, sleep: Stream[F, Unit], token: Option[Token]): ClientRunnable[F, Stream[F, APIResultF[A, Unit]]] = { val init = unfoldApiResult(uri, sleep, token) val run = (client: HttpClient[F]) => { def rec(fix: RunnableResult[F, A]): Stream[F, APIResultF[A, Unit]] = { val runnable = fix.unFix val pages = runnable.run(client) val these: Stream[F, APIResultF[A, Unit]] = pages.map(_.bimap(id => id, _ => ())) //'t was nice knowing you, stack (TODO: Stack-safety(?)) val those: Stream[F, APIResultF[A, Unit]] = pages.flatMap(page => page.next match { case None => Stream.empty case Some(n) => rec(n) }) these ++ those } rec(init) } ClientRunnable.lift(run) } def singlePage[F[_]: Catchable, A: Decoder](uri: Uri @@ A, sleep: Stream[F, Unit], token: Option[Token]): ClientRunnable[F, Stream[F, List[A]]] = { implicit val bodyDecoder = circeDecoder[List[A]] import spinoco.protocol.http.header.value.ContentType import spinoco.protocol.http.header.value.MediaType val r: HttpRequest[F] = token.foldLeft(HttpRequest.get[F](uri))((req, tok) => req.appendHeader(authorization.Auth.authheader(tok))) val one: HttpClient[F] => Stream[F, List[A]] = (client: HttpClient[F]) => for { response <- (sleep >> client.request(r)) status = response.header.status body <- { if (status.isSuccess) Stream.eval(response.bodyAs[List[A]]).map(_.require) else { val 
textbody = Stream.eval(response.withContentType(ContentType(MediaType.`text/plain`, None, None)).bodyAsString) textbody.map(body => { val error = s"Unexpected network response: status ${status.code} - ${status.longDescription} CONTENTS: $body" throw new Exception(error) }) } } } yield body ClientRunnable.lift(one) } }
martijnhoekstra/masterleague4s
src/main/scala/net/UnfoldApiResult.scala
Scala
gpl-3.0
4,730
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.storage import java.io.{BufferedOutputStream, FileOutputStream, File, OutputStream} import java.nio.channels.FileChannel import org.apache.spark.Logging import org.apache.spark.serializer.{SerializerInstance, SerializationStream} import org.apache.spark.executor.ShuffleWriteMetrics import org.apache.spark.util.Utils /** * A class for writing JVM objects directly to a file on disk. This class allows data to be appended * to an existing block and can guarantee atomicity in the case of faults as it allows the caller to * revert partial writes. * * This class does not support concurrent writes. Also, once the writer has been opened it cannot be * reopened again. */ private[spark] class DiskBlockObjectWriter( file: File, serializerInstance: SerializerInstance, bufferSize: Int, compressStream: OutputStream => OutputStream, syncWrites: Boolean, // These write metrics concurrently shared with other active DiskBlockObjectWriters who // are themselves performing writes. All updates must be relative. writeMetrics: ShuffleWriteMetrics) extends OutputStream with Logging { /** The file channel, used for repositioning / truncating the file. */ private var channel: FileChannel = null private var bs: OutputStream = null private var fos: FileOutputStream = null private var ts: TimeTrackingOutputStream = null private var objOut: SerializationStream = null private var initialized = false private var hasBeenClosed = false private var commitAndCloseHasBeenCalled = false /** * Cursors used to represent positions in the file. * * xxxxxxxx|--------|--- | * ^ ^ ^ * | | finalPosition * | reportedPosition * initialPosition * * initialPosition: Offset in the file where we start writing. Immutable. * reportedPosition: Position at the time of the last update to the write metrics. * finalPosition: Offset where we stopped writing. Set on closeAndCommit() then never changed. * -----: Current writes to the underlying file. * xxxxx: Existing contents of the file. */ private val initialPosition = file.length() private var finalPosition: Long = -1 private var reportedPosition = initialPosition /** * Keep track of number of records written and also use this to periodically * output bytes written since the latter is expensive to do for each record. */ private var numRecordsWritten = 0 def open(): DiskBlockObjectWriter = { if (hasBeenClosed) { throw new IllegalStateException("Writer already closed. 
Cannot be reopened.") } fos = new FileOutputStream(file, true) ts = new TimeTrackingOutputStream(writeMetrics, fos) channel = fos.getChannel() bs = compressStream(new BufferedOutputStream(ts, bufferSize)) objOut = serializerInstance.serializeStream(bs) initialized = true this } override def close() { if (initialized) { Utils.tryWithSafeFinally { if (syncWrites) { // Force outstanding writes to disk and track how long it takes objOut.flush() val start = System.nanoTime() fos.getFD.sync() writeMetrics.incShuffleWriteTime(System.nanoTime() - start) } } { objOut.close() } channel = null bs = null fos = null ts = null objOut = null initialized = false hasBeenClosed = true } } def isOpen: Boolean = objOut != null /** * Flush the partial writes and commit them as a single atomic block. */ def commitAndClose(): Unit = { if (initialized) { // NOTE: Because Kryo doesn't flush the underlying stream we explicitly flush both the // serializer stream and the lower level stream. objOut.flush() bs.flush() close() finalPosition = file.length() // In certain compression codecs, more bytes are written after close() is called writeMetrics.incShuffleBytesWritten(finalPosition - reportedPosition) } else { finalPosition = file.length() } commitAndCloseHasBeenCalled = true } /** * Reverts writes that haven't been flushed yet. Callers should invoke this function * when there are runtime exceptions. This method will not throw, though it may be * unsuccessful in truncating written data. * * @return the file that this DiskBlockObjectWriter wrote to. */ def revertPartialWritesAndClose(): File = { // Discard current writes. We do this by flushing the outstanding writes and then // truncating the file to its initial position. try { if (initialized) { writeMetrics.decShuffleBytesWritten(reportedPosition - initialPosition) writeMetrics.decShuffleRecordsWritten(numRecordsWritten) objOut.flush() bs.flush() close() } val truncateStream = new FileOutputStream(file, true) try { truncateStream.getChannel.truncate(initialPosition) file } finally { truncateStream.close() } } catch { case e: Exception => logError("Uncaught exception while reverting partial writes to file " + file, e) file } } /** * Writes a key-value pair. */ def write(key: Any, value: Any) { if (!initialized) { open() } objOut.writeKey(key) objOut.writeValue(value) recordWritten() } override def write(b: Int): Unit = throw new UnsupportedOperationException() override def write(kvBytes: Array[Byte], offs: Int, len: Int): Unit = { if (!initialized) { open() } bs.write(kvBytes, offs, len) } /** * Notify the writer that a record worth of bytes has been written with OutputStream#write. */ def recordWritten(): Unit = { numRecordsWritten += 1 writeMetrics.incShuffleRecordsWritten(1) if (numRecordsWritten % 32 == 0) { updateBytesWritten() } } /** * Returns the file segment of committed data that this Writer has written. * This is only valid after commitAndClose() has been called. */ def fileSegment(): FileSegment = { if (!commitAndCloseHasBeenCalled) { throw new IllegalStateException( "fileSegment() is only valid after commitAndClose() has been called") } new FileSegment(file, initialPosition, finalPosition - initialPosition) } /** * Report the number of bytes written in this writer's shuffle write metrics. * Note that this is only valid before the underlying streams are closed. 
*/ private def updateBytesWritten() { val pos = channel.position() writeMetrics.incShuffleBytesWritten(pos - reportedPosition) reportedPosition = pos } // For testing private[spark] override def flush() { objOut.flush() bs.flush() } }
pronix/spark
core/src/main/scala/org/apache/spark/storage/DiskBlockObjectWriter.scala
Scala
apache-2.0
7,667
/* * Copyright 2011-2022 GatlingCorp (https://gatling.io) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.gatling.commons.validation object Validation { val unit: Validation[Unit] = ().success val TrueSuccess: Validation[Boolean] = true.success val FalseSuccess: Validation[Boolean] = false.success val NoneSuccess: Validation[None.type] = None.success val NullStringSuccess: Validation[String] = "null".success } sealed trait Validation[@specialized(Short, Int, Long, Double, Char, Boolean) +T] { def map[A](f: T => A): Validation[A] def flatMap[A](f: T => Validation[A]): Validation[A] def mapFailure(f: String => String): Validation[T] def foreach(f: T => Any): Unit = onSuccess(f) def withFilter(p: T => Boolean): Validation[T] = filter(p) def filter(p: T => Boolean): Validation[T] def onSuccess(f: T => Any): Unit def onFailure(f: String => Any): Unit def recover[A >: T](v: => A): Validation[A] def toOption: Option[T] } final case class Success[+T](value: T) extends Validation[T] { override def map[A](f: T => A): Validation[A] = Success(f(value)) override def flatMap[A](f: T => Validation[A]): Validation[A] = f(value) override def mapFailure(f: String => String): Validation[T] = this override def filter(p: T => Boolean): Validation[T] = if (p(value)) this else Failure("Predicate does not hold for " + value.toString) override def onSuccess(f: T => Any): Unit = f(value) override def onFailure(f: String => Any): Unit = () override def recover[A >: T](v: => A): Validation[A] = this override def toOption: Option[T] = Some(value) } final case class Failure(message: String) extends Validation[Nothing] { override def map[A](f: Nothing => A): Validation[A] = this override def flatMap[A](f: Nothing => Validation[A]): Validation[A] = this override def mapFailure(f: String => String): Validation[Nothing] = Failure(f(message)) override def filter(p: Nothing => Boolean): Failure = this override def onSuccess(f: Nothing => Any): Unit = () override def onFailure(f: String => Any): Unit = f(message) override def recover[A >: Nothing](v: => A): Validation[A] = v.success override def toOption: Option[Nothing] = None }
gatling/gatling
gatling-commons/src/main/scala/io/gatling/commons/validation/Validation.scala
Scala
apache-2.0
2,721
package com.gilt.nlp sealed abstract class PartOfSpeech private[nlp](val name: String) object PartOfSpeech { case object Verb extends PartOfSpeech("verb") case object Noun extends PartOfSpeech("noun") case object Adjective extends PartOfSpeech("adjective") case object Adverb extends PartOfSpeech("adverb") case object Unknown extends PartOfSpeech("<unknown>") } /** * based on subset of Penn English Treebank Parts of Speech (POS) tags. * * [http://www.cis.upenn.edu/~treebank], more specifically [ftp://ftp.cis.upenn.edu/pub/treebank/doc/tagguide.ps.gz] */ sealed abstract class POS private[nlp](val tag: String, val summaryPartOfSpeech: Option[PartOfSpeech] = None) object POS { case object CoordinatingConjunction extends POS("CC") case object CardinalNumber extends POS("CN") case object Determiner extends POS("DT") case object ExistentialThere extends POS("EX") case object ForeignWord extends POS("FW") case object Preposition extends POS("IN") case object Adjective extends POS("JJ", Some(PartOfSpeech.Adjective)) case object ComparativeAdjective extends POS("JJR", Some(PartOfSpeech.Adjective)) case object SupurlativeAdjective extends POS("JJS", Some(PartOfSpeech.Adjective)) case object ListModalMarker extends POS("LS") case object Modal extends POS("MD") case object SingularNoun extends POS("NN", Some(PartOfSpeech.Noun)) case object PluralNoun extends POS("NNS", Some(PartOfSpeech.Noun)) case object SingularProperNoun extends POS("NNP", Some(PartOfSpeech.Noun)) case object PluralProperNoun extends POS("NNPS", Some(PartOfSpeech.Noun)) case object Predeterminer extends POS("PDT") case object PossessiveEnding extends POS("POS") case object PersonalPronoun extends POS("PRP") case object PossessivePronoun extends POS("PRP$") case object Adverb extends POS("RB", Some(PartOfSpeech.Adverb)) case object ComparativeAdverb extends POS("RBR", Some(PartOfSpeech.Adverb)) case object SuperlativeAdverb extends POS("RBS", Some(PartOfSpeech.Adverb)) case object Particle extends POS("RP") case object Symbol extends POS("SYM") case object To extends POS("TO") case object Interjection extends POS("UH") case object Verb extends POS("VB", Some(PartOfSpeech.Verb)) case object PastTenseVerb extends POS("VBD", Some(PartOfSpeech.Verb)) case object PresentParticipleVerb extends POS("VBG", Some(PartOfSpeech.Verb)) case object PastParticipleVerb extends POS("VBN", Some(PartOfSpeech.Verb)) case object NonThirdPersonSingularPresentVerb extends POS("VBP", Some(PartOfSpeech.Verb)) case object ThirdPersonSingularPresentVerb extends POS("VBZ", Some(PartOfSpeech.Verb)) case object Whdeterminer extends POS("WDT") case object Whpronoun extends POS("WP") case object PossessiveWhpronoun extends POS("WP$") case object Whadverb extends POS("WRB") private val ByTag = Seq( CoordinatingConjunction, CardinalNumber, Determiner, ExistentialThere, ForeignWord, Preposition, Adjective, ComparativeAdjective, SupurlativeAdjective, ListModalMarker, Modal, SingularNoun, PluralNoun, SingularProperNoun, PluralProperNoun, Predeterminer, PossessiveEnding, PersonalPronoun, PossessivePronoun, Adverb, ComparativeAdverb, SuperlativeAdverb, Particle, Symbol, Interjection, Verb, PastTenseVerb, PresentParticipleVerb, PastParticipleVerb, NonThirdPersonSingularPresentVerb, ThirdPersonSingularPresentVerb, Whdeterminer, Whpronoun, PossessiveWhpronoun, Whadverb ).map(pos => pos.tag -> pos).toMap def apply(tag: String): POS = ByTag.getOrElse(tag, Unknown) case object Unknown extends POS("UNKNOWN") }
gilt/lib-nlp
src/main/scala/com/gilt/nlp/POS.scala
Scala
mit
3,630
/* ASIB - A Scala IRC Bot Copyright (C) 2012 Iain Cambridge This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package asib.command.user.news import asib.Asib import asib.command.user.AbstractUserCommand import asib.command.user.news.search._ class Search extends AbstractUserCommand { val helpMessage = "Searches the news for the keyword - !nsearch <site> <phrase> - Do " + "!nsearch list to list news site." val newsSites = List("guardian") def execute(username: String, channel: String, args: String) = { val argsSplit = args.split(" ") if ((argsSplit.isDefinedAt(0) == false || argsSplit(0) == "") || (argsSplit.isDefinedAt(1) == false && argsSplit(0) != "list")) { Asib.sendMsg(channel, username + ", the usage of this command is !nsearch <site> " + "<phrase> do !nsearch list to list the sites") } else { val siteName = argsSplit(0).toLowerCase val phrase = argsSplit.reverse.dropRight(1).reverse.reduceRight(_ + " " + _) siteName match { case "guardian" => doSearch(channel, username, new Guardian, phrase) case "list" => list(username) case _ => Asib.sendMsg(channel, username + ", Invalid site \\"" + siteName + "\\"") } } } def list(username: String) = { newsSites foreach { siteName => Asib.sendNotice(username, "- " + siteName) } } def doSearch(channel: String, username: String, site: AbstractSite, phrase: String) = { val results = site.search(phrase) if (results.size == 0) { Asib.sendMsg(channel, username + ", there were no results for " + phrase) } else { Asib.sendMsg(channel, username + ", There was " + results.size + " results for \\"" + phrase + "\\" which will be send via notice.") results foreach { item => Asib.sendNotice(username, item.get("title").get + " - " + item.get("url").get) } } } }
icambridge-old/asib
src/main/scala/asib/command/user/news/Search.scala
Scala
gpl-3.0
2,443
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.plan.nodes.physical

import org.apache.flink.table.plan.nodes.FlinkRelNode

import org.apache.calcite.plan.RelTraitSet
import org.apache.calcite.rel.RelNode

/**
 * Base class for flink physical relational expression.
 */
trait FlinkPhysicalRel extends FlinkRelNode {

  /**
   * Try to satisfy required traits by descendant of current node. If descendant can satisfy
   * required traits, and current node will not destroy it, then returns the new node with
   * converted inputs.
   *
   * @param requiredTraitSet required traits
   * @return A converted node which satisfy required traits by inputs node of current node.
   *         Returns None if required traits cannot be satisfied.
   */
  def satisfyTraits(requiredTraitSet: RelTraitSet): Option[RelNode] = None
}
shaoxuan-wang/flink
flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/plan/nodes/physical/FlinkPhysicalRel.scala
Scala
apache-2.0
1,622
import sbt._ import Keys._ object PlayFlowBuild extends Build { import uk.gov.hmrc.DefaultBuildSettings import DefaultBuildSettings._ import uk.gov.hmrc.{SbtBuildInfo, ShellPrompt} val nameApp = "play-flow" val versionApp = "0.1.0" val appDependencies = { import Dependencies._ Seq( Compile.play, Test.scalaTest, Test.pegdown ) } lazy val playFlow = Project(nameApp, file(".")) .settings(version := versionApp) .settings(scalaSettings : _*) .settings(defaultSettings() : _*) .settings( targetJvm := "jvm-1.7", shellPrompt := ShellPrompt(versionApp), libraryDependencies ++= appDependencies, resolvers := Seq( Opts.resolver.sonatypeReleases, Opts.resolver.sonatypeSnapshots, "typesafe-releases" at "http://repo.typesafe.com/typesafe/releases/", "typesafe-snapshots" at "http://repo.typesafe.com/typesafe/snapshots/" ), crossScalaVersions := Seq("2.11.4", "2.10.4"), publishArtifact := true, publishArtifact in Test := true ) .settings(SbtBuildInfo(): _*) .settings(SonatypeBuild(): _*) } object Dependencies { object Compile { val play = "com.typesafe.play" %% "play-json" % "2.3.2" % "provided" } sealed abstract class Test(scope: String) { val scalaTest = "org.scalatest" %% "scalatest" % "2.2.0" % scope val scalaCheck = "org.scalacheck" %% "scalacheck" % "1.11.5" % scope val pegdown = "org.pegdown" % "pegdown" % "1.4.2" % scope } object Test extends Test("test") object IntegrationTest extends Test("it") } object SonatypeBuild { import xerial.sbt.Sonatype._ def apply() = { sonatypeSettings ++ Seq( pomExtra := (<url>http://liquid-armour.co.uk</url> <licenses> <license> <name>Apache 2</name> <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url> </license> </licenses> <scm> <connection>scm:[email protected]:liquidarmour/play-flow.git</connection> <developerConnection>scm:[email protected]:liquidarmour/play-flow.git</developerConnection> <url>[email protected]:liquidarmour/play-flow.git</url> </scm> <developers> <developer> <id>liquidarmour</id> <name>James Williams</name> <url>http://liquid-armour.co.uk</url> </developer> </developers>) ) } }
liquidarmour/play-flow
project/PlayFlowBuild.scala
Scala
apache-2.0
2,450
/*
 * Copyright 2016 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.ct.computations

import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input}

case class CP502(value: Option[Int]) extends CtBoxIdentifier("Ancillary income") with CtOptionalInteger with Input
ahudspith-equalexperts/ct-calculations
src/main/scala/uk/gov/hmrc/ct/computations/CP502.scala
Scala
apache-2.0
828
package java.util import scala.collection.mutable class LinkedHashMap[K, V] private (inner: mutable.LinkedHashMap[Box[K], V], accessOrder: Boolean) extends HashMap[K, V](inner) { self => def this() = this(mutable.LinkedHashMap.empty[Box[K], V], false) def this(initialCapacity: Int, loadFactor: Float, accessOrder: Boolean) = { this(mutable.LinkedHashMap.empty[Box[K], V], accessOrder) if (initialCapacity < 0) throw new IllegalArgumentException("initialCapacity < 0") else if (loadFactor < 0.0) throw new IllegalArgumentException("loadFactor <= 0.0") } def this(initialCapacity: Int, loadFactor: Float) = this(initialCapacity, loadFactor, false) def this(initialCapacity: Int) = this(initialCapacity, LinkedHashMap.DEFAULT_LOAD_FACTOR) def this(m: Map[_ <: K, _ <: V]) = { this() putAll(m) } override def get(key: scala.Any): V = { val value = super.get(key) if (accessOrder) { val boxedKey = Box(key.asInstanceOf[K]) if (value != null || containsKey(boxedKey)) { inner.remove(boxedKey) inner(boxedKey) = value } } value } override def put(key: K, value: V): V = { val oldValue = { if (accessOrder) { val old = remove(key) super.put(key, value) old } else { super.put(key, value) } } val iter = entrySet().iterator() if (iter.hasNext && removeEldestEntry(iter.next())) iter.remove() oldValue } protected def removeEldestEntry(eldest: Map.Entry[K, V]): Boolean = false override def clone(): AnyRef = { new LinkedHashMap(inner.clone(), accessOrder) } } object LinkedHashMap { private[LinkedHashMap] final val DEFAULT_INITIAL_CAPACITY = 16 private[LinkedHashMap] final val DEFAULT_LOAD_FACTOR = 0.75f }
lrytz/scala-js
javalib/src/main/scala/java/util/LinkedHashMap.scala
Scala
bsd-3-clause
1,827
/* * Copyright 2011 TomTom International BV * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package tomtom.splitter.config import java.io.{File, StringReader} import org.scalatest.{FlatSpec, Matchers} class ConfigSpec extends FlatSpec with Matchers with ConfigParser { type ? = this.type behavior of "the config parser" it should "behave as expected" in { val testConfig = """ myInt = 7 myBool = false myString = "now is the time" myStringNewline = "now\nis the\ntime" myStringTab = "now\tis the time" myFile = "a.txt" # comment, preceded & followed by blank line mySubConfig { mySubInt = 8 mySubBool = true mySubString = "for all good \"men\" to come to the aid of their country" mySubSubConfig { foo = "bar" } } """ val config = new Config { val configMap = parse(new StringReader(testConfig)) } config.int("myInt") should be(7) config.intOpt("myInt") should be(Some(7)) config.intOpt("yourInt") should be(None) config.bool("myBool") should be(false) config.boolOpt("myBool") should be(Some(false)) config.boolOpt("yourBool") should be(None) config.string("myString") should be("now is the time") config.stringOpt("myString") should be(Some("now is the time")) config.stringOpt("yourString") should be(None) config.file("myFile") should be(new File("a.txt")) config.file("missing file", new File("b.txt")) should be(new File("b.txt")) config.fileOpt("missing file") should be(None) config.fileOpt("myFile") should be(Some(new File("a.txt"))) config.string("myStringNewline") should be( """|now |is the |time""".stripMargin) config.string("myStringTab") should be("now\tis the time") val subConfig = config.config("mySubConfig") config.configOpt("mySubConfig") should be(Some(subConfig)) config.configOpt("yourSubConfig") should be(None) subConfig.int("mySubInt") should be(8) subConfig.int("mySubIntMissing", 7) should be(7) subConfig.string("mySubString") should be("for all good \"men\" to come to the aid of their country") subConfig.string("missing string", "foo") should be("foo") subConfig.bool("mySubBool") should be(true) subConfig.bool("missing bool", default = false) should be(false) val subsub = subConfig.config("mySubSubConfig") subsub.string("foo") should be("bar") config.string("mySubConfig.mySubSubConfig.foo") should be("bar") a[RuntimeException] shouldBe thrownBy(config.config("")) a[RuntimeException] shouldBe thrownBy(config.config("no such config.bar")) a[RuntimeException] shouldBe thrownBy(config.bool("no such boolean")) } it should "load a global config from various places" in { Config.loadResource("/test.config") Config.config.config("audit").string("level") should be("warn") Config.loadFile(new File("src/test/resources/test.config")) Config.config.config("audit").string("level") should be("warn") Config.loadString( """ |audit { | level = "info" |} """.stripMargin) Config.config.config("audit").string("level") should be("info") } it should "fail predictably when not initialized" in { Config._config.set(null) a[RuntimeException] should be thrownBy Config.config } it should "fail predictably when parsing garbage" in 
{ a[RuntimeException] should be thrownBy new ConfigParser {}.parse(new StringReader("blah")) } it should "missing config should behave with configOpt" in { Config.loadResource("/test.config") Config.config.configOpt("no such config") should be(None) } it should "behave correctly with default values for config values in non-existing subconfigs" in { Config.loadResource("/test.config") Config.config.int("no such config.myInt", 7) should be(7) } }
ebowman/splitter
src/test/scala/tomtom/splitter/config/ConfigSpec.scala
Scala
apache-2.0
4,439
package pdi.jwt

import java.time.Clock

import io.circe._

class JwtCirceSpec extends JwtJsonCommonSpec[Json] with CirceFixture {
  override def jwtJsonCommon(clock: Clock) = JwtCirce(clock)
}
pauldijou/jwt-scala
json/circe/src/test/scala/JwtCirceSpec.scala
Scala
apache-2.0
193
/*
 * Copyright (c) 2014 Mario Pastorelli ([email protected])
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package shapeless.examples

import shapeless._
import scala.collection.immutable.{:: => Cons}
import scala.util.{Try,Success,Failure}

/*
 * This example shows how to create a serializer/deserializer from CSV to
 * products.
 *
 */

// The class to serialize or deserialize
case class Person(name: String, surname: String, age: Int, id: Option[Int], weight: Option[Int], height: Int)

object CSVExample extends App {
  import CSVConverter._

  val input = """John,Carmack,23,0,,100
Brian,Fargo,35,,,110
Markus,Persson,32,,,120"""

  println(CSVConverter[List[Person]].from(input))
}

// Implementation

/** Exception to throw if something goes wrong during CSV parsing */
class CSVException(s: String) extends RuntimeException(s)

/** Trait for types that can be serialized to/deserialized from CSV */
trait CSVConverter[T] {
  def from(s: String): Try[T]
  def to(t: T): String
}

/** Instances of the CSVConverter trait */
object CSVConverter {

  def apply[T](implicit st: => CSVConverter[T]): CSVConverter[T] = st

  def fail(s: String) = Failure(new CSVException(s))

  // Primitives

  implicit def stringCSVConverter: CSVConverter[String] = new CSVConverter[String] {
    def from(s: String): Try[String] = Success(s)
    def to(s: String): String = s
  }

  implicit def intCsvConverter: CSVConverter[Int] = new CSVConverter[Int] {
    def from(s: String): Try[Int] = Try(s.toInt)
    def to(i: Int): String = i.toString
  }

  def listCsvLinesConverter[A](l: List[String])(implicit ec: CSVConverter[A])
      : Try[List[A]] = l match {
    case Nil => Success(Nil)
    case Cons(s,ss) => for {
        x  <- ec.from(s)
        xs <- listCsvLinesConverter(ss)(ec)
      } yield Cons(x, xs)
  }

  implicit def listCsvConverter[A](implicit ec: CSVConverter[A])
      : CSVConverter[List[A]] = new CSVConverter[List[A]] {
    def from(s: String): Try[List[A]] = listCsvLinesConverter(s.split("\\n").toList)(ec)
    def to(l: List[A]): String = l.map(ec.to).mkString("\\n")
  }

  // HList

  implicit def deriveHNil: CSVConverter[HNil] = new CSVConverter[HNil] {
    def from(s: String): Try[HNil] = s match {
      case "" => Success(HNil)
      case s  => fail("Cannot convert '" ++ s ++ "' to HNil")
    }
    def to(n: HNil) = ""
  }

  implicit def deriveHCons[V, T <: HList]
      (implicit scv: => CSVConverter[V], sct: => CSVConverter[T])
      : CSVConverter[V :: T] = new CSVConverter[V :: T] {

    def from(s: String): Try[V :: T] = s.span(_ != ',') match {
      case (before, after) =>
        for {
          front <- scv.from(before)
          back  <- sct.from(if (after.isEmpty) after else after.tail)
        } yield front :: back
      case _ => fail("Cannot convert '" ++ s ++ "' to HList")
    }

    def to(ft: V :: T): String = {
      scv.to(ft.head) ++ "," ++ sct.to(ft.tail)
    }
  }

  implicit def deriveHConsOption[V, T <: HList]
      (implicit scv: => CSVConverter[V], sct: => CSVConverter[T])
      : CSVConverter[Option[V] :: T] = new CSVConverter[Option[V] :: T] {

    def from(s: String): Try[Option[V] :: T] = s.span(_ != ',') match {
      case (before, after) =>
        (for {
          front <- scv.from(before)
          back  <- sct.from(if (after.isEmpty) after else after.tail)
        } yield Some(front) :: back).orElse {
          sct.from(if (s.isEmpty) s else s.tail).map(None :: _)
        }
      case _ => fail("Cannot convert '" ++ s ++ "' to HList")
    }

    def to(ft: Option[V] :: T): String = {
      ft.head.map(scv.to(_) ++ ",").getOrElse("") ++ sct.to(ft.tail)
    }
  }

  // Anything with a Generic

  implicit def deriveClass[A,R](implicit gen: Generic.Aux[A,R], conv: CSVConverter[R])
      : CSVConverter[A] = new CSVConverter[A] {
    def from(s: String): Try[A] = conv.from(s).map(gen.from)
    def to(a: A): String = conv.to(gen.to(a))
  }
}
isaka/shapeless
examples/src/main/scala/shapeless/examples/csv.scala
Scala
apache-2.0
4,556
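A minimal usage sketch for the generically derived CSVConverter in csv.scala above. The object name, the Employee case class, and the sample values are illustrative additions, not part of the original file; the sketch assumes that file's definitions are on the classpath.

// Illustrative only: serializes a hypothetical case class list with the
// generically derived CSVConverter above, then parses it back.
object CSVConverterUsageSketch extends App {
  import shapeless.examples.CSVConverter

  // Hypothetical record type; any flat case class of supported field types works.
  case class Employee(name: String, id: Int, nickname: Option[String])

  val people = List(Employee("Ada", 1, Some("ada")), Employee("Grace", 2, None))

  // Serialize: one comma-separated line per element.
  val csv: String = CSVConverter[List[Employee]].to(people)

  // Deserialize: Success(List(...)) on well-formed input, Failure(CSVException) otherwise.
  println(CSVConverter[List[Employee]].from(csv))
}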
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * This code is based on code org.apache.spark.repl.SparkILoop released under Apache 2.0" * Link on Github: https://github.com/apache/spark/blob/master/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala * Author: Alexander Spoon */ package org.apache.spark.repl import java.io._ import java.lang.{Class => jClass} import java.net.URI import org.apache.spark.{Logging, SparkContext} import org.apache.spark.annotation.DeveloperApi import org.apache.spark.h2o.H2OContext import org.apache.spark.util.Utils import scala.Predef.{println => _, _} import scala.language.{existentials, implicitConversions, postfixOps} import scala.reflect.NameTransformer._ import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse} import scala.reflect.{ClassTag, classTag} import scala.tools.nsc._ import scala.tools.nsc.interpreter.session._ import scala.tools.nsc.interpreter.{Results => IR, _} import scala.tools.nsc.io.{Directory, File} import scala.tools.nsc.util.ScalaClassLoader._ import scala.tools.nsc.util._ import scala.tools.util.{Javap, _} import scala.util.Properties.{javaVersion, jdkHome} /** The Scala interactive shell. It provides a read-eval-print loop * around the Interpreter class. * After instantiation, clients should call the main() method. * * If no in0 is specified, then input will come from the console, and * the class will attempt to provide input editing feature such as * input history. * * @author Moez A. Abdel-Gawad * @author Lex Spoon * @version 1.2 */ @DeveloperApi class H2OILoop(var sparkContext: SparkContext, var h2oContext: H2OContext, var sessionID: Int ) extends AnyRef with LoopCommands with H2OILoopInit with Logging { /** Show the history */ private lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") { override def usage = "[num]" def defaultLines = 20 def apply(line: String): Result = { if (history eq NoHistory) return "No history available." 
val xs = words(line) val current = history.index val count = try xs.head.toInt catch { case _: Exception => defaultLines } val lines = history.asStrings takeRight count val offset = current - lines.size + 1 for ((line, index) <- lines.zipWithIndex) echo("%3d %s".format(index + offset, line)) } } /** Standard commands */ private lazy val standardCommands = List( LoopCommand.cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath), LoopCommand.cmd("help", "[command]", "print this summary or command-specific help", helpCommand), historyCommand, LoopCommand.cmd("h?", "<string>", "search the history", searchHistory), LoopCommand.cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand), LoopCommand.cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand), LoopCommand.cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand), LoopCommand.cmd("load", "<path>", "load and interpret a Scala file", loadCommand), LoopCommand.nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand), // nullary("power", "enable power user mode", powerCmd), LoopCommand.nullary("quit", "exit the repl", () => Result(false, None)), LoopCommand.nullary("replay", "reset execution and replay all previous commands", replay), LoopCommand.nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand), shCommand, LoopCommand.nullary("silent", "disable/enable automatic printing of results", verbosity), LoopCommand.nullary("fallback", """ |disable/enable advanced repl changes, these fix some issues but may introduce others. |This mode will be removed once these fixes stablize""".stripMargin, toggleFallbackMode), LoopCommand.cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand), LoopCommand.nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand) ) /** Power user commands */ private lazy val powerCommands: List[LoopCommand] = List( // cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand) ) // private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap()) private lazy val javap = try newJavap() catch { case _: Exception => null } /** fork a shell and run a command */ private lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") { override def usage = "<command line>" def apply(line: String): Result = line match { case "" => showUsage() case _ => val toRun = classOf[ProcessResult].getName + "(" + string2codeQuoted(line) + ")" intp interpret toRun () } } /** The context class loader at the time this object was created */ protected val originalClassLoader = Utils.getContextOrSparkClassLoader private val in0: Option[BufferedReader] = None private val outWriter = new StringWriter() private val typeTransforms = List( "scala.collection.immutable." -> "immutable.", "scala.collection.mutable." -> "mutable.", "scala.collection.generic." -> "generic.", "java.lang." -> "jl.", "scala.runtime." -> "runtime." ) private val replayQuestionMessage = """|That entry seems to have slain the compiler. Shall I replay |your session? I can re-run each line except the last one. 
|[y/n] """.trim.stripMargin private val u: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe private val m = u.runtimeMirror(Utils.getSparkClassLoader) private val baos = new ByteArrayOutputStream() private val printStream = new PrintStream(baos) // NOTE: Exposed in package for testing private[repl] var settings: Settings = _ private[repl] var intp: H2OIMain = _ private var in: InteractiveReader = SimpleReader(new BufferedReader(new StringReader("")), out, false) // the input stream from which commands come // classpath entries added via :cp private var addedClasspath: String = "" /** A reverse list of commands to replay if the user requests a :replay */ private var replayCommandStack: List[String] = Nil private var fallbackMode = false private var currentPrompt = Properties.shellPromptString implicit def stabilizeIMain(intp: H2OIMain) = new IMainOps[intp.type](intp) override def echoCommandMessage(msg: String) { intp.reporter printMessage msg } /** Close the interpreter and set the var to null. */ def closeInterpreter() { if (intp ne null) { intp.close() intp = null } } /** * Sets the prompt string used by the REPL. * * @param prompt The new prompt string */ @DeveloperApi def setPrompt(prompt: String) = currentPrompt = prompt /** * Represents the current prompt string used by the REPL. * * @return The current prompt string */ @DeveloperApi def prompt = currentPrompt /** * Provides a list of available commands. * * @return The list of commands */ @DeveloperApi def commands: List[LoopCommand] = standardCommands /*++ ( if (isReplPower) powerCommands else Nil )*/ def initH2OILoop(): Unit = savingContextLoader { if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true") this.settings = createSettings() createInterpreter() lazy val tagOfH2OIMain = tagOfStaticClass[org.apache.spark.repl.H2OIMain] // Bind intp somewhere out of the regular namespace where // we can get at it in generated code. addThunk(intp.quietBind(NamedParam[H2OIMain]("$intp", intp)(tagOfH2OIMain, classTag[H2OIMain]))) addThunk({ val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp()) if (autorun.isDefined) intp.quietRun(autorun.get) }) addThunk(initializeSpark()) if (intp.reporter.hasErrors) return false // This is about the illusion of snappiness. We call initialize() // which spins off a separate thread, then print the prompt and try // our best to look ready. The interlocking lazy vals tend to // inter-deadlock, so we break the cycle with a single asynchronous // message to an actor. 
if (isAsync) { intp initialize initializedCallback() createAsyncListener() // listens for signal to run postInitialization } else { intp.initializeSynchronous() postInitialization() } // printWelcome() loadFiles(settings) // for(jar <-sc.addedJars){ // intp.addUrlsToClassPath(new URL(jar._1)) // } } /** process command-line arguments and do as they request */ def process(args: Array[String]): Boolean = { val command = new SparkCommandLine(args.toList, msg => echo(msg)) def neededHelp(): String = (if (command.settings.help.value) command.usageMsg + "\\n" else "") + (if (command.settings.Xhelp.value) command.xusageMsg + "\\n" else "") // if they asked for no help and command is valid, we call the real main neededHelp() match { case "" => command.ok && process(command.settings) case help => echoNoNL(help); true } } /** * Get response of interpreter * @return */ def interpreterResponse: String = { val res = outWriter.toString outWriter.getBuffer.setLength(0) // reset the writer res } /** * Redirected printed output comming from commands written in the interpreter * @return */ def printedOutput: String = { val result = baos.toString() baos.reset() result } /** * Run bunch of code in a string * @param code * @return */ def runCode(code: String): String = { import java.io.{BufferedReader, StringReader} // set the input stream val input = new BufferedReader(new StringReader(code)) in = SimpleReader(input, out, false) // redirect output from console to our own stream scala.Console.withOut(printStream) { try loop() catch AbstractOrMissingHandler() } if (intp.reporter.hasErrors) { "Error" } else { "Success" } } /** * Constructs a new interpreter. */ protected def createInterpreter() { require(settings != null) if (addedClasspath != "") settings.classpath.append(addedClasspath) val addedJars = if (Utils.isWindows) { // Strip any URI scheme prefix so we can add the correct path to the classpath // e.g. file:/C:/my/path.jar -> C:/my/path.jar SparkILoop.getAddedJars.map { jar => new URI(jar).getPath.stripPrefix("/") } } else { // We need new URI(jar).getPath here for the case that `jar` includes encoded white space (%20). SparkILoop.getAddedJars.map { jar => new URI(jar).getPath } } // work around for Scala bug val totalClassPath = addedJars.foldLeft( settings.classpath.value)((l, r) => ClassPath.join(l, r)) this.settings.classpath.value = totalClassPath intp = new H2OILoopInterpreter } // def isAsync = !settings.Yreplsync.value private[repl] def isAsync = false /** Record a command for replay should the user request a :replay */ private def addReplay(cmd: String) = replayCommandStack ::= cmd private def savingReplayStack[T](body: => T): T = { val saved = replayCommandStack try body finally replayCommandStack = saved } private def savingReader[T](body: => T): T = { val saved = in try body finally in = saved } private def sparkCleanUp() { echo("Stopping spark context.") intp.beQuietDuring { command("sc.stop()") } } /** print a friendly help message */ private def helpCommand(line: String): Result = { if (line == "") helpSummary() else uniqueCommand(line) match { case Some(lc) => echo("\\n" + lc.longHelp) case _ => ambiguousError(line) } } private def helpSummary() = { val usageWidth = commands map (_.usageMsg.length) max val formatStr = "%-" + usageWidth + "s %s %s" echo("All commands can be abbreviated, e.g. :he instead of :help.") echo("Those marked with a * have more detailed help, e.g. 
:help imports.\\n") commands foreach { cmd => val star = if (cmd.hasLongHelp) "*" else " " echo(formatStr.format(cmd.usageMsg, star, cmd.help)) } } private def ambiguousError(cmd: String): Result = { matchingCommands(cmd) match { case Nil => echo(cmd + ": no such command. Type :help for help.") case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?") } Result(true, None) } // private def dumpCommand(): Result = { // echo("" + power) // history.asStrings takeRight 30 foreach echo // in.redrawLine() // } // private def valsCommand(): Result = power.valsDescription private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd) private def uniqueCommand(cmd: String): Option[LoopCommand] = { // this lets us add commands willy-nilly and only requires enough command to disambiguate matchingCommands(cmd) match { case List(x) => Some(x) // exact match OK even if otherwise appears ambiguous case xs => xs find (_.name == cmd) } } private def toggleFallbackMode() { val old = fallbackMode fallbackMode = !old System.setProperty("spark.repl.fallback", fallbackMode.toString) echo( s""" |Switched ${if (old) "off" else "on"} fallback mode without restarting. | If you have defined classes in the repl, it would |be good to redefine them incase you plan to use them. If you still run |into issues it would be good to restart the repl and turn on `:fallback` |mode as first command. """.stripMargin) } // When you know you are most likely breaking into the middle // of a line being typed. This softens the blow. private[repl] def echoAndRefresh(msg: String) = { echo("\\n" + msg) in.redrawLine() } private def echoNoNL(msg: String) = { out print msg out.flush() } /** Search the history */ private def searchHistory(_cmdline: String) { val cmdline = _cmdline.toLowerCase val offset = history.index - history.size + 1 for ((line, index) <- history.asStrings.zipWithIndex; if line.toLowerCase contains cmdline) echo("%d %s".format(index + offset, line)) } // lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals]) private def history = in.history private def importsCommand(line: String): Result = { val tokens = words(line) val handlers = intp.languageWildcardHandlers ++ intp.importHandlers val isVerbose = tokens contains "-v" handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach { case (handler, idx) => val (types, terms) = handler.importedSymbols partition (_.name.isTypeName) val imps = handler.implicitSymbols val found = tokens filter (handler importsSymbolNamed _) val typeMsg = if (types.isEmpty) "" else types.size.toString + " types" val termMsg = if (terms.isEmpty) "" else terms.size.toString + " terms" val implicitMsg = if (imps.isEmpty) "" else imps.size.toString + " are implicit" val foundMsg = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "") val statsMsg = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString("(", ", ", ")") intp.reporter.printMessage("%2d) %-30s %s%s".format( idx + 1, handler.importString, statsMsg, foundMsg )) } } private def implicitsCommand(line: String): Result = onIntp { intp => import intp._ import global._ def p(x: Any) = intp.reporter.printMessage("" + x) // If an argument is given, only show a source with that // in its name somewhere. 
val args = line split "\\\\s+" val filtered = intp.implicitSymbolsBySource filter { case (source, syms) => (args contains "-v") || { if (line == "") (source.fullName.toString != "scala.Predef") else (args exists (source.name.toString contains _)) } } if (filtered.isEmpty) return "No implicits have been imported other than those in Predef." filtered foreach { case (source, syms) => p("/* " + syms.size + " implicit members imported from " + source.fullName + " */") // This groups the members by where the symbol is defined val byOwner = syms groupBy (_.owner) val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) } sortedOwners foreach { case (owner, members) => // Within each owner, we cluster results based on the final result type // if there are more than a couple, and sort each cluster based on name. // This is really just trying to make the 100 or so implicits imported // by default into something readable. val memberGroups: List[List[Symbol]] = { val groups = members groupBy (_.tpe.finalResultType) toList val (big, small) = groups partition (_._2.size > 3) val xss = ( (big sortBy (_._1.toString) map (_._2)) :+ (small flatMap (_._2)) ) xss map (xs => xs sortBy (_.name.toString)) } val ownerMessage = if (owner == source) " defined in " else " inherited from " p(" /* " + members.size + ownerMessage + owner.fullName + " */") memberGroups foreach { group => group foreach (s => p(" " + intp.symbolDefString(s))) p("") } } p("") } } /** Having inherited the difficult "var-ness" of the repl instance, * I'm trying to work around it by moving operations into a class from * which it will appear a stable prefix. */ private def onIntp[T](f: H2OIMain => T): T = f(intp) private def newJavap() = new JavapClass(addToolsJarToLoader(), new H2OIMain.ReplStrippingWriter(intp)) { override def tryClass(path: String): Array[Byte] = { val hd :: rest = path.split(".").toList // If there are dots in the name, the first segment is the // key to finding it. if (rest.nonEmpty) { intp optFlatName hd match { case Some(flat) => val clazz = flat :: rest mkString NAME_JOIN_STRING val bytes = super.tryClass(clazz) if (bytes.nonEmpty) bytes else super.tryClass(clazz + MODULE_SUFFIX_STRING) case _ => super.tryClass(path) } } else { // Look for Foo first, then Foo$, but if Foo$ is given explicitly, // we have to drop the $ to find object Foo, then tack it back onto // the end of the flattened name. def className = intp flatName path def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING val bytes = super.tryClass(className) if (bytes.nonEmpty) bytes else super.tryClass(moduleName) } } } private def addToolsJarToLoader() = { val cl = findToolsJar match { case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader) case _ => intp.classLoader } if (Javap.isAvailable(cl)) { logDebug(":javap available.") cl } else { logDebug(":javap unavailable: no tools.jar at " + jdkHome) intp.classLoader } } // private def phaseCommand(name: String): Result = { // val phased: Phased = power.phased // import phased.NoPhaseName // if (name == "clear") { // phased.set(NoPhaseName) // intp.clearExecutionWrapper() // "Cleared active phase." // } // else if (name == "") phased.get match { // case NoPhaseName => "Usage: :phase <expr> (e.g. typer, erasure.next, erasure+3)" // case ph => "Active phase is '%s'. 
(To clear, :phase clear)".format(phased.get) // } // else { // val what = phased.parse(name) // if (what.isEmpty || !phased.set(what)) // "'" + name + "' does not appear to represent a valid phase." // else { // intp.setExecutionWrapper(pathToPhaseWrapper) // val activeMessage = // if (what.toString.length == name.length) "" + what // else "%s (%s)".format(what, name) // "Active phase is now: " + activeMessage // } // } // } private def findToolsJar() = { val jdkPath = Directory(jdkHome) val jar = jdkPath / "lib" / "tools.jar" toFile; if (jar isFile) Some(jar) else if (jdkPath.isDirectory) jdkPath.deepFiles find (_.name == "tools.jar") else None } // Still todo: modules. private def typeCommand(line0: String): Result = { line0.trim match { case "" => ":type [-v] <expression>" case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true) case s => typeCommandInternal(s, false) } } /** TODO - * -n normalize * -l label with case class parameter names * -c complete - leave nothing out */ private def typeCommandInternal(expr: String, verbose: Boolean): Result = { onIntp { intp => val sym = intp.symbolOfLine(expr) if (sym.exists) intp.echoTypeSignature(sym, verbose) else "" } } private def warningsCommand(): Result = { if (intp.lastWarnings.isEmpty) "Can't find any cached warnings." else intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) } } private def javapCommand(line: String): Result = { if (javap == null) ":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome) else if (javaVersion startsWith "1.7") ":javap not yet working with java 1.7" else if (line == "") ":javap [-lcsvp] [path1 path2 ...]" else javap(words(line)) foreach { res => if (res.isError) return "Failed: " + res.value else res.show() } } private def wrapCommand(line: String): Result = { def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T" onIntp { intp => import intp._ import global._ words(line) match { case Nil => intp.executionWrapper match { case "" => "No execution wrapper is set." case s => "Current execution wrapper: " + s } case "clear" :: Nil => intp.executionWrapper match { case "" => "No execution wrapper is set." case s => intp.clearExecutionWrapper(); "Cleared execution wrapper." } case wrapper :: Nil => intp.typeOfExpression(wrapper) match { case PolyType(List(targ), MethodType(List(arg), restpe)) => intp setExecutionWrapper intp.pathToTerm(wrapper) "Set wrapper to '" + wrapper + "'" case tp => failMsg + "\\nFound: <unknown>" } case _ => failMsg } } } private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent" private def crashRecovery(ex: Throwable): Boolean = { echo(ex.toString) ex match { case _: NoSuchMethodError | _: NoClassDefFoundError => echo("\\nUnrecoverable error.") throw ex case _ => def fn(): Boolean = try in.readYesOrNo(replayQuestionMessage, { echo("\\nYou must enter y or n.") ; fn() }) catch { case _: RuntimeException => false } if (fn()) replay() else echo("\\nAbandoning crashed session.") } true } /** The main read-eval-print loop for the repl. It calls * command() for each line of input, and stops when * command() returns false. 
*/ private def loop() { def readOneLine() = { out.flush() in readLine prompt } // return false if repl should exit def processLine(line: String): Boolean = { if (isAsync) { if (!awaitInitialized()) return false runThunks() } if (line eq null) false // assume null means EOF else command(line) match { case Result(false, _) => false case Result(_, Some(finalLine)) => addReplay(finalLine) ; true case _ => true } } def innerLoop() { val shouldContinue = try { processLine(readOneLine()) } catch {case t: Throwable => crashRecovery(t)} if (shouldContinue) innerLoop() } innerLoop() } /** interpret all lines from a specified file */ private def interpretAllFrom(file: File) { savingReader { savingReplayStack { file applyReader { reader => in = SimpleReader(reader, out, false) echo("Loading " + file + "...") loop() } } } } /** create a new interpreter and replay the given commands */ private def replay() { reset() if (replayCommandStack.isEmpty) echo("Nothing to replay.") else for (cmd <- replayCommands) { echo("Replaying: " + cmd) // flush because maybe cmd will have its own output command(cmd) echo("") } } private def resetCommand() { echo("Resetting repl state.") if (replayCommandStack.nonEmpty) { echo("Forgetting this session history:\\n") replayCommands foreach echo echo("") replayCommandStack = Nil } if (intp.namedDefinedTerms.nonEmpty) echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", ")) if (intp.definedTypes.nonEmpty) echo("Forgetting defined types: " + intp.definedTypes.mkString(", ")) reset() } /** A list of commands to replay if the user requests a :replay */ private def replayCommands = replayCommandStack.reverse private def reset() { intp.reset() // unleashAndSetPhase() } private def withFile(filename: String)(action: File => Unit) { val f = File(filename) if (f.exists) action(f) else echo("That file does not exist") } // private def unleashAndSetPhase() { // if (isReplPower) { // // power.unleash() // // Set the phase to "typer" // intp beSilentDuring phaseCommand("typer") // } // } private def loadCommand(arg: String) = { var shouldReplay: Option[String] = None withFile(arg)(f => { interpretAllFrom(f) shouldReplay = Some(":load " + arg) }) Result(true, shouldReplay) } private def addAllClasspath(args: Seq[String]): Unit = { var added = false var totalClasspath = "" for (arg <- args) { val f = File(arg).normalize if (f.exists) { added = true addedClasspath = ClassPath.join(addedClasspath, f.path) totalClasspath = ClassPath.join(settings.classpath.value, addedClasspath) intp.addUrlsToClassPath(f.toURI.toURL) sparkContext.addJar(f.toURI.toURL.getPath) } } } private def addClasspath(arg: String): Unit = { val f = File(arg).normalize if (f.exists) { addedClasspath = ClassPath.join(addedClasspath, f.path) intp.addUrlsToClassPath(f.toURI.toURL) sparkContext.addJar(f.toURI.toURL.getPath) echo("Added '%s'. Your new classpath is:\\n\\"%s\\"".format(f.path, intp.global.classPath.asClasspathString)) } else echo("The path '" + f + "' doesn't seem to exist.") } private def powerCmd(): Result = { if (isReplPower) "Already in power mode." 
else enablePowerMode(false) } private[repl] def enablePowerMode(isDuringInit: Boolean) = { // replProps.power setValue true // unleashAndSetPhase() // asyncEcho(isDuringInit, power.banner) } private def asyncEcho(async: Boolean, msg: => String) { if (async) asyncMessage(msg) else echo(msg) } import paste.{ContinueString, PromptString} private[repl] def echo(msg: String) = { out println msg out.flush() } override protected def out: JPrintWriter = new JPrintWriter(outWriter) private def verbosity() = { // val old = intp.printResults // intp.printResults = !old // echo("Switched " + (if (old) "off" else "on") + " result printing.") } /** Run one command submitted by the user. Two values are returned: * (1) whether to keep running, (2) the line to record for replay, * if any. */ private[repl] def command(line: String): Result = { if (line startsWith ":") { val cmd = line.tail takeWhile (x => !x.isWhitespace) uniqueCommand(cmd) match { case Some(lc) => lc(line.tail stripPrefix cmd dropWhile (_.isWhitespace)) case _ => ambiguousError(cmd) } } else if (intp.global == null) Result(false, None) // Notice failure to create compiler else Result(true, interpretStartingWith(line)) } private def pasteCommand(): Result = { echo("// Entering paste mode (ctrl-D to finish)\\n") val code = readWhile(_ => true) mkString "\\n" echo("\\n// Exiting paste mode, now interpreting.\\n") intp interpret code () } private def readWhile(cond: String => Boolean) = { Iterator continually in.readLine("") takeWhile (x => x != null && cond(x)) } /** Interpret expressions starting with the first line. * Read lines until a complete compilation unit is available * or until a syntax error has been seen. If a full unit is * read, go ahead and interpret it. Return the full string * to be recorded for replay, if any. */ private def interpretStartingWith(code: String): Option[String] = { // signal completion non-completion input has been received in.completion.resetVerbosity() def reallyInterpret = { val reallyResult = intp.interpret(code) (reallyResult, reallyResult match { case IR.Error => None case IR.Success => Some(code) case IR.Incomplete => if (in.interactive && code.endsWith("\\n\\n")) { echo("You typed two blank lines. Starting a new command.") None } else in.readLine(ContinueString) match { case null => // we know compilation is going to fail since we're at EOF and the // parser thinks the input is still incomplete, but since this is // a file being read non-interactively we want to fail. So we send // it straight to the compiler for the nice error message. intp.compileString(code) None case line => interpretStartingWith(code + "\\n" + line) } }) } /** Here we place ourselves between the user and the interpreter and examine * the input they are ostensibly submitting. We intervene in several cases: * * 1) If the line starts with "scala> " it is assumed to be an interpreter paste. * 2) If the line starts with "." (but not ".." or "./") it is treated as an invocation * on the previous result. * 3) If the Completion object's execute returns Some(_), we inject that value * and avoid the interpreter, as it's likely not valid scala code. 
*/ if (code == "") None else if (!paste.running && code.trim.startsWith(PromptString)) { paste.transcript(code) None } else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") { interpretStartingWith(intp.mostRecentVar + code) } else if (code.trim startsWith "//") { // line comment, do nothing None } else reallyInterpret._2 } // runs :load `file` on any files passed via -i private def loadFiles(settings: Settings) = settings match { case settings: SparkRunnerSettings => for (filename <- settings.loadfiles.value) { val cmd = ":load " + filename command(cmd) addReplay(cmd) echo("") } case _ => } /** Tries to create a JLineReader, falling back to SimpleReader: * unless settings or properties are such that it should start * with SimpleReader. */ private def chooseReader(settings: Settings): InteractiveReader = { if (settings.Xnojline.value || Properties.isEmacsShell) SimpleReader() else try new SparkJLineReader( NoCompletion ) catch { case ex@(_: Exception | _: NoClassDefFoundError) => echo("Failed to created SparkJLineReader: " + ex + "\\nFalling back to SimpleReader.") SimpleReader() } } private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] = u.TypeTag[T]( m, new TypeCreator { def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U#Type = m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U#Type] }) private def process(settings: Settings): Boolean = savingContextLoader { if (getMaster() == "yarn-client") System.setProperty("SPARK_YARN_MODE", "true") this.settings = settings createInterpreter() // sets in to some kind of reader depending on environmental cues in = in0 match { case Some(reader) => SimpleReader(reader, out, true) case None => // some post-initialization chooseReader(settings) match { case x: SparkJLineReader => addThunk(x.consoleReader.postInit); x case x => x } } lazy val tagOfH2OIMain = tagOfStaticClass[H2OIMain] // Bind intp somewhere out of the regular namespace where // we can get at it in generated code. addThunk(intp.quietBind(NamedParam[H2OIMain]("$intp", intp)(tagOfH2OIMain, classTag[H2OIMain]))) addThunk({ val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp()) if (autorun.isDefined) intp.quietRun(autorun.get) }) addThunk(printWelcome()) addThunk(initializeSpark()) // it is broken on startup; go ahead and exit if (intp.reporter.hasErrors) return false // This is about the illusion of snappiness. We call initialize() // which spins off a separate thread, then print the prompt and try // our best to look ready. The interlocking lazy vals tend to // inter-deadlock, so we break the cycle with a single asynchronous // message to an actor. 
if (isAsync) { intp initialize initializedCallback() createAsyncListener() // listens for signal to run postInitialization } else { intp.initializeSynchronous() postInitialization() } // printWelcome() loadFiles(settings) try loop() catch AbstractOrMissingHandler() finally closeInterpreter() true } private def getMaster(): String = { sparkContext.master } /** * This method is used to initialize settings to slave interpreters only, sc can not be null when this method is called * @return */ private def createSettings(): Settings = { settings = new Settings() settings.usejavacp.value = true // set the classloader of some H2O class settings.embeddedDefaults[H2OContext] // synchronous calls settings.Yreplsync.value = true // add jars to the interpreter ( needed for the hadoop ) for (jar <- sparkContext.jars) { settings.classpath.append(jar) settings.bootclasspath.append(jar) } for (jar <- sparkContext.addedJars) { settings.bootclasspath.append(jar._1) settings.classpath.append(jar._1) } settings } class IMainOps[T <: H2OIMain](val intp: T) { import intp._ import global._ def echoTypeSignature(sym: Symbol, verbose: Boolean) = { if (verbose) H2OILoop.this.echo("// Type signature") printAfterTyper("" + replInfo(sym)) if (verbose) { H2OILoop.this.echo("\\n// Internal Type structure") echoTypeStructure(sym) } } def echoTypeStructure(sym: Symbol) = printAfterTyper("" + deconstruct.show(replInfo(sym))) /** Strip NullaryMethodType artifacts. */ private def replInfo(sym: Symbol) = { sym.info match { case NullaryMethodType(restpe) if sym.isAccessor => restpe case info => info } } def printAfterTyper(msg: => String) = intp.reporter printMessage afterTyper(msg) } class H2OILoopInterpreter extends H2OIMain(settings, out, sessionID) { outer => override private[repl] lazy val formatting = new Formatting { def prompt = H2OILoop.this.prompt } override protected def parentClassLoader = SparkHelper.explicitParentLoader(settings).getOrElse(classOf[H2OILoop].getClassLoader) } private object paste extends Pasted { val ContinueString = " | " val PromptString = "scala> " def interpret(line: String): Unit = { echo(line.trim) intp interpret line echo("") } def transcript(start: String) = { echo("\\n// Detected repl transcript paste: ctrl-D to finish.\\n") apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim)) } } initH2OILoop() }
tromika/sparkling-water
core/src/main/scala/org/apache/spark/repl/H2OILoop.scala
Scala
apache-2.0
38,446
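The REPL above resolves abbreviated commands (e.g. :he for :help) through matchingCommands and uniqueCommand. The following standalone sketch models just that prefix-matching rule; Cmd and the command list are simplified placeholders, not the file's LoopCommand hierarchy.

// Simplified model of the command-abbreviation lookup used by the REPL above.
object CommandLookupSketch extends App {
  final case class Cmd(name: String) // placeholder for LoopCommand

  val commands = List(Cmd("help"), Cmd("history"), Cmd("h?"), Cmd("load"), Cmd("replay"))

  def matching(prefix: String): List[Cmd] = commands.filter(_.name.startsWith(prefix))

  // A prefix is accepted if it matches exactly one command, or if it is itself
  // the full name of a command (exact match wins over ambiguity).
  def unique(prefix: String): Option[Cmd] = matching(prefix) match {
    case List(x) => Some(x)
    case xs      => xs.find(_.name == prefix)
  }

  println(unique("lo")) // Some(Cmd(load)) -- unambiguous prefix
  println(unique("h"))  // None            -- ambiguous: help, history, h?
  println(unique("h?")) // Some(Cmd(h?))   -- exact match despite ambiguity
}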
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.storage import java.io.IOException import java.util.{HashMap => JHashMap} import java.util.concurrent.TimeUnit import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.{ExecutionContext, Future} import scala.util.Random import org.apache.spark.SparkConf import org.apache.spark.annotation.DeveloperApi import org.apache.spark.internal.{config, Logging} import org.apache.spark.network.shuffle.ExternalShuffleClient import org.apache.spark.rpc.{RpcCallContext, RpcEndpointRef, RpcEnv, ThreadSafeRpcEndpoint} import org.apache.spark.scheduler._ import org.apache.spark.storage.BlockManagerMessages._ import org.apache.spark.util.{RpcUtils, ThreadUtils, Utils} /** * BlockManagerMasterEndpoint is an [[ThreadSafeRpcEndpoint]] on the master node to track statuses * of all slaves' block managers. */ private[spark] class BlockManagerMasterEndpoint( override val rpcEnv: RpcEnv, val isLocal: Boolean, conf: SparkConf, listenerBus: LiveListenerBus, externalShuffleClient: Option[ExternalShuffleClient]) extends ThreadSafeRpcEndpoint with Logging { // Mapping from block manager id to the block manager's information. private val blockManagerInfo = new mutable.HashMap[BlockManagerId, BlockManagerInfo] // Mapping from external shuffle service block manager id to the block statuses. private val blockStatusByShuffleService = new mutable.HashMap[BlockManagerId, JHashMap[BlockId, BlockStatus]] // Mapping from executor ID to block manager ID. private val blockManagerIdByExecutor = new mutable.HashMap[String, BlockManagerId] // Mapping from block id to the set of block managers that have the block. 
private val blockLocations = new JHashMap[BlockId, mutable.HashSet[BlockManagerId]] private val askThreadPool = ThreadUtils.newDaemonCachedThreadPool("block-manager-ask-thread-pool", 100) private implicit val askExecutionContext = ExecutionContext.fromExecutorService(askThreadPool) private val topologyMapper = { val topologyMapperClassName = conf.get( config.STORAGE_REPLICATION_TOPOLOGY_MAPPER) val clazz = Utils.classForName(topologyMapperClassName) val mapper = clazz.getConstructor(classOf[SparkConf]).newInstance(conf).asInstanceOf[TopologyMapper] logInfo(s"Using $topologyMapperClassName for getting topology information") mapper } val proactivelyReplicate = conf.get(config.STORAGE_REPLICATION_PROACTIVE) val defaultRpcTimeout = RpcUtils.askRpcTimeout(conf) logInfo("BlockManagerMasterEndpoint up") // same as `conf.get(config.SHUFFLE_SERVICE_ENABLED) // && conf.get(config.SHUFFLE_SERVICE_FETCH_RDD_ENABLED)` private val externalShuffleServiceRddFetchEnabled: Boolean = externalShuffleClient.isDefined private val externalShuffleServicePort: Int = StorageUtils.externalShuffleServicePort(conf) override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = { case RegisterBlockManager(blockManagerId, maxOnHeapMemSize, maxOffHeapMemSize, slaveEndpoint) => context.reply(register(blockManagerId, maxOnHeapMemSize, maxOffHeapMemSize, slaveEndpoint)) case _updateBlockInfo @ UpdateBlockInfo(blockManagerId, blockId, storageLevel, deserializedSize, size) => context.reply(updateBlockInfo(blockManagerId, blockId, storageLevel, deserializedSize, size)) listenerBus.post(SparkListenerBlockUpdated(BlockUpdatedInfo(_updateBlockInfo))) case GetLocations(blockId) => context.reply(getLocations(blockId)) case GetLocationsAndStatus(blockId) => context.reply(getLocationsAndStatus(blockId)) case GetLocationsMultipleBlockIds(blockIds) => context.reply(getLocationsMultipleBlockIds(blockIds)) case GetPeers(blockManagerId) => context.reply(getPeers(blockManagerId)) case GetExecutorEndpointRef(executorId) => context.reply(getExecutorEndpointRef(executorId)) case GetMemoryStatus => context.reply(memoryStatus) case GetStorageStatus => context.reply(storageStatus) case GetBlockStatus(blockId, askSlaves) => context.reply(blockStatus(blockId, askSlaves)) case IsExecutorAlive(executorId) => context.reply(blockManagerIdByExecutor.contains(executorId)) case GetMatchingBlockIds(filter, askSlaves) => context.reply(getMatchingBlockIds(filter, askSlaves)) case RemoveRdd(rddId) => context.reply(removeRdd(rddId)) case RemoveShuffle(shuffleId) => context.reply(removeShuffle(shuffleId)) case RemoveBroadcast(broadcastId, removeFromDriver) => context.reply(removeBroadcast(broadcastId, removeFromDriver)) case RemoveBlock(blockId) => removeBlockFromWorkers(blockId) context.reply(true) case RemoveExecutor(execId) => removeExecutor(execId) context.reply(true) case StopBlockManagerMaster => context.reply(true) stop() case BlockManagerHeartbeat(blockManagerId) => context.reply(heartbeatReceived(blockManagerId)) case HasExclusiveCachedBlocks(executorId) => blockManagerIdByExecutor.get(executorId) match { case Some(bm) => if (blockManagerInfo.contains(bm)) { val bmInfo = blockManagerInfo(bm) context.reply(bmInfo.exclusiveCachedBlocks.nonEmpty) } else { context.reply(false) } case None => context.reply(false) } } private def removeRdd(rddId: Int): Future[Seq[Int]] = { // First remove the metadata for the given RDD, and then asynchronously remove the blocks // from the slaves. 
// The message sent to the slaves to remove the RDD val removeMsg = RemoveRdd(rddId) // Find all blocks for the given RDD, remove the block from both blockLocations and // the blockManagerInfo that is tracking the blocks and create the futures which asynchronously // remove the blocks from slaves and gives back the number of removed blocks val blocks = blockLocations.asScala.keys.flatMap(_.asRDDId).filter(_.rddId == rddId) val blocksToDeleteByShuffleService = new mutable.HashMap[BlockManagerId, mutable.HashSet[RDDBlockId]] blocks.foreach { blockId => val bms: mutable.HashSet[BlockManagerId] = blockLocations.remove(blockId) val (bmIdsExtShuffle, bmIdsExecutor) = bms.partition(_.port == externalShuffleServicePort) val liveExecutorsForBlock = bmIdsExecutor.map(_.executorId).toSet bmIdsExtShuffle.foreach { bmIdForShuffleService => // if the original executor is already released then delete this disk block via // the external shuffle service if (!liveExecutorsForBlock.contains(bmIdForShuffleService.executorId)) { val blockIdsToDel = blocksToDeleteByShuffleService.getOrElseUpdate(bmIdForShuffleService, new mutable.HashSet[RDDBlockId]()) blockIdsToDel += blockId blockStatusByShuffleService.get(bmIdForShuffleService).foreach { blockStatus => blockStatus.remove(blockId) } } } bmIdsExecutor.foreach { bmId => blockManagerInfo.get(bmId).foreach { bmInfo => bmInfo.removeBlock(blockId) } } } val removeRddFromExecutorsFutures = blockManagerInfo.values.map { bmInfo => bmInfo.slaveEndpoint.ask[Int](removeMsg).recover { case e: IOException => logWarning(s"Error trying to remove RDD ${removeMsg.rddId} " + s"from block manager ${bmInfo.blockManagerId}", e) 0 // zero blocks were removed } }.toSeq val removeRddBlockViaExtShuffleServiceFutures = externalShuffleClient.map { shuffleClient => blocksToDeleteByShuffleService.map { case (bmId, blockIds) => Future[Int] { val numRemovedBlocks = shuffleClient.removeBlocks( bmId.host, bmId.port, bmId.executorId, blockIds.map(_.toString).toArray) numRemovedBlocks.get(defaultRpcTimeout.duration.toSeconds, TimeUnit.SECONDS) } } }.getOrElse(Seq.empty) Future.sequence(removeRddFromExecutorsFutures ++ removeRddBlockViaExtShuffleServiceFutures) } private def removeShuffle(shuffleId: Int): Future[Seq[Boolean]] = { // Nothing to do in the BlockManagerMasterEndpoint data structures val removeMsg = RemoveShuffle(shuffleId) Future.sequence( blockManagerInfo.values.map { bm => bm.slaveEndpoint.ask[Boolean](removeMsg) }.toSeq ) } /** * Delegate RemoveBroadcast messages to each BlockManager because the master may not notified * of all broadcast blocks. If removeFromDriver is false, broadcast blocks are only removed * from the executors, but not from the driver. */ private def removeBroadcast(broadcastId: Long, removeFromDriver: Boolean): Future[Seq[Int]] = { val removeMsg = RemoveBroadcast(broadcastId, removeFromDriver) val requiredBlockManagers = blockManagerInfo.values.filter { info => removeFromDriver || !info.blockManagerId.isDriver } val futures = requiredBlockManagers.map { bm => bm.slaveEndpoint.ask[Int](removeMsg).recover { case e: IOException => logWarning(s"Error trying to remove broadcast $broadcastId from block manager " + s"${bm.blockManagerId}", e) 0 // zero blocks were removed } }.toSeq Future.sequence(futures) } private def removeBlockManager(blockManagerId: BlockManagerId) { val info = blockManagerInfo(blockManagerId) // Remove the block manager from blockManagerIdByExecutor. 
blockManagerIdByExecutor -= blockManagerId.executorId // Remove it from blockManagerInfo and remove all the blocks. blockManagerInfo.remove(blockManagerId) val iterator = info.blocks.keySet.iterator while (iterator.hasNext) { val blockId = iterator.next val locations = blockLocations.get(blockId) locations -= blockManagerId // De-register the block if none of the block managers have it. Otherwise, if pro-active // replication is enabled, and a block is either an RDD or a test block (the latter is used // for unit testing), we send a message to a randomly chosen executor location to replicate // the given block. Note that we ignore other block types (such as broadcast/shuffle blocks // etc.) as replication doesn't make much sense in that context. if (locations.size == 0) { blockLocations.remove(blockId) logWarning(s"No more replicas available for $blockId !") } else if (proactivelyReplicate && (blockId.isRDD || blockId.isInstanceOf[TestBlockId])) { // As a heursitic, assume single executor failure to find out the number of replicas that // existed before failure val maxReplicas = locations.size + 1 val i = (new Random(blockId.hashCode)).nextInt(locations.size) val blockLocations = locations.toSeq val candidateBMId = blockLocations(i) blockManagerInfo.get(candidateBMId).foreach { bm => val remainingLocations = locations.toSeq.filter(bm => bm != candidateBMId) val replicateMsg = ReplicateBlock(blockId, remainingLocations, maxReplicas) bm.slaveEndpoint.ask[Boolean](replicateMsg) } } } listenerBus.post(SparkListenerBlockManagerRemoved(System.currentTimeMillis(), blockManagerId)) logInfo(s"Removing block manager $blockManagerId") } private def removeExecutor(execId: String) { logInfo("Trying to remove executor " + execId + " from BlockManagerMaster.") blockManagerIdByExecutor.get(execId).foreach(removeBlockManager) } /** * Return true if the driver knows about the given block manager. Otherwise, return false, * indicating that the block manager should re-register. */ private def heartbeatReceived(blockManagerId: BlockManagerId): Boolean = { if (!blockManagerInfo.contains(blockManagerId)) { blockManagerId.isDriver && !isLocal } else { blockManagerInfo(blockManagerId).updateLastSeenMs() true } } // Remove a block from the slaves that have it. This can only be used to remove // blocks that the master knows about. private def removeBlockFromWorkers(blockId: BlockId) { val locations = blockLocations.get(blockId) if (locations != null) { locations.foreach { blockManagerId: BlockManagerId => val blockManager = blockManagerInfo.get(blockManagerId) if (blockManager.isDefined) { // Remove the block from the slave's BlockManager. // Doesn't actually wait for a confirmation and the message might get lost. // If message loss becomes frequent, we should add retry logic here. blockManager.get.slaveEndpoint.ask[Boolean](RemoveBlock(blockId)) } } } } // Return a map from the block manager id to max memory and remaining memory. private def memoryStatus: Map[BlockManagerId, (Long, Long)] = { blockManagerInfo.map { case(blockManagerId, info) => (blockManagerId, (info.maxMem, info.remainingMem)) }.toMap } private def storageStatus: Array[StorageStatus] = { blockManagerInfo.map { case (blockManagerId, info) => new StorageStatus(blockManagerId, info.maxMem, Some(info.maxOnHeapMem), Some(info.maxOffHeapMem), info.blocks.asScala) }.toArray } /** * Return the block's status for all block managers, if any. NOTE: This is a * potentially expensive operation and should only be used for testing. 
* * If askSlaves is true, the master queries each block manager for the most updated block * statuses. This is useful when the master is not informed of the given block by all block * managers. */ private def blockStatus( blockId: BlockId, askSlaves: Boolean): Map[BlockManagerId, Future[Option[BlockStatus]]] = { val getBlockStatus = GetBlockStatus(blockId) /* * Rather than blocking on the block status query, master endpoint should simply return * Futures to avoid potential deadlocks. This can arise if there exists a block manager * that is also waiting for this master endpoint's response to a previous message. */ blockManagerInfo.values.map { info => val blockStatusFuture = if (askSlaves) { info.slaveEndpoint.ask[Option[BlockStatus]](getBlockStatus) } else { Future { info.getStatus(blockId) } } (info.blockManagerId, blockStatusFuture) }.toMap } /** * Return the ids of blocks present in all the block managers that match the given filter. * NOTE: This is a potentially expensive operation and should only be used for testing. * * If askSlaves is true, the master queries each block manager for the most updated block * statuses. This is useful when the master is not informed of the given block by all block * managers. */ private def getMatchingBlockIds( filter: BlockId => Boolean, askSlaves: Boolean): Future[Seq[BlockId]] = { val getMatchingBlockIds = GetMatchingBlockIds(filter) Future.sequence( blockManagerInfo.values.map { info => val future = if (askSlaves) { info.slaveEndpoint.ask[Seq[BlockId]](getMatchingBlockIds) } else { Future { info.blocks.asScala.keys.filter(filter).toSeq } } future } ).map(_.flatten.toSeq) } private def externalShuffleServiceIdOnHost(blockManagerId: BlockManagerId): BlockManagerId = { // we need to keep the executor ID of the original executor to let the shuffle service know // which local directories should be used to look for the file BlockManagerId(blockManagerId.executorId, blockManagerId.host, externalShuffleServicePort) } /** * Returns the BlockManagerId with topology information populated, if available. */ private def register( idWithoutTopologyInfo: BlockManagerId, maxOnHeapMemSize: Long, maxOffHeapMemSize: Long, slaveEndpoint: RpcEndpointRef): BlockManagerId = { // the dummy id is not expected to contain the topology information. 
// we get that info here and respond back with a more fleshed out block manager id val id = BlockManagerId( idWithoutTopologyInfo.executorId, idWithoutTopologyInfo.host, idWithoutTopologyInfo.port, topologyMapper.getTopologyForHost(idWithoutTopologyInfo.host)) val time = System.currentTimeMillis() if (!blockManagerInfo.contains(id)) { blockManagerIdByExecutor.get(id.executorId) match { case Some(oldId) => // A block manager of the same executor already exists, so remove it (assumed dead) logError("Got two different block manager registrations on same executor - " + s" will replace old one $oldId with new one $id") removeExecutor(id.executorId) case None => } logInfo("Registering block manager %s with %s RAM, %s".format( id.hostPort, Utils.bytesToString(maxOnHeapMemSize + maxOffHeapMemSize), id)) blockManagerIdByExecutor(id.executorId) = id val externalShuffleServiceBlockStatus = if (externalShuffleServiceRddFetchEnabled) { val externalShuffleServiceBlocks = blockStatusByShuffleService .getOrElseUpdate(externalShuffleServiceIdOnHost(id), new JHashMap[BlockId, BlockStatus]) Some(externalShuffleServiceBlocks) } else { None } blockManagerInfo(id) = new BlockManagerInfo(id, System.currentTimeMillis(), maxOnHeapMemSize, maxOffHeapMemSize, slaveEndpoint, externalShuffleServiceBlockStatus) } listenerBus.post(SparkListenerBlockManagerAdded(time, id, maxOnHeapMemSize + maxOffHeapMemSize, Some(maxOnHeapMemSize), Some(maxOffHeapMemSize))) id } private def updateBlockInfo( blockManagerId: BlockManagerId, blockId: BlockId, storageLevel: StorageLevel, memSize: Long, diskSize: Long): Boolean = { if (!blockManagerInfo.contains(blockManagerId)) { if (blockManagerId.isDriver && !isLocal) { // We intentionally do not register the master (except in local mode), // so we should not indicate failure. return true } else { return false } } if (blockId == null) { blockManagerInfo(blockManagerId).updateLastSeenMs() return true } blockManagerInfo(blockManagerId).updateBlockInfo(blockId, storageLevel, memSize, diskSize) var locations: mutable.HashSet[BlockManagerId] = null if (blockLocations.containsKey(blockId)) { locations = blockLocations.get(blockId) } else { locations = new mutable.HashSet[BlockManagerId] blockLocations.put(blockId, locations) } if (storageLevel.isValid) { locations.add(blockManagerId) } else { locations.remove(blockManagerId) } if (blockId.isRDD && storageLevel.useDisk && externalShuffleServiceRddFetchEnabled) { val externalShuffleServiceId = externalShuffleServiceIdOnHost(blockManagerId) if (storageLevel.isValid) { locations.add(externalShuffleServiceId) } else { locations.remove(externalShuffleServiceId) } } // Remove the block from master tracking if it has been removed on all slaves. 
if (locations.size == 0) { blockLocations.remove(blockId) } true } private def getLocations(blockId: BlockId): Seq[BlockManagerId] = { if (blockLocations.containsKey(blockId)) blockLocations.get(blockId).toSeq else Seq.empty } private def getLocationsAndStatus(blockId: BlockId): Option[BlockLocationsAndStatus] = { val locations = Option(blockLocations.get(blockId)).map(_.toSeq).getOrElse(Seq.empty) val status = locations.headOption.flatMap { bmId => if (externalShuffleServiceRddFetchEnabled && bmId.port == externalShuffleServicePort) { Option(blockStatusByShuffleService(bmId).get(blockId)) } else { blockManagerInfo(bmId).getStatus(blockId) } } if (locations.nonEmpty && status.isDefined) { Some(BlockLocationsAndStatus(locations, status.get)) } else { None } } private def getLocationsMultipleBlockIds( blockIds: Array[BlockId]): IndexedSeq[Seq[BlockManagerId]] = { blockIds.map(blockId => getLocations(blockId)) } /** Get the list of the peers of the given block manager */ private def getPeers(blockManagerId: BlockManagerId): Seq[BlockManagerId] = { val blockManagerIds = blockManagerInfo.keySet if (blockManagerIds.contains(blockManagerId)) { blockManagerIds.filterNot { _.isDriver }.filterNot { _ == blockManagerId }.toSeq } else { Seq.empty } } /** * Returns an [[RpcEndpointRef]] of the [[BlockManagerSlaveEndpoint]] for sending RPC messages. */ private def getExecutorEndpointRef(executorId: String): Option[RpcEndpointRef] = { for ( blockManagerId <- blockManagerIdByExecutor.get(executorId); info <- blockManagerInfo.get(blockManagerId) ) yield { info.slaveEndpoint } } override def onStop(): Unit = { askThreadPool.shutdownNow() } } @DeveloperApi case class BlockStatus(storageLevel: StorageLevel, memSize: Long, diskSize: Long) { def isCached: Boolean = memSize + diskSize > 0 } @DeveloperApi object BlockStatus { def empty: BlockStatus = BlockStatus(StorageLevel.NONE, memSize = 0L, diskSize = 0L) } private[spark] class BlockManagerInfo( val blockManagerId: BlockManagerId, timeMs: Long, val maxOnHeapMem: Long, val maxOffHeapMem: Long, val slaveEndpoint: RpcEndpointRef, val externalShuffleServiceBlockStatus: Option[JHashMap[BlockId, BlockStatus]]) extends Logging { val maxMem = maxOnHeapMem + maxOffHeapMem val externalShuffleServiceEnabled = externalShuffleServiceBlockStatus.isDefined private var _lastSeenMs: Long = timeMs private var _remainingMem: Long = maxMem // Mapping from block id to its status. private val _blocks = new JHashMap[BlockId, BlockStatus] /** * Cached blocks which are not available via the external shuffle service. * This does not include broadcast blocks. */ private val _exclusiveCachedBlocks = new mutable.HashSet[BlockId] def getStatus(blockId: BlockId): Option[BlockStatus] = Option(_blocks.get(blockId)) def updateLastSeenMs() { _lastSeenMs = System.currentTimeMillis() } def updateBlockInfo( blockId: BlockId, storageLevel: StorageLevel, memSize: Long, diskSize: Long) { updateLastSeenMs() val blockExists = _blocks.containsKey(blockId) var originalMemSize: Long = 0 var originalDiskSize: Long = 0 var originalLevel: StorageLevel = StorageLevel.NONE if (blockExists) { // The block exists on the slave already. val blockStatus: BlockStatus = _blocks.get(blockId) originalLevel = blockStatus.storageLevel originalMemSize = blockStatus.memSize originalDiskSize = blockStatus.diskSize if (originalLevel.useMemory) { _remainingMem += originalMemSize } } if (storageLevel.isValid) { /* isValid means it is either stored in-memory or on-disk. 
* The memSize here indicates the data size in or dropped from memory, * externalBlockStoreSize here indicates the data size in or dropped from externalBlockStore, * and the diskSize here indicates the data size in or dropped to disk. * They can be both larger than 0, when a block is dropped from memory to disk. * Therefore, a safe way to set BlockStatus is to set its info in accurate modes. */ var blockStatus: BlockStatus = null if (storageLevel.useMemory) { blockStatus = BlockStatus(storageLevel, memSize = memSize, diskSize = 0) _blocks.put(blockId, blockStatus) _remainingMem -= memSize if (blockExists) { logInfo(s"Updated $blockId in memory on ${blockManagerId.hostPort}" + s" (current size: ${Utils.bytesToString(memSize)}," + s" original size: ${Utils.bytesToString(originalMemSize)}," + s" free: ${Utils.bytesToString(_remainingMem)})") } else { logInfo(s"Added $blockId in memory on ${blockManagerId.hostPort}" + s" (size: ${Utils.bytesToString(memSize)}," + s" free: ${Utils.bytesToString(_remainingMem)})") } } if (storageLevel.useDisk) { blockStatus = BlockStatus(storageLevel, memSize = 0, diskSize = diskSize) _blocks.put(blockId, blockStatus) if (blockExists) { logInfo(s"Updated $blockId on disk on ${blockManagerId.hostPort}" + s" (current size: ${Utils.bytesToString(diskSize)}," + s" original size: ${Utils.bytesToString(originalDiskSize)})") } else { logInfo(s"Added $blockId on disk on ${blockManagerId.hostPort}" + s" (size: ${Utils.bytesToString(diskSize)})") } } if (!blockId.isBroadcast) { if (!externalShuffleServiceEnabled || !storageLevel.useDisk) { _exclusiveCachedBlocks += blockId } else if (blockExists) { // removing block from the exclusive cached blocks when updated to non-exclusive _exclusiveCachedBlocks -= blockId } } externalShuffleServiceBlockStatus.foreach { shuffleServiceBlocks => if (!blockId.isBroadcast && blockStatus.diskSize > 0) { shuffleServiceBlocks.put(blockId, blockStatus) } } } else if (blockExists) { // If isValid is not true, drop the block. _blocks.remove(blockId) _exclusiveCachedBlocks -= blockId externalShuffleServiceBlockStatus.foreach { blockStatus => blockStatus.remove(blockId) } if (originalLevel.useMemory) { logInfo(s"Removed $blockId on ${blockManagerId.hostPort} in memory" + s" (size: ${Utils.bytesToString(originalMemSize)}," + s" free: ${Utils.bytesToString(_remainingMem)})") } if (originalLevel.useDisk) { logInfo(s"Removed $blockId on ${blockManagerId.hostPort} on disk" + s" (size: ${Utils.bytesToString(originalDiskSize)})") } } } def removeBlock(blockId: BlockId) { if (_blocks.containsKey(blockId)) { _remainingMem += _blocks.get(blockId).memSize _blocks.remove(blockId) externalShuffleServiceBlockStatus.foreach { blockStatus => blockStatus.remove(blockId) } } _exclusiveCachedBlocks -= blockId } def remainingMem: Long = _remainingMem def lastSeenMs: Long = _lastSeenMs def blocks: JHashMap[BlockId, BlockStatus] = _blocks def exclusiveCachedBlocks: collection.Set[BlockId] = _exclusiveCachedBlocks override def toString: String = "BlockManagerInfo " + timeMs + " " + _remainingMem def clear() { _blocks.clear() } }
LantaoJin/spark
core/src/main/scala/org/apache/spark/storage/BlockManagerMasterEndpoint.scala
Scala
apache-2.0
27,791
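// A minimal usage sketch for the BlockStatus helpers defined in the BlockManagerMasterEndpoint.scala
// entry above. The sizes are made-up illustration values; only BlockStatus, BlockStatus.empty and
// StorageLevel from org.apache.spark.storage are assumed.
import org.apache.spark.storage.{BlockStatus, StorageLevel}

object BlockStatusExample extends App {
  // a block persisted to disk only: isCached is true because memSize + diskSize > 0
  val onDisk = BlockStatus(StorageLevel.DISK_ONLY, memSize = 0L, diskSize = 4096L)
  println(onDisk.isCached)             // true
  // the empty status used once a block has been dropped everywhere
  println(BlockStatus.empty.isCached)  // false
}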
/* * Copyright 2013 Twitter Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twitter.storehaus import com.twitter.bijection.Injection import com.twitter.util.{ Future, Time } /** * ReadableStore enrichment for ReadableStore[OuterK, ReadableStore[InnerK, V]] * on top of a ReadableStore[K, V] * * @author Ruban Monu */ object PivotedReadableStore { def fromMap[K, OuterK, InnerK, V](m: Map[K, V])( implicit inj: Injection[(OuterK, InnerK), K]): PivotedReadableStore[K, OuterK, InnerK, V] = new PivotedReadableStore[K, OuterK, InnerK, V](ReadableStore.fromMap(m))(inj) def fromReadableStore[K, OuterK, InnerK, V](store: ReadableStore[K, V])( implicit inj: Injection[(OuterK, InnerK), K]): PivotedReadableStore[K, OuterK, InnerK, V] = new PivotedReadableStore[K, OuterK, InnerK, V](store)(inj) } class PivotedReadableStore[K, -OuterK, InnerK, +V](store: ReadableStore[K, V])( implicit inj: Injection[(OuterK, InnerK), K]) extends ReadableStore[OuterK, ReadableStore[InnerK, V]] { override def get(outerK: OuterK) : Future[Option[ReadableStore[InnerK, V]]] = Future.value(Some(new ReadableStore[InnerK, V]() { override def get(innerK: InnerK) = store.get(inj((outerK, innerK))) })) override def close(time: Time): Future[Unit] = store.close(time) }
twitter/storehaus
storehaus-core/src/main/scala/com/twitter/storehaus/PivotedReadableStore.scala
Scala
apache-2.0
1,836
package uk.co.seansaville.ninetyninescalaprobs.lists /** * Problem 20: Remove the Kth element from a list. */ object Problem20 { def removeAt[T](k: Int, list: List[T]): (List[T], T) = { def removeHelper[U](k: Int, list: List[U], temp: List[U]): (List[U], U) = (k, list) match { case (0, h :: t) => (temp.reverse ++ t, h) case (n, h :: t) => removeHelper(n - 1, t, h :: temp) case (_, Nil) => throw new NoSuchElementException } removeHelper(k, list, List()) } }
seansaville/99scalaprobs
src/main/scala/uk/co/seansaville/ninetyninescalaprobs/lists/Problem20.scala
Scala
mit
505
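// A quick sketch exercising Problem20.removeAt from the entry above: removing the element at
// index 1 returns the remaining list paired with the removed element; an out-of-range index
// throws NoSuchElementException, as coded in removeHelper.
import uk.co.seansaville.ninetyninescalaprobs.lists.Problem20

object Problem20Example extends App {
  println(Problem20.removeAt(1, List('a', 'b', 'c', 'd')))  // (List(a, c, d),b)
}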
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.scheduler import java.util.Properties import javax.annotation.Nullable import scala.collection.Map import com.fasterxml.jackson.annotation.JsonTypeInfo import org.apache.spark.TaskEndReason import org.apache.spark.annotation.{DeveloperApi, Since} import org.apache.spark.executor.{ExecutorMetrics, TaskMetrics} import org.apache.spark.resource.ResourceProfile import org.apache.spark.scheduler.cluster.ExecutorInfo import org.apache.spark.storage.{BlockManagerId, BlockUpdatedInfo} @DeveloperApi @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY, property = "Event") trait SparkListenerEvent { /* Whether output this event to the event log */ protected[spark] def logEvent: Boolean = true } @DeveloperApi case class SparkListenerStageSubmitted(stageInfo: StageInfo, properties: Properties = null) extends SparkListenerEvent @DeveloperApi case class SparkListenerStageCompleted(stageInfo: StageInfo) extends SparkListenerEvent @DeveloperApi case class SparkListenerTaskStart(stageId: Int, stageAttemptId: Int, taskInfo: TaskInfo) extends SparkListenerEvent @DeveloperApi case class SparkListenerTaskGettingResult(taskInfo: TaskInfo) extends SparkListenerEvent @DeveloperApi case class SparkListenerSpeculativeTaskSubmitted( stageId: Int, stageAttemptId: Int = 0) extends SparkListenerEvent @DeveloperApi case class SparkListenerTaskEnd( stageId: Int, stageAttemptId: Int, taskType: String, reason: TaskEndReason, taskInfo: TaskInfo, taskExecutorMetrics: ExecutorMetrics, // may be null if the task has failed @Nullable taskMetrics: TaskMetrics) extends SparkListenerEvent @DeveloperApi case class SparkListenerJobStart( jobId: Int, time: Long, stageInfos: Seq[StageInfo], properties: Properties = null) extends SparkListenerEvent { // Note: this is here for backwards-compatibility with older versions of this event which // only stored stageIds and not StageInfos: val stageIds: Seq[Int] = stageInfos.map(_.stageId) } @DeveloperApi case class SparkListenerJobEnd( jobId: Int, time: Long, jobResult: JobResult) extends SparkListenerEvent @DeveloperApi case class SparkListenerEnvironmentUpdate(environmentDetails: Map[String, Seq[(String, String)]]) extends SparkListenerEvent @DeveloperApi case class SparkListenerBlockManagerAdded( time: Long, blockManagerId: BlockManagerId, maxMem: Long, maxOnHeapMem: Option[Long] = None, maxOffHeapMem: Option[Long] = None) extends SparkListenerEvent { } @DeveloperApi case class SparkListenerBlockManagerRemoved(time: Long, blockManagerId: BlockManagerId) extends SparkListenerEvent @DeveloperApi case class SparkListenerUnpersistRDD(rddId: Int) extends SparkListenerEvent @DeveloperApi case class SparkListenerExecutorAdded(time: Long, executorId: String, executorInfo: 
ExecutorInfo) extends SparkListenerEvent @DeveloperApi case class SparkListenerExecutorRemoved(time: Long, executorId: String, reason: String) extends SparkListenerEvent @DeveloperApi @deprecated("use SparkListenerExecutorExcluded instead", "3.1.0") case class SparkListenerExecutorBlacklisted( time: Long, executorId: String, taskFailures: Int) extends SparkListenerEvent @DeveloperApi @Since("3.1.0") case class SparkListenerExecutorExcluded( time: Long, executorId: String, taskFailures: Int) extends SparkListenerEvent @deprecated("use SparkListenerExecutorExcludedForStage instead", "3.1.0") @DeveloperApi case class SparkListenerExecutorBlacklistedForStage( time: Long, executorId: String, taskFailures: Int, stageId: Int, stageAttemptId: Int) extends SparkListenerEvent @DeveloperApi @Since("3.1.0") case class SparkListenerExecutorExcludedForStage( time: Long, executorId: String, taskFailures: Int, stageId: Int, stageAttemptId: Int) extends SparkListenerEvent @deprecated("use SparkListenerNodeExcludedForStage instead", "3.1.0") @DeveloperApi case class SparkListenerNodeBlacklistedForStage( time: Long, hostId: String, executorFailures: Int, stageId: Int, stageAttemptId: Int) extends SparkListenerEvent @DeveloperApi @Since("3.1.0") case class SparkListenerNodeExcludedForStage( time: Long, hostId: String, executorFailures: Int, stageId: Int, stageAttemptId: Int) extends SparkListenerEvent @deprecated("use SparkListenerExecutorUnexcluded instead", "3.1.0") @DeveloperApi case class SparkListenerExecutorUnblacklisted(time: Long, executorId: String) extends SparkListenerEvent @DeveloperApi case class SparkListenerExecutorUnexcluded(time: Long, executorId: String) extends SparkListenerEvent @deprecated("use SparkListenerNodeExcluded instead", "3.1.0") @DeveloperApi case class SparkListenerNodeBlacklisted( time: Long, hostId: String, executorFailures: Int) extends SparkListenerEvent @DeveloperApi @Since("3.1.0") case class SparkListenerNodeExcluded( time: Long, hostId: String, executorFailures: Int) extends SparkListenerEvent @deprecated("use SparkListenerNodeUnexcluded instead", "3.1.0") @DeveloperApi case class SparkListenerNodeUnblacklisted(time: Long, hostId: String) extends SparkListenerEvent @DeveloperApi @Since("3.1.0") case class SparkListenerNodeUnexcluded(time: Long, hostId: String) extends SparkListenerEvent @DeveloperApi @Since("3.1.0") case class SparkListenerUnschedulableTaskSetAdded( stageId: Int, stageAttemptId: Int) extends SparkListenerEvent @DeveloperApi @Since("3.1.0") case class SparkListenerUnschedulableTaskSetRemoved( stageId: Int, stageAttemptId: Int) extends SparkListenerEvent @DeveloperApi case class SparkListenerBlockUpdated(blockUpdatedInfo: BlockUpdatedInfo) extends SparkListenerEvent @DeveloperApi case class SparkListenerMiscellaneousProcessAdded(time: Long, processId: String, info: MiscellaneousProcessDetails) extends SparkListenerEvent /** * Periodic updates from executors. * @param execId executor id * @param accumUpdates sequence of (taskId, stageId, stageAttemptId, accumUpdates) * @param executorUpdates executor level per-stage metrics updates * * @since 3.1.0 */ @DeveloperApi case class SparkListenerExecutorMetricsUpdate( execId: String, accumUpdates: Seq[(Long, Int, Int, Seq[AccumulableInfo])], executorUpdates: Map[(Int, Int), ExecutorMetrics] = Map.empty) extends SparkListenerEvent /** * Peak metric values for the executor for the stage, written to the history log at stage * completion. 
* @param execId executor id * @param stageId stage id * @param stageAttemptId stage attempt * @param executorMetrics executor level metrics peak values */ @DeveloperApi case class SparkListenerStageExecutorMetrics( execId: String, stageId: Int, stageAttemptId: Int, executorMetrics: ExecutorMetrics) extends SparkListenerEvent @DeveloperApi case class SparkListenerApplicationStart( appName: String, appId: Option[String], time: Long, sparkUser: String, appAttemptId: Option[String], driverLogs: Option[Map[String, String]] = None, driverAttributes: Option[Map[String, String]] = None) extends SparkListenerEvent @DeveloperApi case class SparkListenerApplicationEnd(time: Long) extends SparkListenerEvent /** * An internal class that describes the metadata of an event log. */ @DeveloperApi case class SparkListenerLogStart(sparkVersion: String) extends SparkListenerEvent @DeveloperApi @Since("3.1.0") case class SparkListenerResourceProfileAdded(resourceProfile: ResourceProfile) extends SparkListenerEvent /** * Interface for listening to events from the Spark scheduler. Most applications should probably * extend SparkListener or SparkFirehoseListener directly, rather than implementing this class. * * Note that this is an internal interface which might change in different Spark releases. */ private[spark] trait SparkListenerInterface { /** * Called when a stage completes successfully or fails, with information on the completed stage. */ def onStageCompleted(stageCompleted: SparkListenerStageCompleted): Unit /** * Called when a stage is submitted */ def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted): Unit /** * Called when a task starts */ def onTaskStart(taskStart: SparkListenerTaskStart): Unit /** * Called when a task begins remotely fetching its result (will not be called for tasks that do * not need to fetch the result remotely). */ def onTaskGettingResult(taskGettingResult: SparkListenerTaskGettingResult): Unit /** * Called when a task ends */ def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit /** * Called when a job starts */ def onJobStart(jobStart: SparkListenerJobStart): Unit /** * Called when a job ends */ def onJobEnd(jobEnd: SparkListenerJobEnd): Unit /** * Called when environment properties have been updated */ def onEnvironmentUpdate(environmentUpdate: SparkListenerEnvironmentUpdate): Unit /** * Called when a new block manager has joined */ def onBlockManagerAdded(blockManagerAdded: SparkListenerBlockManagerAdded): Unit /** * Called when an existing block manager has been removed */ def onBlockManagerRemoved(blockManagerRemoved: SparkListenerBlockManagerRemoved): Unit /** * Called when an RDD is manually unpersisted by the application */ def onUnpersistRDD(unpersistRDD: SparkListenerUnpersistRDD): Unit /** * Called when the application starts */ def onApplicationStart(applicationStart: SparkListenerApplicationStart): Unit /** * Called when the application ends */ def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd): Unit /** * Called when the driver receives task metrics from an executor in a heartbeat. */ def onExecutorMetricsUpdate(executorMetricsUpdate: SparkListenerExecutorMetricsUpdate): Unit /** * Called with the peak memory metrics for a given (executor, stage) combination. Note that this * is only present when reading from the event log (as in the history server), and is never * called in a live application. */ def onStageExecutorMetrics(executorMetrics: SparkListenerStageExecutorMetrics): Unit /** * Called when the driver registers a new executor. 
*/ def onExecutorAdded(executorAdded: SparkListenerExecutorAdded): Unit /** * Called when the driver removes an executor. */ def onExecutorRemoved(executorRemoved: SparkListenerExecutorRemoved): Unit /** * Called when the driver excludes an executor for a Spark application. */ @deprecated("use onExecutorExcluded instead", "3.1.0") def onExecutorBlacklisted(executorBlacklisted: SparkListenerExecutorBlacklisted): Unit /** * Called when the driver excludes an executor for a Spark application. */ def onExecutorExcluded(executorExcluded: SparkListenerExecutorExcluded): Unit /** * Called when the driver excludes an executor for a stage. */ @deprecated("use onExecutorExcludedForStage instead", "3.1.0") def onExecutorBlacklistedForStage( executorBlacklistedForStage: SparkListenerExecutorBlacklistedForStage): Unit /** * Called when the driver excludes an executor for a stage. */ def onExecutorExcludedForStage( executorExcludedForStage: SparkListenerExecutorExcludedForStage): Unit /** * Called when the driver excludes a node for a stage. */ @deprecated("use onNodeExcludedForStage instead", "3.1.0") def onNodeBlacklistedForStage(nodeBlacklistedForStage: SparkListenerNodeBlacklistedForStage): Unit /** * Called when the driver excludes a node for a stage. */ def onNodeExcludedForStage(nodeExcludedForStage: SparkListenerNodeExcludedForStage): Unit /** * Called when the driver re-enables a previously excluded executor. */ @deprecated("use onExecutorUnexcluded instead", "3.1.0") def onExecutorUnblacklisted(executorUnblacklisted: SparkListenerExecutorUnblacklisted): Unit /** * Called when the driver re-enables a previously excluded executor. */ def onExecutorUnexcluded(executorUnexcluded: SparkListenerExecutorUnexcluded): Unit /** * Called when the driver excludes a node for a Spark application. */ @deprecated("use onNodeExcluded instead", "3.1.0") def onNodeBlacklisted(nodeBlacklisted: SparkListenerNodeBlacklisted): Unit /** * Called when the driver excludes a node for a Spark application. */ def onNodeExcluded(nodeExcluded: SparkListenerNodeExcluded): Unit /** * Called when the driver re-enables a previously excluded node. */ @deprecated("use onNodeUnexcluded instead", "3.1.0") def onNodeUnblacklisted(nodeUnblacklisted: SparkListenerNodeUnblacklisted): Unit /** * Called when the driver re-enables a previously excluded node. */ def onNodeUnexcluded(nodeUnexcluded: SparkListenerNodeUnexcluded): Unit /** * Called when a taskset becomes unschedulable due to exludeOnFailure and dynamic allocation * is enabled. */ def onUnschedulableTaskSetAdded( unschedulableTaskSetAdded: SparkListenerUnschedulableTaskSetAdded): Unit /** * Called when an unschedulable taskset becomes schedulable and dynamic allocation * is enabled. */ def onUnschedulableTaskSetRemoved( unschedulableTaskSetRemoved: SparkListenerUnschedulableTaskSetRemoved): Unit /** * Called when the driver receives a block update info. */ def onBlockUpdated(blockUpdated: SparkListenerBlockUpdated): Unit /** * Called when a speculative task is submitted */ def onSpeculativeTaskSubmitted(speculativeTask: SparkListenerSpeculativeTaskSubmitted): Unit /** * Called when other events like SQL-specific events are posted. */ def onOtherEvent(event: SparkListenerEvent): Unit /** * Called when a Resource Profile is added to the manager. */ def onResourceProfileAdded(event: SparkListenerResourceProfileAdded): Unit } /** * :: DeveloperApi :: * A default implementation for `SparkListenerInterface` that has no-op implementations for * all callbacks. 
* * Note that this is an internal interface which might change in different Spark releases. */ @DeveloperApi abstract class SparkListener extends SparkListenerInterface { override def onStageCompleted(stageCompleted: SparkListenerStageCompleted): Unit = { } override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted): Unit = { } override def onTaskStart(taskStart: SparkListenerTaskStart): Unit = { } override def onTaskGettingResult(taskGettingResult: SparkListenerTaskGettingResult): Unit = { } override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = { } override def onJobStart(jobStart: SparkListenerJobStart): Unit = { } override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = { } override def onEnvironmentUpdate(environmentUpdate: SparkListenerEnvironmentUpdate): Unit = { } override def onBlockManagerAdded(blockManagerAdded: SparkListenerBlockManagerAdded): Unit = { } override def onBlockManagerRemoved( blockManagerRemoved: SparkListenerBlockManagerRemoved): Unit = { } override def onUnpersistRDD(unpersistRDD: SparkListenerUnpersistRDD): Unit = { } override def onApplicationStart(applicationStart: SparkListenerApplicationStart): Unit = { } override def onApplicationEnd(applicationEnd: SparkListenerApplicationEnd): Unit = { } override def onExecutorMetricsUpdate( executorMetricsUpdate: SparkListenerExecutorMetricsUpdate): Unit = { } override def onStageExecutorMetrics( executorMetrics: SparkListenerStageExecutorMetrics): Unit = { } override def onExecutorAdded(executorAdded: SparkListenerExecutorAdded): Unit = { } override def onExecutorRemoved(executorRemoved: SparkListenerExecutorRemoved): Unit = { } override def onExecutorBlacklisted( executorBlacklisted: SparkListenerExecutorBlacklisted): Unit = { } override def onExecutorExcluded( executorExcluded: SparkListenerExecutorExcluded): Unit = { } override def onExecutorBlacklistedForStage( executorBlacklistedForStage: SparkListenerExecutorBlacklistedForStage): Unit = { } override def onExecutorExcludedForStage( executorExcludedForStage: SparkListenerExecutorExcludedForStage): Unit = { } override def onNodeBlacklistedForStage( nodeBlacklistedForStage: SparkListenerNodeBlacklistedForStage): Unit = { } override def onNodeExcludedForStage( nodeExcludedForStage: SparkListenerNodeExcludedForStage): Unit = { } override def onExecutorUnblacklisted( executorUnblacklisted: SparkListenerExecutorUnblacklisted): Unit = { } override def onExecutorUnexcluded( executorUnexcluded: SparkListenerExecutorUnexcluded): Unit = { } override def onNodeBlacklisted( nodeBlacklisted: SparkListenerNodeBlacklisted): Unit = { } override def onNodeExcluded( nodeExcluded: SparkListenerNodeExcluded): Unit = { } override def onNodeUnblacklisted( nodeUnblacklisted: SparkListenerNodeUnblacklisted): Unit = { } override def onNodeUnexcluded( nodeUnexcluded: SparkListenerNodeUnexcluded): Unit = { } override def onUnschedulableTaskSetAdded( unschedulableTaskSetAdded: SparkListenerUnschedulableTaskSetAdded): Unit = { } override def onUnschedulableTaskSetRemoved( unschedulableTaskSetRemoved: SparkListenerUnschedulableTaskSetRemoved): Unit = { } override def onBlockUpdated(blockUpdated: SparkListenerBlockUpdated): Unit = { } override def onSpeculativeTaskSubmitted( speculativeTask: SparkListenerSpeculativeTaskSubmitted): Unit = { } override def onOtherEvent(event: SparkListenerEvent): Unit = { } override def onResourceProfileAdded(event: SparkListenerResourceProfileAdded): Unit = { } }
maropu/spark
core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
Scala
apache-2.0
18,656
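// A minimal sketch of a concrete listener built on the SparkListener base class above. The task
// duration and ids come from TaskInfo/SparkListenerTaskEnd; registering it on a live SparkContext
// (the commented line) is assumed usage, not part of the entry.
import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd, SparkListenerTaskEnd}

class TaskTimingListener extends SparkListener {
  override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = {
    val info = taskEnd.taskInfo
    println(s"task ${info.taskId} of stage ${taskEnd.stageId} finished in ${info.duration} ms")
  }
  override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit =
    println(s"job ${jobEnd.jobId} ended with ${jobEnd.jobResult}")
}
// sc.addSparkListener(new TaskTimingListener)  // attach to an existing SparkContext `sc`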
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. * */ package io.github.mandar2812.dynaml.graph import breeze.linalg.{DenseMatrix, DenseVector} import breeze.stats.distributions.Uniform import com.tinkerpop.blueprints.{Graph, GraphFactory} import com.tinkerpop.frames.{FramedGraph, FramedGraphFactory} import io.github.mandar2812.dynaml.graph.utils.{Neuron, Synapse} import org.apache.log4j.Logger import scala.collection.JavaConversions import scala.collection.JavaConversions._ /** * Represents the underlying graph of a * feed-forward neural network. * * @param baseGraph The base graph object, [[FFNeuralGraph]] encapsulates * an existing graph object of type [[FramedGraph]] and builds * upon it by defining a set of behaviours expected from Neural Network * graphs * * @param hidden The number of hidden layers in the network. * * @param act A list of Strings containing the activations for each layer. * Options for activation functions are: * 1) "logsig" or "sigmoid" * 2) "tansig" * 3) "linear" * 4) "recLinear" * */ class FFNeuralGraph(baseGraph: FramedGraph[Graph], act: List[String], hidden: Int = 1) extends NeuralGraph[FramedGraph[Graph], DenseVector[Double], DenseVector[Double]]{ override protected val g = baseGraph val hidden_layers = hidden val activations = act /** * Get as a scala [[Iterable]] the neurons for a particular layer. * * @param layer The layer number, can vary from 0 (input layer) * to hidden_layers + 1 (output layer) * * @return The neurons in the particular layer as [[Neuron]] objects * * */ def getLayer(layer: Int) = JavaConversions.iterableAsScalaIterable( g.getVertices[Neuron]("layer", layer, classOf[Neuron]) ) /** * Get as a scala [[Iterable]] the synapses between layer l and l-1. * * @param layer The layer number, can vary from 1 (input layer synapses) * to hidden_layers + 1 (output layer synapses) * * @return The respective synapses as [[Synapse]] objects * * */ def getLayerSynapses(layer: Int) = JavaConversions.iterableAsScalaIterable( g.getEdges[Synapse]("layer", layer, classOf[Synapse]) ) val num_inputs: Int = getLayer(0).size - 1 val num_outputs: Int = getLayer(hidden_layers+1).size /** * Perform a forward pass through the network to * calculate the predicted output. * */ override val forwardPass: (DenseVector[Double]) => DenseVector[Double] = (pattern) => { //Set the pattern as input to layer 0 val inputnodes = getLayer(0) filter (_.getNeuronType() == "input") inputnodes.foreach(node => { val id = node.getNID() node.setValue(pattern(id-1)) }) val outputs:Map[Int, Double] = getLayer(hidden_layers+1) .map(outputNeuron => (outputNeuron.getNID(), Neuron.getLocalField(outputNeuron)._1)) .toMap DenseVector.tabulate[Double](num_outputs)(i => outputs(i+1)) } /** * Get as a breeze [[DenseMatrix]] the synapses between layer l and l-1. 
* * @param layer The layer number, can vary from 1 (input layer synapses) * to hidden_layers + 1 (output layer synapses) * * @return The respective synapses as elements of a matrix * * */ def getSynapsesAsMatrix(layer: Int): DenseMatrix[Double] = { val synapses = getLayerSynapses(layer) val inN = getLayer(layer-1).toList.length val outN = if(layer <= hidden_layers) getLayer(layer).toList.length-1 else getLayer(layer).toList.length val synapsesMap: Map[(Int, Int), Double] = synapses.map(s => ( (s.getPostSynapticNeuron().getNID(), s.getPreSynapticNeuron().getNID()), s.getWeight())).toMap DenseMatrix.tabulate[Double](outN, inN)((i,j) => synapsesMap((i+1, j+1))) } /** * Perform a forward pass through the network to * calculate the predicted output for a batch of * input points. * * @param procInputs The input batch as a List of Lists * where each level of the top level List * represents an input node. On the other hand * each element of the lower level list represents * a particular dimension of a particular data point * in the data set. * */ def predictBatch(procInputs: List[List[Double]]) = { getLayer(0).foreach(node => node.getNeuronType() match { case "input" => node.setValueBuffer(procInputs(node.getNID() - 1).toArray) node.setLocalFieldBuffer(procInputs(node.getNID() - 1).toArray) case "bias" => node.setValueBuffer(Array.fill[Double](procInputs.head.length)(1.0)) node.setLocalFieldBuffer(Array.fill[Double](procInputs.head.length)(1.0)) }) (1 to hidden_layers).foreach(layer => { getLayer(layer).foreach(node => node.getNeuronType() match { case "perceptron" => val (locfield, field) = Neuron.getLocalFieldBuffer(node) node.setLocalFieldBuffer(locfield) node.setValueBuffer(field) case "bias" => node.setValueBuffer(Array.fill[Double](procInputs.head.length)(1.0)) node.setLocalFieldBuffer(Array.fill[Double](procInputs.head.length)(1.0)) }) }) getLayer(hidden_layers+1) .map(node => (node.getNID()-1, Neuron.getLocalFieldBuffer(node)._1.zipWithIndex.map(_.swap).toMap)) .toMap } } object FFNeuralGraph { val manager: FramedGraphFactory = new FramedGraphFactory private val logger = Logger.getLogger(this.getClass) /** * Create a [[FFNeuralGraph]] object with * [[FramedGraph]] as the base graph. * * @param num_inputs Number of input dimensions * @param num_outputs Number of input dimensions * @param hidden_layers Number of hidden layers * @param nCounts The number of neurons in each hidden layer * @param activations The activation functions for each layer * @param biasFlag Indicates if bias unit is to be created. 
* * */ def apply(num_inputs: Int, num_outputs: Int, hidden_layers: Int = 1, activations: List[String], nCounts:List[Int] = List(), biasFlag: Boolean = true): FFNeuralGraph = { val uni = new Uniform(-1.0, 1.0) val neuronCounts:List[Int] = if(nCounts.isEmpty) List.tabulate[Int](hidden_layers+1)(i => { if(i <= hidden_layers) 3 else num_outputs }) else nCounts val graphconfig = Map("blueprints.graph" -> "com.tinkerpop.blueprints.impls.tg.TinkerGraph") val fg = manager.create(GraphFactory.open(mapAsJavaMap(graphconfig))) (0 to hidden_layers+1).foreach((layer) => { logger.info("Initializing layer "+layer) //For each layer create neurons if(layer == 0) { (1 to num_inputs).foreach(inputnode => { //create input node val inNode: Neuron = fg.addVertex((0,inputnode), classOf[Neuron]) inNode.setLayer(0) inNode.setNID(inputnode) inNode.setNeuronType("input") }) //Create Bias unit if(biasFlag) { val biasInput: Neuron = fg.addVertex((0,num_inputs+1), classOf[Neuron]) biasInput.setLayer(0) biasInput.setNeuronType("bias") biasInput.setNID(num_inputs+1) } } else { val num_neurons = if(layer == hidden_layers+1) num_outputs else neuronCounts(layer-1) (1 to num_neurons).foreach(neuronID => { //create neuron val neuron: Neuron = fg.addVertex((layer, neuronID), classOf[Neuron]) neuron.setLayer(layer) neuron.setNID(neuronID) neuron.setActivationFunc(activations(layer-1)) neuron.setNeuronType("perceptron") //Wire incoming synapses val n = fg.getVertices[Neuron]("layer", layer-1, classOf[Neuron]) n.foreach(vertex => { val synapse: Synapse = fg.addEdge((layer, vertex.getNID(), neuron.getNID()), vertex.asVertex(), neuron.asVertex(), "synapse", classOf[Synapse]) synapse.setLayer(layer) synapse.setWeight(uni.draw) synapse.setPrevWeightUpdate(0.0) }) }) //Create Bias unit for layer if it is not an output layer if(layer < hidden_layers+1 && biasFlag) { val biasLayerL: Neuron = fg.addVertex((layer, num_neurons+1), classOf[Neuron]) biasLayerL.setLayer(layer) biasLayerL.setNeuronType("bias") biasLayerL.setNID(num_neurons+1) } } }) new FFNeuralGraph(fg, activations, hidden_layers) } }
transcendent-ai-labs/DynaML
dynaml-core/src/main/scala/io/github/mandar2812/dynaml/graph/FFNeuralGraph.scala
Scala
apache-2.0
9,499
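// A small sketch of the FFNeuralGraph factory above: two inputs, one hidden layer of three
// "tansig" neurons and a single "linear" output. Synapse weights are initialised uniformly at
// random, so the printed prediction differs between runs.
import breeze.linalg.DenseVector
import io.github.mandar2812.dynaml.graph.FFNeuralGraph

object FFNeuralGraphExample extends App {
  val net = FFNeuralGraph(num_inputs = 2, num_outputs = 1, hidden_layers = 1,
    activations = List("tansig", "linear"), nCounts = List(3))
  val prediction: DenseVector[Double] = net.forwardPass(DenseVector(0.5, -1.0))
  println(prediction)
}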
/* * Copyright (C) 2016 Nikos Katzouris * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package metric import lomrf.logic.{AtomicFormula, Constant, Variable} /** * Created by nkatz on 18/12/19. */ object Test extends App { val metric = AtomMetric(HungarianMatcher) val d = metric.distance( IndexedSeq(AtomicFormula("A", Vector(Variable("x"), Constant("R")))), IndexedSeq(AtomicFormula("B", Vector(Variable("y"), Constant("E")))) ) println(d) }
nkatzz/OLED
src/main/scala/metric/Test.scala
Scala
gpl-3.0
1,077
package thought import com.twitter.finatra.http.Controller import com.twitter.finatra.request.QueryParam import jarta.thoughtservice.thriftscala.{Thought, ThoughtService} class ThoughtController(thoughtService: ThoughtService.FutureIface) extends Controller { get("/thought/:id") { request: GetThoughtRequest => thoughtService.getThought(request.id) } put("/thought") { request: PutThoughtRequest => thoughtService.insertThought(Thought(0, request.description)) } delete("/thought/:id") { request: DeleteThoughtRequest => thoughtService.deleteThought(request.id) } } case class GetThoughtRequest(@QueryParam id: Int) case class PutThoughtRequest(@QueryParam description: String) case class DeleteThoughtRequest(@QueryParam id: Int)
lbunschoten/jarta
apps-bff/src/main/scala/thought/ThoughtController.scala
Scala
apache-2.0
764
package ru.tomtrix.ttl import org.eclipse.swt.widgets._ import akka.actor.ActorSystem object GUI { def createApp(title: String, width: Int, height: Int)(f: (Shell, ActorSystem) => Unit) { // Akka Actor System val system = ActorSystem(title) // display & shell val display = Display.getDefault val shell = new Shell(display) // customize the shell shell setText title shell setSize (width, height) shell setMinimumSize shell.getSize putToCenter(shell) f(shell, system) // main loop while (!shell.isDisposed) if (!display.readAndDispatch()) display.sleep //close everything display.dispose() system.shutdown() } def runInGUIThread(f: => Unit) { Display.getDefault.asyncExec(new Runnable { def run() = f }) } def putToCenter(shell: Shell) { val display = Display.getDefault shell setLocation ( display.getBounds.width/2 - shell.getBounds.width/2, display.getBounds.height/2 - shell.getBounds.height/2 ) } }
tom-trix/TomTrixLibrary
src/main/scala/ru/tomtrix/ttl/GUI.scala
Scala
apache-2.0
1,032
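// A hedged sketch of GUI.createApp above. The SWT Label is purely illustrative; note that
// createApp itself never calls shell.open(), so the callback opens the shell before the
// event loop starts.
import org.eclipse.swt.SWT
import org.eclipse.swt.widgets.Label
import ru.tomtrix.ttl.GUI

object GuiExample extends App {
  GUI.createApp("TTL demo", 400, 300) { (shell, system) =>
    val label = new Label(shell, SWT.NONE)
    label.setText(s"Actor system '${system.name}' is running")
    label.pack()
    shell.open()
  }
}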
/*********************************************************************** * Copyright (c) 2013-2017 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.accumulo.iterators import com.esotericsoftware.kryo.io.Output import org.apache.accumulo.core.client.IteratorSetting import org.apache.accumulo.core.data.{Key, Value} import org.apache.accumulo.core.iterators.user.RowEncodingIterator import org.apache.accumulo.core.iterators.{IteratorEnvironment, SortedKeyValueIterator} import org.locationtech.geomesa.features.kryo.KryoFeatureSerializer import org.locationtech.geomesa.features.kryo.impl.{KryoFeatureDeserialization, KryoFeatureSerialization} import org.locationtech.geomesa.index.iterators.IteratorCache import org.locationtech.geomesa.utils.cache.CacheKeyGenerator import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes import org.opengis.feature.simple.SimpleFeatureType /** * Assumes cq are byte-encoded attribute number */ class KryoVisibilityRowEncoder extends RowEncodingIterator { private var sft: SimpleFeatureType = _ private var nullBytes: Array[Array[Byte]] = _ private var offsets: Array[Int] = _ private var offsetStart: Int = -1 private val output: Output = new Output(128, -1) override def init(source: SortedKeyValueIterator[Key, Value], options: java.util.Map[String, String], env: IteratorEnvironment): Unit = { super.init(source, options, env) sft = IteratorCache.sft(options.get(KryoVisibilityRowEncoder.SftOpt)) if (offsets == null || offsets.length != sft.getAttributeCount) { offsets = Array.ofDim[Int](sft.getAttributeCount) } val cacheKey = CacheKeyGenerator.cacheKey(sft) nullBytes = KryoFeatureSerialization.getWriters(cacheKey, sft).map { writer => output.clear() writer(output, null) output.toBytes } } override def rowEncoder(keys: java.util.List[Key], values: java.util.List[Value]): Value = { if (values.size() == 1) { return values.get(0) } val allValues = Array.ofDim[Array[Byte]](sft.getAttributeCount) var i = 0 while (i < keys.size) { val cq = keys.get(i).getColumnQualifier val comma = cq.find(",") val indices = if (comma == -1) cq.getBytes.map(_.toInt) else cq.getBytes.drop(comma + 1).map(_.toInt) val bytes = values.get(i).get readOffsets(bytes) // set the non-null values indices.foreach { index => val endIndex = offsets.indexWhere(_ != -1, index + 1) val end = if (endIndex == -1) offsetStart else offsets(endIndex) val length = end - offsets(index) val values = Array.ofDim[Byte](length) System.arraycopy(bytes, offsets(index), values, 0, length) allValues(index) = values } i += 1 } i = 0 while (i < allValues.length) { if (allValues(i) == null) { allValues(i) = nullBytes(i) } i += 1 } // TODO if we don't have a geometry, skip the record? 
KryoVisibilityRowEncoder.encode(allValues, output, offsets) } /** * Reads offsets in the 'offsets' array and sets the start of the offset block * * @param bytes kryo feature bytes * @return */ private def readOffsets(bytes: Array[Byte]): Unit = { val input = KryoFeatureDeserialization.getInput(bytes, 0, bytes.length) // reset our offsets input.setPosition(1) // skip version offsetStart = input.readInt() input.setPosition(offsetStart) // set to offsets start var i = 0 while (i < offsets.length) { offsets(i) = if (input.position < input.limit) input.readInt(true) else -1 i += 1 } } override def rowDecoder(rowKey: Key, rowValue: Value): java.util.SortedMap[Key, Value] = throw new NotImplementedError("") override def deepCopy(env: IteratorEnvironment): SortedKeyValueIterator[Key, Value] = { val iterator = new KryoVisibilityRowEncoder if (sourceIter != null) { iterator.sourceIter = sourceIter.deepCopy(env) } iterator.sft = sft iterator.offsets = Array.ofDim[Int](sft.getAttributeCount) iterator.nullBytes = nullBytes iterator } } object KryoVisibilityRowEncoder { val SftOpt = "sft" val DefaultPriority = 21 // needs to be first thing that runs after the versioning iterator at 20 def configure(sft: SimpleFeatureType, priority: Int = DefaultPriority): IteratorSetting = { val is = new IteratorSetting(priority, "feature-merge-iter", classOf[KryoVisibilityRowEncoder]) is.addOption(SftOpt, SimpleFeatureTypes.encodeType(sft, includeUserData = true)) // need user data for id calc is } private def encode(values: Array[Array[Byte]], output: Output, offsets: Array[Int]): Value = { output.clear() output.writeInt(KryoFeatureSerializer.VERSION, true) output.setPosition(5) // leave 4 bytes to write the offsets // note: we don't write ID - tables are assumed to be using serialization without IDs // write attributes and keep track off offset into byte array var i = 0 while (i < values.length) { offsets(i) = output.position() output.write(values(i)) i += 1 } // write the offsets - variable width i = 0 val offsetStart = output.position() while (i < values.length) { output.writeInt(offsets(i), true) i += 1 } // got back and write the start position for the offsets val total = output.position() output.setPosition(1) output.writeInt(offsetStart) output.setPosition(total) // set back to the end so that we get all the bytes new Value(output.toBytes) } }
ronq/geomesa
geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/KryoVisibilityRowEncoder.scala
Scala
apache-2.0
5,976
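// A sketch of the configuration entry point on KryoVisibilityRowEncoder above. The feature type
// spec string and the scanner call are illustrative assumptions; only configure() and its default
// priority of 21 come from the entry itself.
import org.locationtech.geomesa.accumulo.iterators.KryoVisibilityRowEncoder
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes

object RowEncoderConfigExample extends App {
  val sft = SimpleFeatureTypes.createType("example", "name:String,dtg:Date,*geom:Point:srid=4326")
  val setting = KryoVisibilityRowEncoder.configure(sft)  // IteratorSetting at the default priority
  println(setting.getPriority)                           // 21
  // scanner.addScanIterator(setting)                    // attach to an Accumulo scanner (assumed)
}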
package almond.display import java.net.URL final class Json private( val contentOrUrl: Either[URL, String], // FIXME This may not be the right terminology (see https://en.wikipedia.org/wiki/Media_type). // This could also be generalized to the other Display classes. val vendorPart: Option[String], val displayId: String ) extends TextDisplay { private def copy( contentOrUrl: Either[URL, String] = contentOrUrl, vendorPart: Option[String] = vendorPart ): Json = new Json(contentOrUrl, vendorPart, displayId) def withContent(code: String): Json = copy(contentOrUrl = Right(code)) def withUrl(url: String): Json = copy(contentOrUrl = Left(new URL(url))) def withUrl(url: URL): Json = copy(contentOrUrl = Left(url)) def withSubType(subType: String): Json = copy(vendorPart = Some(subType)) def withVendorPart(vendorPartOpt: Option[String]): Json = copy(vendorPart = vendorPartOpt) def mimeType: String = { val vendorPart0 = vendorPart.fold("")(_ + "+") Json.mimeType(vendorPart0) } def data(): Map[String, String] = Map(mimeType -> finalContent) } object Json extends TextDisplay.Builder[Json] { protected def build(contentOrUrl: Either[URL, String]): Json = new Json(contentOrUrl, None, UpdatableDisplay.generateId()) def mimeType = "application/json" def mimeType(vendorPart: String) = s"application/${vendorPart}json" }
alexarchambault/jupyter-scala
modules/scala/jupyter-api/src/main/scala/almond/display/Json.scala
Scala
apache-2.0
1,420
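// A hedged sketch of the almond Json display class above. It assumes the TextDisplay.Builder
// companion supplies an apply(String) constructor (not shown in this entry); the vendor part
// rewrites the MIME type reported by mimeType.
import almond.display.Json

object JsonDisplayExample extends App {
  val plain = Json("""{"answer": 42}""")
  println(plain.mimeType)                                  // application/json
  val vendored = plain.withVendorPart(Some("vnd.example"))
  println(vendored.mimeType)                               // application/vnd.example+json
}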
import scala.collection.immutable.{ Range, NumericRange } object Test { def rangeForeach(range : Range) = { val buffer = new scala.collection.mutable.ListBuffer[Int]; range.foreach(buffer += _); assert(buffer.toList == range.iterator.toList, buffer.toList.toString + "/" + range.iterator.toList) } def boundaryTests() = { // #4321 assert((Int.MinValue to Int.MaxValue by Int.MaxValue).size == 3) // #4308 val caught = ( try { (Long.MinValue to Long.MaxValue).sum ; false } catch { case _: IllegalArgumentException => true } ) assert(caught) // #7432 val noElemAtMin = ( try { (10 until 10).min ; false } catch { case _: NoSuchElementException => true } ) assert(noElemAtMin) val noElemAtMax = ( try { (10 until 10).max ; false } catch { case _: NoSuchElementException => true } ) assert(noElemAtMax) } case class GR[T](val x: T)(implicit val num: Integral[T]) { import num._ def negated = GR[T](-x) def gr1 = NumericRange(x, x, x) def gr2 = NumericRange.inclusive(x, x, x) def gr3 = NumericRange(x, x * fromInt(4), x * fromInt(2)) // scala/bug#9348 def gr4 = NumericRange(x, x * fromInt(-2), x * fromInt(-2)) def gr5 = NumericRange(x, x * fromInt(10), x) def gr6 = NumericRange.inclusive(x, x * fromInt(10), x) def gr7 = gr3.toList ::: negated.gr3.toList def check = { assert(gr1.isEmpty && !gr2.isEmpty) assert(gr3.size == 2 && gr4.size == 2) assert(gr5.size == 9 && gr6.size == 10) assert(gr7.sum == num.zero, gr7.toString) assert(!(gr5 contains (x * fromInt(10)))) assert(gr6 contains (x * fromInt(10))) } } def main(args: Array[String]): Unit = { implicit val imp1: Numeric.BigDecimalAsIfIntegral = Numeric.BigDecimalAsIfIntegral val _grs = List[GR[_]]( GR(BigDecimal(5.0)), GR(BigDecimal(0.25)), // scala/bug#9348 GR(BigInt(5)), GR(5L), GR(2.toByte) ) val grs = _grs ::: (_grs map (_.negated)) grs foreach (_.check) assert(NumericRange(1, 10, 1) sameElements (1 until 10)) assert(NumericRange.inclusive(1, 10, 1) sameElements (1 to 10)) assert(NumericRange.inclusive(1, 100, 3) sameElements (1 to 100 by 3)) // #2518 assert((3L to 7 by 2) sameElements List(3L, 5L, 7L)) rangeForeach(1 to 10); rangeForeach(1 until 10); rangeForeach(10 to 1 by -1); rangeForeach(10 until 1 by -1); rangeForeach(10 to 1 by -3); rangeForeach(10 until 1 by -3); // living on the edges boundaryTests() } }
som-snytt/dotty
tests/run/range.scala
Scala
apache-2.0
2,607
package castalia.actors // //import akka.actor.ActorSystem import akka.actor.ActorDSL._ import akka.actor.{ActorRef, ActorSystem} import akka.http.scaladsl.model.StatusCodes._ import akka.http.scaladsl.model.{HttpMethods, HttpRequest} import akka.testkit.TestActor.{NoAutoPilot, AutoPilot} import akka.testkit.TestProbe import castalia.StubConfigParser._ import castalia._ import castalia.model.Messages.{EndpointMetricsInit, EndpointMetricsGet, Done, UpsertEndpoint} import castalia.model.Model.{EndpointMetrics, StubResponse} class ReceptionistSpec(_system: ActorSystem) extends ActorSpecBase(_system) { def this() = this(ActorSystem("ReceptionistSpec")) val metricsCollectorProbe = new TestProbe(_system) val receptionist = actor("receptionist")(new Receptionist() { override def createMetricsCollector(): ActorRef = metricsCollectorProbe.ref }) val stubConfig = parseStubConfigs(List("jsonconfiguredstub.json")).head override def beforeAll: Unit = { receptionist ! UpsertEndpoint(stubConfig) expectMsg(Done(stubConfig.endpoint)) } "Receptionist actor" when { "receives UpsertEndpoint message" should { "process it and reply with Done" in { receptionist ! UpsertEndpoint(stubConfig) expectMsg(Done(stubConfig.endpoint)) } } "receives a request to an existing endpoint " should { "forward the request to the endpoint and get a 200 response" in { val r = HttpRequest(HttpMethods.GET, "/doublepathparam/1/responsedata/id2" ) receptionist ! r expectMsg(StubResponse(OK.intValue, "{\\"id\\":\\"twee\\",\\"someValue\\":\\"123123\\",\\"someAdditionalValue\\":\\"345345\\"}")) // todo: rewrite into converting the response to json, unmarshal it and inspect. } } "receives a request to get metrics for endpoints" should { "forward the request to the metrics collector" in { val metrics = Map("endpoint" -> Map("calls" -> 1)) metricsCollectorProbe.setAutoPilot(new AutoPilot { override def run(sender: ActorRef, msg: Any): AutoPilot = msg match{ case EndpointMetricsGet(None) => sender ! EndpointMetrics(metrics) NoAutoPilot } }) receptionist ! EndpointMetricsGet(None) expectMsg(EndpointMetrics(metrics)) } } /* "receives a request to a non-existing endpoint" should { "return HTTP status code 404" in { val r = HttpRequest(HttpMethods.GET, "/nonexistingstub" ) receptionist ! r expectMsg(StubResponse(NotFound.intValue, "From receptionist " + NotFound.reason)) } } "receives a request to an existing endpoint " should { "forward the request to the endpoint and get a 404" in { val r = HttpRequest(HttpMethods.GET, "/doublepathparam/0/responsedata/notfound" ) receptionist ! 
r expectMsg(StubResponse(NotFound.intValue, "")) } } */ } // "A HTTP GET request to '/stubs/doublepathparam/0/responsedata/notfound' " should { // "result in a HTTP 404 response from the stubserver" in { // Get(s"/stubs/doublepathparam/0/responsedata/notfound") ~> service.routes ~> check { // status shouldBe NotFound // responseAs[String] shouldBe empty // } // } // } // // // "A HTTP GET request to '/stubs/doublepathparam/0/responsedata/internalerror' " should { // "result in a HTTP 503 response from the stubserver and related response" in { // Get(s"/stubs/doublepathparam/0/responsedata/internalerror") ~> service.routes ~> check { // status shouldBe ServiceUnavailable // responseAs[String] shouldBe empty // } // } // } // // "A HTTP GET request to '/stubs/doublepathparam/1/responsedata/id1' " should { // "result in a HTTP 200 response from the stubserver and related response" in { // Get(s"/stubs/doublepathparam/1/responsedata/id1") ~> service.routes ~> check { // status shouldBe OK // contentType shouldBe `application/json` // responseAs[String].parseJson.convertTo[AnyJsonObject] shouldBe // Some(Map("id" -> JsString("een"), "someValue" -> JsString("123123"))) // } // } // } // // "An empty list of static responses and map of dynamic responses" should { // implicit val system = ActorSystem("StubServiceSpecSystem", ConfigFactory.parseString("""akka.loggers = ["akka.testkit.TestEventListener"]""")) // // "result in a log message at info of 'No stubConfigs given' " in { // val stubService = new StubService(List.empty) // // EventFilter.info(message = "No StubConfigs given", occurrences = 1) intercept { // Get("/stubs/static") ~> stubService.routes ~> check { // handled shouldBe true // status shouldBe NotFound // } // } // // } // } // // "Duplicated endpoints configured" should { // val duplicatedStubConfigs = parseStubConfigs(parse("multiple-same-endpoints-config.json").stubs) // "result in an IllegalArgumentException" in { // intercept[IllegalArgumentException] { // new StubService(duplicatedStubConfigs) // } // } // } // // "A HTTP POST request to a endpoint described in /responses" should { // "result in a HTTP 200 response from the stubserver" in { // Post("/stubs/jsonconfiguredstub/more/responses", // ResponseConfig(None, None, 200, Some(Map("someValue" -> JsString("123123"))))) ~> service.routes ~> check { // status shouldBe OK // responseAs[String] shouldBe empty // } // } // } }
TimSoethout/stubserver
src/test/scala/castalia/actors/ReceptionistSpec.scala
Scala
mit
5,563
package controllers import java.util.UUID import javax.inject.Inject import com.mohiva.play.silhouette.api._ import com.mohiva.play.silhouette.api.repositories.AuthInfoRepository import com.mohiva.play.silhouette.api.services.AvatarService import com.mohiva.play.silhouette.api.util.PasswordHasher import com.mohiva.play.silhouette.impl.authenticators.CookieAuthenticator import com.mohiva.play.silhouette.impl.providers._ import forms.SignUpForm import models.User import models.services.UserService import play.api.i18n.{ MessagesApi, Messages } import play.api.libs.concurrent.Execution.Implicits._ import play.api.mvc.Action import scala.concurrent.Future /** * The sign up controller. * * @param messagesApi The Play messages API. * @param env The Silhouette environment. * @param userService The user service implementation. * @param authInfoRepository The auth info repository implementation. * @param avatarService The avatar service implementation. * @param passwordHasher The password hasher implementation. */ class SignUpController @Inject() ( val messagesApi: MessagesApi, val env: Environment[User, CookieAuthenticator], userService: UserService, authInfoRepository: AuthInfoRepository, avatarService: AvatarService, passwordHasher: PasswordHasher) extends Silhouette[User, CookieAuthenticator] { /** * Registers a new user. * * @return The result to display. */ def signUp = Action.async { implicit request => SignUpForm.form.bindFromRequest.fold( form => Future.successful(BadRequest(views.html.signUp(form))), data => { val loginInfo = LoginInfo(CredentialsProvider.ID, data.email) userService.retrieve(loginInfo).flatMap { case Some(user) => Future.successful(Redirect(routes.ApplicationController.signUp()).flashing("error" -> Messages("user.exists"))) case None => val authInfo = passwordHasher.hash(data.password) val user = User( userID = UUID.randomUUID(), loginInfo = loginInfo, email = Some(data.email), profile = models.Profile( fullName = data.profile.fullName, age = data.profile.age, sex = data.profile.sex ), avatarURL = None ) for { avatar <- avatarService.retrieveURL(data.email) user <- userService.create(user.copy(avatarURL = avatar)) authInfo <- authInfoRepository.add(loginInfo, authInfo) authenticator <- env.authenticatorService.create(loginInfo) value <- env.authenticatorService.init(authenticator) result <- env.authenticatorService.embed(value, Redirect(routes.ApplicationController.admin())) } yield { env.eventBus.publish(SignUpEvent(user, request, request2Messages)) env.eventBus.publish(LoginEvent(user, request, request2Messages)) result } } } ) } }
renexdev/Play-Auth-Slick-Seed-Load-Schema
app/controllers/SignUpController.scala
Scala
apache-2.0
3,056
package org.sisioh.aws4s.s3.model import com.amazonaws.services.s3.model.{ CannedAccessControlList, AccessControlList, SetObjectAclRequest } import org.sisioh.aws4s.PimpedType object SetObjectAclRequestFactory { def create(bucketName: String, key: String, acl: AccessControlList): SetObjectAclRequest = new SetObjectAclRequest(bucketName, key, acl) def create(bucketName: String, key: String, acl: CannedAccessControlList): SetObjectAclRequest = new SetObjectAclRequest(bucketName, key, acl) def create(bucketName: String, key: String, versionId: String, acl: AccessControlList): SetObjectAclRequest = new SetObjectAclRequest(bucketName, key, versionId, acl) def create(bucketName: String, key: String, versionId: String, acl: CannedAccessControlList): SetObjectAclRequest = new SetObjectAclRequest(bucketName, key, versionId, acl) } class RichSetObjectAclRequest(val underlying: SetObjectAclRequest) extends AnyVal with PimpedType[SetObjectAclRequest] { def bucketName: String = underlying.getBucketName // --- def key: String = underlying.getKey // --- def versionIdOpt: Option[String] = Option(underlying.getVersionId) // --- def aclOpt: Option[AccessControlList] = Option(underlying.getAcl) // --- def cannedAclOpt = Option(underlying.getCannedAcl) }
everpeace/aws4s
aws4s-s3/src/main/scala/org/sisioh/aws4s/s3/model/RichSetObjectAclRequest.scala
Scala
mit
1,319
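// A brief sketch around SetObjectAclRequestFactory and RichSetObjectAclRequest above. Bucket and
// key names are placeholders; CannedAccessControlList.PublicRead is a standard AWS SDK constant.
import com.amazonaws.services.s3.model.CannedAccessControlList
import org.sisioh.aws4s.s3.model.{RichSetObjectAclRequest, SetObjectAclRequestFactory}

object AclRequestExample extends App {
  val request = SetObjectAclRequestFactory.create("my-bucket", "path/to/object",
    CannedAccessControlList.PublicRead)
  val rich = new RichSetObjectAclRequest(request)
  println(rich.bucketName)    // my-bucket
  println(rich.versionIdOpt)  // None, since no version id was supplied
  println(rich.cannedAclOpt)  // Some(...), the canned ACL that was set
}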
package feh.tec.visual import java.awt.Canvas import feh.tec.visual.util.AwtEventUtils import scala.swing.event.{WindowClosing, WindowActivated} import org.lwjgl.opengl.{GL11, Display} /** * From http://lwjgl.org/wiki/index.php?title=Using_a_Resizeable_AWT_Frame_with_LWJGL */ trait SwingNicolApp extends AppSurroundingSwingFrame{ frame: SwingSurroundingFrameAppCreation#SurroundingLayoutDSL with SwingSurroundingFrameAppCreation#SurroundingLayoutBuilder => lazy val drawComponent = new Canvas() with AwtEventUtils{ canvas => addComponentListener(ComponentListener(resized = _ => resizeApp(canvas.getSize))) } frame.reactions += { case WindowActivated(`frame`) => drawComponent.requestFocusInWindow() case WindowClosing(`frame`) => stop() } /** * binds game display into `drawComponent`; needs OpenGL Context in it's thread */ def bindDisplay(){ Display.setParent(drawComponent) Display.create() } /** * sets view port to the size of `drawComponent`; needs OpenGL Context in it's thread */ def setViewPort() { GL11.glViewport(0, 0, drawComponent.getWidth, drawComponent.getHeight) Display.update() } protected def resizeApp(size: (Int, Int)) def start(): Unit = { buildLayout() frame.open() app.start() } def stop(): Unit = { app.stop() frame.close() } }
fehu/agent-tareas
draw-integration/src/main/scala/feh/tec/visual/SwingNicolApp.scala
Scala
mit
1,363
package com.twitter.summingbird.planner import org.scalacheck._ import org.scalacheck.Prop.forAll object ComposedFunctionsTest extends Properties("ComposedFunctions") { property("KeyFlatMapFunction maps only keys") = forAll { (f: Int => Iterable[Int], i: Int, s: String) => val fn = KeyFlatMapFunction[Int, Int, String](f) fn((i, s)).toList == f(i).map((_, s)).toList } property("ValueFlatMapFunction maps only values") = forAll { (f: Int => Iterable[Int], i: Int, s: String) => val fn = ValueFlatMapFunction[String, Int, Int](f) fn((s, i)).toList == f(i).map((s, _)).toList } }
twitter/summingbird
summingbird-core/src/test/scala/com/twitter/summingbird/planner/ComposedFunctionsTest.scala
Scala
apache-2.0
627
/* * Copyright 2012 Twitter Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twitter.zipkin.storage.cassandra import com.twitter.cassie._ import com.twitter.conversions.time._ import com.twitter.ostrich.stats.Stats import com.twitter.util.{Await, Duration, Future} import com.twitter.zipkin.common.Span import com.twitter.zipkin.conversions.thrift._ import com.twitter.zipkin.thriftscala import com.twitter.zipkin.storage.Storage import scala.collection.JavaConverters._ case class CassandraStorage( keyspace: Keyspace, traces: ColumnFamily[Long, String, thriftscala.Span], readBatchSize: Int, dataTimeToLive: Duration ) extends Storage { def close() { keyspace.close() } // storing the span in the traces cf private val CASSANDRA_STORE_SPAN = Stats.getCounter("cassandra_storespan") // read the trace private val CASSANDRA_GET_TRACE = Stats.getCounter("cassandra_gettrace") // trace exist call private val CASSANDRA_TRACE_EXISTS = Stats.getCounter("cassandra_traceexists") // trace is too big! private val CASSANDRA_GET_TRACE_TOO_BIG = Stats.getCounter("cassandra_gettrace_too_big") private val WRITE_REQUEST_COUNTER = Stats.getCounter("cassandra.write_request_counter") // there's a bug somewhere that creates massive traces. if we try to // read them without a limit we run the risk of blowing up the memory used in // cassandra. so instead we limit it and won't return it. hacky. 
private val TRACE_MAX_COLS = 100000 def storeSpan(span: Span): Future[Unit] = { CASSANDRA_STORE_SPAN.incr WRITE_REQUEST_COUNTER.incr() val traceKey = span.traceId val traceCol = Column[String, thriftscala.Span](createSpanColumnName(span), span.toThrift).ttl(dataTimeToLive) traces.insert(traceKey, traceCol).unit } def setTimeToLive(traceId: Long, ttl: Duration): Future[Unit] = { val rowFuture = traces.getRow(traceId) val batch = traces.batch() // fetch each col for trace, change ttl and reinsert // note that we block here Await.result(rowFuture).values().asScala.foreach { value => // creating a new column in order to set timestamp to None val col = Column[String, thriftscala.Span](value.name, value.value).ttl(ttl) batch.insert(traceId, col) } batch.execute().unit } def getTimeToLive(traceId: Long): Future[Duration] = { val rowFuture = traces.getRow(traceId) rowFuture map { rows => // fetch the val minTtlSec = rows.values().asScala.foldLeft(Int.MaxValue)((ttl: Int, col: Column[String, thriftscala.Span]) => math.min(ttl, col.ttl.map(_.inSeconds).getOrElse(Int.MaxValue))) if (minTtlSec == Int.MaxValue) { throw new IllegalArgumentException("The trace " + traceId + " does not have any ttl set!") } minTtlSec.seconds } } /** * Finds traces that have been stored from a list of trace IDs * * @param traceIds a List of trace IDs * @return a Set of those trace IDs from the list which are stored */ def tracesExist(traceIds: Seq[Long]): Future[Set[Long]] = { CASSANDRA_TRACE_EXISTS.incr Future.collect { traceIds.grouped(readBatchSize).toSeq.map { ids => traces.multigetRows(ids.toSet.asJava, None, None, Order.Normal, 1).map { rowSet => ids.flatMap { id => val spans = rowSet.asScala(id).asScala.map { case (colName, col) => col.value.toSpan } if (spans.isEmpty) { None } else { Some(spans.head.traceId) } }.toSet } } }.map { _.reduce { (left, right) => left ++ right } } } /** * Fetches traces from the underlying storage. Note that there might be multiple * entries per span. */ def getSpansByTraceId(traceId: Long): Future[Seq[Span]] = { getSpansByTraceIds(Seq(traceId)).map { _.head } } def getSpansByTraceIds(traceIds: Seq[Long]): Future[Seq[Seq[Span]]] = { CASSANDRA_GET_TRACE.incr Future.collect { traceIds.grouped(readBatchSize).toSeq.map { ids => traces.multigetRows(ids.toSet.asJava, None, None, Order.Normal, TRACE_MAX_COLS).map { rowSet => ids.flatMap { id => val spans = rowSet.asScala(id).asScala.map { case (colName, col) => col.value.toSpan } spans.toSeq match { case Nil => { None } case s if s.length > TRACE_MAX_COLS => { CASSANDRA_GET_TRACE_TOO_BIG.incr() None } case s => { Some(s) } } } } } }.map { _.flatten } } def getDataTimeToLive: Int = dataTimeToLive.inSeconds /* * Helper methods * -------------- */ /** * One span will be logged by two different machines we want to store it in cassandra * without having to do a read first to do so we create a unique column name */ private def createSpanColumnName(span: Span) : String = { // TODO make into a codec? span.id.toString + "_" + span.annotations.hashCode + "_" + span.binaryAnnotations.hashCode } }
wyzssw/zipkin
zipkin-cassandra/src/main/scala/com/twitter/zipkin/storage/cassandra/CassandraStorage.scala
Scala
apache-2.0
5,741
package org.jetbrains.plugins.scala package lang.psi.light import com.intellij.psi._ import com.intellij.psi.impl.light.LightFieldBuilder import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition import org.jetbrains.plugins.scala.lang.psi.types.ScType /** * @author Alefas * @since 07.12.12 */ object ScLightField { def apply(name: String, scType: ScType, containingClass: ScTypeDefinition, modifiers: String*): PsiField = { new LightFieldBuilder(name, scType.toPsiType, containingClass) .setContainingClass(containingClass) .setModifiers(modifiers: _*) } }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/light/ScLightField.scala
Scala
apache-2.0
609
/* * Copyright 2016 the original author or authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package at.makubi.maven.plugin.avrohugger import java.io.File object Implicits { implicit class FileArrayEnricher(files: Array[File]) { def withSuffix(suffix: String): Array[File] = { files.filter(_.getName.endsWith(suffix)) } } }
sini/avrohugger-maven-plugin
src/main/scala/at/makubi/maven/plugin/avrohugger/Implicits.scala
Scala
apache-2.0
869
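A minimal usage sketch for the FileArrayEnricher implicit above; it is not part of the repository, and the directory path and object name are illustrative only.

import java.io.File
import at.makubi.maven.plugin.avrohugger.Implicits._

object WithSuffixExample extends App {
  // Hypothetical source directory; listFiles returns null for a non-directory, so guard it.
  val files: Array[File] = Option(new File("src/main/avro").listFiles()).getOrElse(Array.empty[File])

  // The enrichment keeps only files whose name ends with the given suffix.
  val avroSchemas: Array[File] = files.withSuffix(".avsc")
  avroSchemas.foreach(f => println(f.getName))
}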
/* Copyright 2016 ScalABM Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.economicsl.agora.markets.tradables.orders.ask import org.economicsl.agora.markets.tradables.orders.Order import org.economicsl.agora.markets.tradables.Quantity /** Trait defining an order to sell a `Tradable` object. */ trait AskOrder extends Order
EconomicSL/agora
src/main/scala/org/economicsl/agora/markets/tradables/orders/ask/AskOrder.scala
Scala
apache-2.0
817
/* * Copyright (C) 2005, The Beangle Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.beangle.commons.file.diff.bsdiff import java.io.DataInputStream import java.io.IOException import java.io.InputStream import java.io.OutputStream /** * bsdiff encodes offsets (represented by the C off_t type) as 64-bit chunks. * In this implementation only 32-bit signed integers are supported, but the * additional encoding steps are left to illustrate the process (which, in Java, * would encode/decode a long primitive data type). */ object Offset { /** * Size of a bsdiff-encoded offset, in bytes. */ val OFFSET_SIZE = 8 /** * Reads a bsdiff-encoded offset (based on the C off_t type) from an * {@link InputStream}. */ def readOffset(in: InputStream): Int = { val buf = new Array[Byte](OFFSET_SIZE) val bytesRead = in.read(buf) if (bytesRead < OFFSET_SIZE) throw new IOException("Could not read offset.") var y = 0 y = buf(7) & 0x7F y *= 256 y += buf(6) & 0xFF y *= 256 y += buf(5) & 0xFF y *= 256 y += buf(4) & 0xFF y *= 256 y += buf(3) & 0xFF y *= 256 y += buf(2) & 0xFF y *= 256 y += buf(1) & 0xFF y *= 256 y += buf(0) & 0xFF /* An integer overflow occurred */ if (y < 0) throw new IOException( "Integer overflow: 64-bit offsets not supported.") if ((buf(7) & 0x80) != 0) y = -y return y } /** * Writes a bsdiff-encoded offset to an {@link OutputStream}. 
*/ def writeOffset(value: Int, out: OutputStream): Unit = { val buf = new Array[Byte](OFFSET_SIZE) var y = 0 if (value < 0) { y = -value /* Set the sign bit */ buf(7) = (buf(7) | 0x80).asInstanceOf[Byte] } else y = value buf(0) = (buf(0) | y % 256).asInstanceOf[Byte] y -= buf(0) & 0xFF y /= 256 buf(1) = (buf(1) | y % 256).asInstanceOf[Byte] y -= buf(1) & 0xFF y /= 256 buf(2) = (buf(2) | y % 256).asInstanceOf[Byte] y -= buf(2) & 0xFF y /= 256 buf(3) = (buf(3) | y % 256).asInstanceOf[Byte] y -= buf(3) & 0xFF y /= 256 buf(4) = (buf(4) | y % 256).asInstanceOf[Byte] y -= buf(4) & 0xFF y /= 256 buf(5) = (buf(5) | y % 256).asInstanceOf[Byte] y -= buf(5) & 0xFF y /= 256 buf(6) = (buf(6) | y % 256).asInstanceOf[Byte] y -= buf(6) & 0xFF y /= 256 buf(7) = (buf(7) | y % 256).asInstanceOf[Byte] out.write(buf) } def writeBlock(b: Format.Block, out: OutputStream): Unit = { writeOffset(b.diffLength, out) writeOffset(b.extraLength, out) writeOffset(b.seekLength, out) } def readBlock(in: InputStream): Format.Block = Format.Block(readOffset(in), readOffset(in), readOffset(in)) def writeHeader(h: Format.Header, out: OutputStream): Unit = { out.write(Format.HeaderMagic.getBytes()) writeOffset(h.controlLength, out) writeOffset(h.diffLength, out) writeOffset(h.outputLength, out) } def readHeader(in: InputStream): Format.Header = { val headerIn = new DataInputStream(in) val buf = new Array[Byte](8) headerIn.read(buf) val magic = new String(buf) if (!"BSDIFF40".equals(magic)) throw new RuntimeException("Header missing magic number") Format.Header(readOffset(headerIn), readOffset(headerIn), readOffset(headerIn)) } }
beangle/commons
file/src/main/scala/org/beangle/commons/file/diff/bsdiff/Offset.scala
Scala
lgpl-3.0
3,997
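A small round-trip sketch for the bsdiff offset codec above, using only in-memory streams; it is not part of the repository and merely illustrates that writeOffset and readOffset are inverses for values that fit in an Int.

import java.io.{ByteArrayInputStream, ByteArrayOutputStream}
import org.beangle.commons.file.diff.bsdiff.Offset

object OffsetRoundTrip extends App {
  val out = new ByteArrayOutputStream()
  // A negative value exercises the sign bit stored in the high bit of the last byte.
  Offset.writeOffset(-123456, out)

  val in = new ByteArrayInputStream(out.toByteArray)
  // readOffset decodes the 8 little-endian bytes back into a signed Int.
  val decoded = Offset.readOffset(in)
  assert(decoded == -123456)
  println(decoded)
}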
package org.sisioh.aws4s.sns.model import com.amazonaws.services.sns.model.{ ListSubscriptionsByTopicResult, Subscription } import org.sisioh.aws4s.PimpedType import scala.collection.JavaConverters._ object ListSubscriptionsByTopicResultFactory { def create(): ListSubscriptionsByTopicResult = new ListSubscriptionsByTopicResult() } class RichListSubscriptionsByTopicResult(val underlying: ListSubscriptionsByTopicResult) extends AnyVal with PimpedType[ListSubscriptionsByTopicResult] { def nextTokenOpt: Option[String] = Option(underlying.getNextToken) def nextTokenOpt_=(value: Option[String]): Unit = underlying.setNextToken(value.orNull) def withNextTokenOpt(value: Option[String]): ListSubscriptionsByTopicResult = underlying.withNextToken(value.orNull) def subscriptions: Seq[Subscription] = underlying.getSubscriptions.asScala.toVector def subscriptions_=(value: Seq[Subscription]): Unit = underlying.setSubscriptions(value.asJava) def withSubscription(value: Seq[Subscription]): ListSubscriptionsByTopicResult = underlying.withSubscriptions(value.asJava) }
sisioh/aws4s
aws4s-sns/src/main/scala/org/sisioh/aws4s/sns/model/RichListSubscriptionsByTopicResult.scala
Scala
mit
1,123
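A brief sketch of the Option-based accessors above; it is not part of the repository and only exercises the wrapper defined in this file, without calling SNS.

import org.sisioh.aws4s.sns.model.{ListSubscriptionsByTopicResultFactory, RichListSubscriptionsByTopicResult}

object RichResultExample extends App {
  val rich = new RichListSubscriptionsByTopicResult(ListSubscriptionsByTopicResultFactory.create())

  println(rich.nextTokenOpt)        // None: no pagination token on a freshly created result
  rich.nextTokenOpt = Some("token") // delegates to setNextToken on the underlying result
  println(rich.nextTokenOpt)        // Some(token)
}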
package polyite.fitness.scop_features import polyite.fitness.Feature import polyite.schedule.DomainCoeffInfo import polyite.schedule.Dependence import polyite.ScopInfo import polyite.util.SCoPMetrics import polyite.schedule.schedule_tree.ScheduleNode import polyite.config.Config /** * Maximum loop depth of the SCoP. */ object MaxLoopDepth extends Feature { def calc(t : ScheduleNode, conf : Config, scop : ScopInfo, scopMetrics : SCoPMetrics, domInfo : DomainCoeffInfo, deps : Set[Dependence]) : Double = scopMetrics.maxLoopDepth def isMultiStmt() : Boolean = false override def toString() : String = getClass.getSimpleName }
stganser/polyite
src/polyite/fitness/scop_features/MaxLoopDepth.scala
Scala
mit
646
package controllers import play.api._ import play.api.mvc._ import org.xml.sax.InputSource import org.htmlcleaner._ import scala.collection.mutable.ListBuffer import scala.xml._ import java.net._ import parsing._ import scala.io.Source import java.net.URL import scala.util.matching.Regex import scala.collection.mutable.ArrayBuffer import scala.io._ import java.io._ import play.api.db._ import play.api.Play.current object Application extends Controller { // Call index.scala.html def index = Action { val result = grabURLBack(getRealIndex("http://www.ptt.cc/bbs/BikerShop/index.html", 1),20) Ok(views.html.index("Your new application is not ready.", result.toArray)) } def printToFile(f: java.io.File)(op: java.io.PrintWriter => Unit) { val p = new java.io.PrintWriter(f) try { op(p) } finally { p.close() } } def grabURLArray(urlArray: ArrayBuffer[String]) = { val posts_result = ArrayBuffer[String]() @annotation.tailrec def go(in: ArrayBuffer[String], result: ArrayBuffer[String]): ArrayBuffer[String] = { if(in.isEmpty) result else{ println(in(0)) val posts = new post(in(0)) val resultArray = result += in(0) val resultArray_1 = resultArray ++ posts.urlToResult(in(0)) val resultArray_2 = resultArray_1 += "=====" go(in.tail, resultArray_2) } } go(urlArray, posts_result) // for (url <- urlArray) { // val posts = new post(url) // posts_result.append(url) // posts_result.appendAll( posts.urlToResult(url) ) // posts_result.append("=====") // } // posts_result } def getRealIndex(index_url: String, op: Int):String = { val html = Source.fromURL(index_url) val sourceString = html.mkString val perLineString = sourceString.split("\n") var linesWithArrow = perLineString.filter(_.contains("上頁")) if(op!=1){ linesWithArrow = perLineString.filter(_.contains("下頁")) } // val linesWithArrow = if(op!=1){}else[}] @annotation.tailrec def go(in: Array[String], result: String):String = { if(in.isEmpty) result else{ val row = in(0) val rowSplit = row.split("\"") val resultArray = gogo(rowSplit) go(in.tail, resultArray) } } @annotation.tailrec def gogo(in: Array[String]): String = { if(in(0).contains("/bbs/BikerShop")){ val result = "http://www.ptt.cc" + in(0) result }else{ gogo(in.tail) } } // for (row <- linesWithArrow) { // val rowSplit = row.split("\"") // for (r <- rowSplit) { // if(r.contains("/bbs/BikerShop")) { // var r2 = "http://www.ptt.cc" + r // // println(r2) // real_index = r2 // } // } // } val real_index = go(linesWithArrow, "") if(op!=1) real_index else{ getRealIndex(real_index, 2) } } def grabURLBack(url: String, amount : Int) = { val start = url.substring(37,41).toInt // val result = ArrayBuffer[String]() // val resultArray = ArrayBuffer[String]() @annotation.tailrec def grabURL(start: Int, count: Int, amount: Int, result: ArrayBuffer[String]): ArrayBuffer[String] = { if(count>=amount){ result }else{ oncePerSecond() val postListResult = new postList("") val resultArray = result ++ (grabURLArray(postListResult.urlToResult("http://www.ptt.cc/bbs/BikerShop/index"+(start-count)+".html"))) // println(resultArray) grabURL(start, count+1, amount, resultArray) } } val result = grabURL(start, 0, amount, resultArray) // // for( k <- 0 to amount) { // var postListResult = new postList("") // // println("http://www.ptt.cc/bbs/BikerShop/index"+(start-k)+".html") // oncePerSecond() // result.appendAll(grabURLArray(postListResult.urlToResult("http://www.ptt.cc/bbs/BikerShop/index"+(start-k)+".html"))) // } val cleanResult = ArrayBuffer[String]() val cleanResult2 = ArrayBuffer[String]() @annotation.tailrec def 
cleanSpace(result: ArrayBuffer[String], cleanResult: ArrayBuffer[String]):ArrayBuffer[String] = { if(result.isEmpty){ cleanResult }else{ val item = result(0) if(item != Nil && item.toString != "" && item.length >= 2 && item.toString.take(5)!="<span"){ val cleanResult_1 = cleanResult += item cleanSpace(result.tail, cleanResult_1) }else{ cleanSpace(result.tail, cleanResult) } } } // for(item <- result) { // if(item != Nil && item.toString != "" && item.length >= 2 && item.toString.take(5)!="<span"){ // cleanResult.append(item) // } // } val cleanResult_1 = cleanSpace(result, cleanResult) val result_length = cleanResult_1.length @annotation.tailrec def checkSet(count: Int, length: Int, result: ArrayBuffer[String], cleanSet: ArrayBuffer[String]):ArrayBuffer[String] = { if(count>=length-4){ cleanSet }else{ // println(result.length) // println(count) if((count+3)>=result.length){ println(result) } if(result(count).toString.takeRight(4) == "html" && result(count+3).toString == "====="){ val cleanSet_1 = cleanSet ++ ArrayBuffer[String](result(count), result(count+1), result(count+2), result(count+3)) checkSet(count+1, length, result, cleanSet_1) }else{ checkSet(count+1, length, result, cleanSet) } } } checkSet(0, result_length, cleanResult_1, cleanResult2) // for(i <- 0 to cleanResult_1.length-4) { // if(cleanResult_1(i).toString.takeRight(4) == "html" && cleanResult_1(i+3).toString == "====="){ // cleanResult2.append(cleanResult_1(i)) // cleanResult2.append(cleanResult_1(i+1)) // cleanResult2.append(cleanResult_1(i+2)) // cleanResult2.append(cleanResult_1(i+3)) // } // } // for(item <- cleanResult2) { // println(item) // } // cleanResult2 } def oncePerSecond() { Thread sleep 1000 } } class post(url: String){ def parseURLAndSplitPerLine(theURL: String) : Array[String] = { val html = Source.fromURL(theURL) val sourceString = html.mkString val perLineString = sourceString.split("\n") val clean_perLineString = perLineString.drop(50) clean_perLineString // val pattern = new Regex("《.....") // val str = "《Item》→光陽KYMCO三冠王125" } def findArrows(stringArray: Array[String]) : Array[String] = { val linesWithArrow = stringArray.filter(_.contains("\u300A")) val linesWithoutContent = linesWithArrow.filter(!_.contains("content")) // kk.foreach(_.println) val linesWithArrow2 = linesWithArrow.drop(2) linesWithArrow // println(kd) } def getNameAndPrice(stringArray: Array[String]) : ArrayBuffer[String] = { var resultArray = ArrayBuffer[String]() var rowSplitArray = ArrayBuffer[String]() @annotation.tailrec def checkFormat(input : Array[String], result: ArrayBuffer[String]) : ArrayBuffer[String] = { if (input.isEmpty) result else { val row = input(0) if(!row.contains("必填")&&(row.contains("價格")||row.contains("名稱"))){ val rowSplit = row.split("\u2192") addRowToRowSplitArray(rowSplit, result) } checkFormat(input.tail,result) } } @annotation.tailrec def addRowToRowSplitArray(input : Array[String], result: ArrayBuffer[String]) : ArrayBuffer[String] = { if(input.isEmpty) result else { val row = input(0) result += row addRowToRowSplitArray(input.tail, result) } } val kk = checkFormat(stringArray, rowSplitArray) @annotation.tailrec def delLabel(in: ArrayBuffer[String], result: ArrayBuffer[String]):ArrayBuffer[String] = { if(in.isEmpty){ result }else{ val r = in(0) if(!r.contains("\u300A")||(r.contains("價格")&& r.length > 10)||(r.contains("名稱")&& r.length > 10)) { if(r.contains("\u300A")){ delLabel(in.tail, result += inDelLabel(0, r.length, r, "")) }else delLabel(in.tail, result += r) }else delLabel(in.tail, result) } } 
@annotation.tailrec def inDelLabel(count: Int, length: Int, in: String, result: String): String = { if(count >= length-1){ result }else{ val r = in if(r.substring(count,count+1) == "\u300B"){ val cleanR = r.substring(count+1,r.length-1) // println(cleanR) // result = cleanR inDelLabel(count+1,length, in, cleanR) }else inDelLabel(count+1,length, in, result) } } delLabel(kk, ArrayBuffer[String]()) } def urlToResult(url: String) : ArrayBuffer[String] = { getNameAndPrice(findArrows(parseURLAndSplitPerLine(url))) } } class postList(url: String) { def parseURLAndSplitPerLine(theURL: String) : Array[String] = { val html = Source.fromURL(theURL) val sourceString = html.mkString val perLineString = sourceString.split("\n") perLineString } //<a href="/bbs/BikerShop/M.1402146694.A.E0E.html">[舊@新店:排氣管] 吉村R77碳纖維管For K-XCT300i</a> def findArrows(stringArray: Array[String]) : Array[String] = { val linesWithArrow = stringArray.filter(_.contains("舊")) val linesWithBike = linesWithArrow.filter(_.contains("機車")) linesWithBike } def getURLforPost(stringArray: Array[String]) : ArrayBuffer[String] = { val resultArray = ArrayBuffer[String]() @annotation.tailrec def go(in: Array[String], result: ArrayBuffer[String]): ArrayBuffer[String] = { if(in.isEmpty) result else{ val row = in(0) val rowSplit = row.split("\"") val resultArray = result ++ gogo(rowSplit) go(in.tail, resultArray) } } @annotation.tailrec def gogo(in: Array[String]): ArrayBuffer[String] = { if(in(0).contains("/bbs/BikerShop")){ val result = ArrayBuffer[String]("http://www.ptt.cc" + in(0)) result } else{ gogo(in.tail) } } go(stringArray, resultArray) // for (row <- stringArray) { // val rowSplit = row.split("\"") // for (r <- rowSplit) { // if(r.contains("/bbs/BikerShop")) { // val r2 = "http://www.ptt.cc" + r // println(r2) // resultArray += r2 // } // } // } // resultArray } def urlToResult(url: String) : ArrayBuffer[String] = { getURLforPost(findArrows(parseURLAndSplitPerLine(url))) } }
RichoHan/Fun_Programming_Hackthon
app/controllers/Application.scala
Scala
mit
12,441
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.deploy.history import java.util.{Date, NoSuchElementException} import javax.servlet.http.{HttpServletRequest, HttpServletResponse} import scala.collection.mutable import com.codahale.metrics.Counter import org.eclipse.jetty.servlet.ServletContextHandler import org.mockito.ArgumentMatchers.any import org.mockito.Mockito._ import org.mockito.invocation.InvocationOnMock import org.mockito.stubbing.Answer import org.scalatest.Matchers import org.scalatest.mockito.MockitoSugar import org.apache.spark.SparkFunSuite import org.apache.spark.internal.Logging import org.apache.spark.status.api.v1.{ApplicationAttemptInfo => AttemptInfo, ApplicationInfo} import org.apache.spark.ui.SparkUI import org.apache.spark.util.ManualClock class ApplicationCacheSuite extends SparkFunSuite with Logging with MockitoSugar with Matchers { /** * Stub cache operations. * The state is kept in a map of [[CacheKey]] to [[CacheEntry]], * the `probeTime` field in the cache entry setting the timestamp of the entry */ class StubCacheOperations extends ApplicationCacheOperations with Logging { /** map to UI instances, including timestamps, which are used in update probes */ val instances = mutable.HashMap.empty[CacheKey, CacheEntry] /** Map of attached spark UIs */ val attached = mutable.HashMap.empty[CacheKey, SparkUI] var getAppUICount = 0L var attachCount = 0L var detachCount = 0L var updateProbeCount = 0L override def getAppUI(appId: String, attemptId: Option[String]): Option[LoadedAppUI] = { logDebug(s"getAppUI($appId, $attemptId)") getAppUICount += 1 instances.get(CacheKey(appId, attemptId)).map { e => e.loadedUI } } override def attachSparkUI( appId: String, attemptId: Option[String], ui: SparkUI, completed: Boolean): Unit = { logDebug(s"attachSparkUI($appId, $attemptId, $ui)") attachCount += 1 attached += (CacheKey(appId, attemptId) -> ui) } def putAndAttach( appId: String, attemptId: Option[String], completed: Boolean, started: Long, ended: Long): LoadedAppUI = { val ui = putAppUI(appId, attemptId, completed, started, ended) attachSparkUI(appId, attemptId, ui.ui, completed) ui } def putAppUI( appId: String, attemptId: Option[String], completed: Boolean, started: Long, ended: Long): LoadedAppUI = { val ui = LoadedAppUI(newUI(appId, attemptId, completed, started, ended)) instances(CacheKey(appId, attemptId)) = new CacheEntry(ui, completed) ui } /** * Detach a reconstructed UI * * @param ui Spark UI */ override def detachSparkUI(appId: String, attemptId: Option[String], ui: SparkUI): Unit = { logDebug(s"detachSparkUI($appId, $attemptId, $ui)") detachCount += 1 var name = ui.getAppName val key = CacheKey(appId, attemptId) attached.getOrElse(key, { throw new java.util.NoSuchElementException() }) attached -= key } /** * Lookup from 
the internal cache of attached UIs */ def getAttached(appId: String, attemptId: Option[String]): Option[SparkUI] = { attached.get(CacheKey(appId, attemptId)) } } /** * Create a new UI. The info/attempt info classes here are from the package * `org.apache.spark.status.api.v1`, not the near-equivalents from the history package */ def newUI( name: String, attemptId: Option[String], completed: Boolean, started: Long, ended: Long): SparkUI = { val info = new ApplicationInfo(name, name, Some(1), Some(1), Some(1), Some(64), Seq(new AttemptInfo(attemptId, new Date(started), new Date(ended), new Date(ended), ended - started, "user", completed, org.apache.spark.SPARK_VERSION))) val ui = mock[SparkUI] when(ui.getApplicationInfoList).thenReturn(List(info).iterator) when(ui.getAppName).thenReturn(name) when(ui.appName).thenReturn(name) val handler = new ServletContextHandler() when(ui.getHandlers).thenReturn(Seq(handler)) ui } /** * Test operations on completed UIs: they are loaded on demand, entries * are removed on overload. * * This effectively tests the original behavior of the history server's cache. */ test("Completed UI get") { val operations = new StubCacheOperations() val clock = new ManualClock(1) implicit val cache = new ApplicationCache(operations, 2, clock) val metrics = cache.metrics // cache misses val app1 = "app-1" assertNotFound(app1, None) assertMetric("lookupCount", metrics.lookupCount, 1) assertMetric("lookupFailureCount", metrics.lookupFailureCount, 1) assert(1 === operations.getAppUICount, "getAppUICount") assertNotFound(app1, None) assert(2 === operations.getAppUICount, "getAppUICount") assert(0 === operations.attachCount, "attachCount") val now = clock.getTimeMillis() // add the entry operations.putAppUI(app1, None, true, now, now) // make sure its local operations.getAppUI(app1, None).get operations.getAppUICount = 0 // now expect it to be found cache.withSparkUI(app1, None) { _ => } // assert about queries made of the operations assert(1 === operations.getAppUICount, "getAppUICount") assert(1 === operations.attachCount, "attachCount") // and in the map of attached assert(operations.getAttached(app1, None).isDefined, s"attached entry '1' from $cache") // go forward in time clock.setTime(10) val time2 = clock.getTimeMillis() val cacheEntry2 = cache.get(app1) // no more refresh as this is a completed app assert(1 === operations.getAppUICount, "getAppUICount") assert(0 === operations.updateProbeCount, "updateProbeCount") assert(0 === operations.detachCount, "attachCount") // evict the entry operations.putAndAttach("2", None, true, time2, time2) operations.putAndAttach("3", None, true, time2, time2) cache.get("2") cache.get("3") // there should have been a detachment here assert(1 === operations.detachCount, s"detach count from $cache") // and entry app1 no longer attached assert(operations.getAttached(app1, None).isEmpty, s"get($app1) in $cache") val appId = "app1" val attemptId = Some("_01") val time3 = clock.getTimeMillis() operations.putAppUI(appId, attemptId, false, time3, 0) // expect an error here assertNotFound(appId, None) } test("Test that if an attempt ID is set, it must be used in lookups") { val operations = new StubCacheOperations() val clock = new ManualClock(1) implicit val cache = new ApplicationCache(operations, retainedApplications = 10, clock = clock) val appId = "app1" val attemptId = Some("_01") operations.putAppUI(appId, attemptId, false, clock.getTimeMillis(), 0) assertNotFound(appId, None) } /** * Test that incomplete apps are not probed for updates during 
the time window, * but that they are checked if that window has expired and they are not completed. * Then, if they have changed, the old entry is replaced by a new one. */ test("Incomplete apps refreshed") { val operations = new StubCacheOperations() val clock = new ManualClock(50) implicit val cache = new ApplicationCache(operations, 5, clock) val metrics = cache.metrics // add the incomplete app // add the entry val started = clock.getTimeMillis() val appId = "app1" val attemptId = Some("001") val initialUI = operations.putAndAttach(appId, attemptId, false, started, 0) val firstUI = cache.withSparkUI(appId, attemptId) { ui => ui } assertMetric("lookupCount", metrics.lookupCount, 1) assert(0 === operations.updateProbeCount, "expected no update probe on that first get") // Invalidate the first entry to trigger a re-load. initialUI.invalidate() // Update the UI in the stub so that a new one is provided to the cache. operations.putAppUI(appId, attemptId, true, started, started + 10) val updatedUI = cache.withSparkUI(appId, attemptId) { ui => ui } assert(firstUI !== updatedUI, s"expected updated UI") assertMetric("lookupCount", metrics.lookupCount, 2) assert(1 === operations.detachCount, s"detach count") } /** * Assert that a metric counter has a specific value; failure raises an exception * including the cache's toString value * @param name counter name (for exceptions) * @param counter counter * @param expected expected value. * @param cache cache */ def assertMetric( name: String, counter: Counter, expected: Long) (implicit cache: ApplicationCache): Unit = { val actual = counter.getCount if (actual != expected) { // this is here because Scalatest loses stack depth throw new Exception(s"Wrong $name value - expected $expected but got $actual in $cache") } } /** * Assert that a key wasn't found in cache or loaded. 
* * Looks for the specific nested exception raised by [[ApplicationCache]] * @param appId application ID * @param attemptId attempt ID * @param cache app cache */ def assertNotFound( appId: String, attemptId: Option[String]) (implicit cache: ApplicationCache): Unit = { val ex = intercept[NoSuchElementException] { cache.get(appId, attemptId) } } test("Large Scale Application Eviction") { val operations = new StubCacheOperations() val clock = new ManualClock(0) val size = 5 // only two entries are retained, so we expect evictions to occur on lookups implicit val cache = new ApplicationCache(operations, retainedApplications = size, clock = clock) val attempt1 = Some("01") val ids = new mutable.ListBuffer[String]() // build a list of applications val count = 100 for (i <- 1 to count ) { val appId = f"app-$i%04d" ids += appId clock.advance(10) val t = clock.getTimeMillis() operations.putAppUI(appId, attempt1, true, t, t) } // now go through them in sequence reading them, expect evictions ids.foreach { id => cache.get(id, attempt1) } logInfo(cache.toString) val metrics = cache.metrics assertMetric("loadCount", metrics.loadCount, count) assertMetric("evictionCount", metrics.evictionCount, count - size) } test("Attempts are Evicted") { val operations = new StubCacheOperations() implicit val cache = new ApplicationCache(operations, 4, new ManualClock()) val metrics = cache.metrics val appId = "app1" val attempt1 = Some("01") val attempt2 = Some("02") val attempt3 = Some("03") operations.putAppUI(appId, attempt1, true, 100, 110) operations.putAppUI(appId, attempt2, true, 200, 210) operations.putAppUI(appId, attempt3, true, 300, 310) val attempt4 = Some("04") operations.putAppUI(appId, attempt4, true, 400, 410) val attempt5 = Some("05") operations.putAppUI(appId, attempt5, true, 500, 510) def expectLoadAndEvictionCounts(expectedLoad: Int, expectedEvictionCount: Int): Unit = { assertMetric("loadCount", metrics.loadCount, expectedLoad) assertMetric("evictionCount", metrics.evictionCount, expectedEvictionCount) } // first entry cache.get(appId, attempt1) expectLoadAndEvictionCounts(1, 0) // second cache.get(appId, attempt2) expectLoadAndEvictionCounts(2, 0) // no change cache.get(appId, attempt2) expectLoadAndEvictionCounts(2, 0) // eviction time cache.get(appId, attempt3) cache.size() should be(3) cache.get(appId, attempt4) expectLoadAndEvictionCounts(4, 0) cache.get(appId, attempt5) expectLoadAndEvictionCounts(5, 1) cache.get(appId, attempt5) expectLoadAndEvictionCounts(5, 1) } test("redirect includes query params") { val operations = new StubCacheOperations() val ui = operations.putAndAttach("foo", None, true, 0, 10) val cache = mock[ApplicationCache] when(cache.operations).thenReturn(operations) val filter = new ApplicationCacheCheckFilter(new CacheKey("foo", None), ui, cache) ui.invalidate() val request = mock[HttpServletRequest] when(request.getMethod()).thenReturn("GET") when(request.getRequestURI()).thenReturn("http://localhost:18080/history/local-123/jobs/job/") when(request.getQueryString()).thenReturn("id=2") val resp = mock[HttpServletResponse] when(resp.encodeRedirectURL(any())).thenAnswer(new Answer[String]() { override def answer(invocationOnMock: InvocationOnMock): String = { invocationOnMock.getArguments()(0).asInstanceOf[String] } }) filter.doFilter(request, resp, null) verify(resp).sendRedirect("http://localhost:18080/history/local-123/jobs/job/?id=2") } }
WindCanDie/spark
core/src/test/scala/org/apache/spark/deploy/history/ApplicationCacheSuite.scala
Scala
apache-2.0
13,740
package org.webant.extension.link import org.apache.commons.dbutils.handlers.BeanListHandler import org.apache.log4j.LogManager import org.webant.commons.entity.Link import org.webant.commons.link.JdbcLinkProvider import scala.collection.JavaConverters._ class MysqlLinkProvider extends JdbcLinkProvider { private val logger = LogManager.getLogger(classOf[MysqlLinkProvider]) DRIVER = "com.mysql.jdbc.Driver" override def init(params: java.util.Map[String, Object]): Boolean = { if (!super.init(params)) { logger.error(s"init ${getClass.getSimpleName} failed!") return false } logger.info(s"init ${getClass.getSimpleName} success!") createTable() } override def read(): Iterable[Link] = { try { read(Link.LINK_STATUS_INIT, batch) } catch { case e: Exception => e.printStackTrace() Iterable.empty } } override def write(link: Link): Int = { upsert(link) } override def write(links: Iterable[Link]): Int = { upsert(links) } private def read(status: String, size: Int): Iterable[Link] = { val sql = "SELECT id, taskId, siteId, url, referer, priority, lastCrawlTime, status, dataVersion, dataCreateTime, " + s"dataUpdateTime, dataDeleteTime FROM $table WHERE status = ? ORDER by dataCreateTime desc LIMIT ?, ?" val pageNo: Integer = 0 val pageSize: Integer = if (size <= 0 || size > 1000) 1000 else size val selectParams = Array[Object](status, pageNo, pageSize) var links = Iterable.empty[Link] try { conn.setAutoCommit(false) links = runner.query(conn, sql, new BeanListHandler[Link](classOf[Link]), selectParams: _*).asScala if (links.nonEmpty) { val updateSql = s"update $table set status = ?, dataVersion = dataVersion + 1, dataUpdateTime = now() where id = ?" val updateParams = links.map(link => { Array[Object](Link.LINK_STATUS_PENDING, link.getId) }).toArray runner.batch(conn, updateSql, updateParams) } conn.commit() } catch { case e: Exception => conn.rollback() e.printStackTrace() } finally conn.setAutoCommit(true) links } override def upsert(link: Link): Int = { // no reflection, simple and fast val sql = s"insert into $table ( id, taskId, siteId, url, referer, priority, lastCrawlTime, status, dataVersion, dataCreateTime, " + "dataUpdateTime, dataDeleteTime ) values ( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? ) ON DUPLICATE KEY UPDATE " + "taskId = ?, siteId = ?, url = ?, referer = ?, priority = ?, lastCrawlTime = ?, status = ?, dataVersion = dataVersion + 1, dataUpdateTime = now()" val values = Array[Object]( link.getId, link.getTaskId, link.getSiteId, link.getUrl, link.getReferer, link.getPriority, link.getLastCrawlTime, link.getStatus, link.getDataVersion, link.getDataCreateTime, link.getDataUpdateTime, link.getDataDeleteTime, link.getTaskId, link.getSiteId, link.getUrl, link.getReferer, link.getPriority, link.getLastCrawlTime, link.getStatus ) runner.update(conn, sql, values: _*) } override def upsert(links: Iterable[Link]): Int = { if (links == null || links.isEmpty) return 0 // no reflection, simple and fast // links.toArray.map can work, links.map can not work, it may be a bug in scala map() val placeholders = links.toArray.map(_ => "( ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ? 
)").mkString(", ") val sql = s"insert into $table (id, taskId, siteId, url, referer, priority, lastCrawlTime, status, dataVersion, " + s"dataCreateTime, dataUpdateTime, dataDeleteTime) values $placeholders ON DUPLICATE KEY UPDATE " + // "priority = values(priority), lastCrawlTime = values(lastCrawlTime), status = values(status), " + "dataVersion = dataVersion + 1, dataUpdateTime = now()" val values = links.toArray.flatMap(link => Array(link.getId, link.getTaskId, link.getSiteId, link.getUrl, link.getReferer, link.getPriority, link.getLastCrawlTime, link.getStatus, link.getDataVersion, link.getDataCreateTime, link.getDataUpdateTime, link.getDataDeleteTime)) runner.update(conn, sql, values: _*) } }
sutine/webant
webant-extension/src/main/scala/org/webant/extension/link/MysqlLinkProvider.scala
Scala
apache-2.0
4,188
package clide.messages import scala.concurrent.Future abstract class RefResolver[T,I](val resolve: I => Future[T]) sealed trait Ref[T,I] { def get(implicit resolver: RefResolver[T,I]): Future[T] def id: I override def equals(other: Any) = other match { case ref: Ref[_,_] => ref.id == this.id case _ => false } } case class Resolved[T,I](id: I, value: T) extends Ref[T,I] { def get(implicit resolver: RefResolver[T,I]) = Future.successful(value) } case class Unresolved[T,I](id: I) extends Ref[T,I] { def get(implicit resolver: RefResolver[T,I]) = resolver.resolve(id) }
martinring/clide2
modules/clide-common/src/main/scala/clide/messages/Ref.scala
Scala
lgpl-3.0
601
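A self-contained sketch of how the Ref hierarchy above might be used; the resolver, ids, and values are hypothetical, and the blocking Await is only for illustration.

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import clide.messages._

object RefExample extends App {
  // Hypothetical resolver that produces a value from its id; RefResolver is abstract, hence the {}.
  implicit val byId: RefResolver[String, Int] =
    new RefResolver[String, Int](id => Future.successful(s"user-$id")) {}

  val cached: Ref[String, Int] = Resolved(1, "alice") // get completes without consulting the resolver
  val lazyRef: Ref[String, Int] = Unresolved(2)       // get delegates to the implicit resolver

  println(Await.result(cached.get, 1.second))  // alice
  println(Await.result(lazyRef.get, 1.second)) // user-2

  // Equality is by id only, so a Resolved and an Unresolved ref with the same id compare equal.
  println(cached == Unresolved[String, Int](1)) // true
}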
package nsmc.conversion import com.mongodb.casbah.Imports._ import nsmc.conversion.types.{StructureType, MongoAndInternal} import org.apache.spark.sql.Row import org.scalatest.{Matchers, FlatSpec} class RecordConverterTests extends FlatSpec with Matchers { "a flat record with no gaps in the right order" should "convert correctly" in { val mo = MongoDBObject("key" -> "hello") ++ ("val" -> 99) val t = MongoAndInternal.toInternal(mo) val rc = new RecordConverter(t.asInstanceOf[StructureType]) val r = rc.getSchemaRecord(mo) r.size should be (2) r.getString(0) should be ("hello") r.getInt(1) should be (99) } "a flat record with no gaps in the wrong order" should "convert correctly" in { val mo = MongoDBObject("key" -> "hello") ++ ("val" -> 99) val replacement = MongoDBObject("val" -> 99) ++ ("key" -> "hello") val t = MongoAndInternal.toInternal(mo) val rc = new RecordConverter(t.asInstanceOf[StructureType]) val r = rc.getSchemaRecord(replacement) r should have size (2) r.getString(0) should be ("hello") r.getInt(1) should be (99) } "a flat record with gaps in the wrong order" should "convert correctly" in { val mo = MongoDBObject("a" -> 1) ++ ("b" -> "2") ++ ("c" -> 3) ++ ("d" -> 4) ++ ("e" -> 5) val replacement = MongoDBObject("e" -> 5) ++ ("b" -> "2") ++ ("d" -> 4) val t = MongoAndInternal.toInternal(mo) val rc = new RecordConverter(t.asInstanceOf[StructureType]) val r = rc.getSchemaRecord(replacement) r should have size (5) r.isNullAt(0) should be (true) r.getString(1) should be ("2") r.isNullAt(2) should be (true) r.getInt(3) should be (4) r.getInt(4) should be (5) } "a nested record" should "convert correctly" in { val inner1 = MongoDBObject("a" -> 1) ++ ("b" -> "2") ++ ("c" -> 3) ++ ("d" -> 4) ++ ("e" -> 5) val inner2 = MongoDBObject("x" -> 11) ++ ("y" -> "12") ++ ("z" -> 13) val mo = MongoDBObject("a" -> 1) ++ ("b" -> inner1) ++ ("c" -> 3) ++ ("d" -> inner2) ++ ("e" -> 5) val replacement1 = MongoDBObject("a" -> 1) ++ ("c" -> 3) ++ ("d" -> 4) ++ ("e" -> 5) val replacement = MongoDBObject("e" -> 5) ++ ("b" -> replacement1) ++ ("d" -> inner2) val t = MongoAndInternal.toInternal(mo) val rc = new RecordConverter(t.asInstanceOf[StructureType]) val r = rc.getSchemaRecord(replacement) r should have size (5) r.isNullAt(0) should be (true) r.apply(1) shouldBe a [Row] val r1 = r.apply(1).asInstanceOf[Row] r1 should have size (5) r1.getInt(0) should be (1) r1.isNullAt(1) should be (true) r1.getInt(2) should be (3) r1.getInt(3) should be (4) r1.getInt(4) should be (5) r.isNullAt(2) should be (true) r.apply(3) shouldBe a [Row] val r2 = r.apply(3).asInstanceOf[Row] r2 should have size (3) r2.getInt(0) should be (11) r2.getString(1) should be ("12") r2.getInt(2) should be (13) r.getInt(4) should be (5) } "a record with an atomic array" should "convert correctly" in { val l:BasicDBList = MongoDBList(1,2,3) val mo = MongoDBObject("key" -> "hello") ++ ("val" -> l) val t = MongoAndInternal.toInternal(mo) val rc = new RecordConverter(t.asInstanceOf[StructureType]) val r = rc.getSchemaRecord(mo) r.size should be (2) r.getString(0) should be ("hello") val s = r.getAs[Seq[Int]](1) s.size should be (3) s(0) should be (1) s(1) should be (2) s(2) should be (3) } "a record with a structured array" should "convert correctly" in { val inner1 = MongoDBObject("a" -> 1) ++ ("b" -> 2) val inner2 = MongoDBObject("b" -> 3) ++ ("c" -> 4) val l:BasicDBList = MongoDBList(inner1, inner2) val mo = MongoDBObject("key" -> "hello") ++ ("val" -> l) val t = MongoAndInternal.toInternal(mo) val rc = new 
RecordConverter(t.asInstanceOf[StructureType]) val r = rc.getSchemaRecord(mo) r.size should be (2) r.getString(0) should be ("hello") val s = r.getAs[Seq[Row]](1) s.size should be (2) s(0) shouldBe a [Row] val r1 = s(0).asInstanceOf[Row] r1 should have size 3 r1.getInt(0) should be (1) r1.getInt(1) should be (2) r1.isNullAt(2) should be (true) s(1) shouldBe a [Row] val r2 = s(1).asInstanceOf[Row] r2 should have size 3 r2.isNullAt(0) should be (true) r2.getInt(1) should be (3) r2.getInt(2) should be (4) } }
shotishu/spark-mongodb-connector
src/test/scala/nsmc/conversion/RecordConverterTests.scala
Scala
apache-2.0
4,433
package com.datastax.spark.connector.writer import java.io.IOException import java.net.InetAddress import com.datastax.driver.core._ import com.datastax.spark.connector.cql._ import com.datastax.spark.connector.util.Quote._ import org.apache.spark.Logging import scala.collection.JavaConversions._ import scala.collection._ /** * A utility class for determining the Replica Set (Ip Addresses) of a particular Cassandra Row. Used * by the [[com.datastax.spark.connector.RDDFunctions.keyByCassandraReplica]] method. Uses the Java * Driver to obtain replica information. */ class ReplicaMapper[T] private( connector: CassandraConnector, tableDef: TableDef, rowWriter: RowWriter[T]) extends Serializable with Logging { val keyspaceName = tableDef.keyspaceName val tableName = tableDef.tableName val columnNames = rowWriter.columnNames implicit val protocolVersion = connector.withClusterDo { _.getConfiguration.getProtocolOptions.getProtocolVersionEnum } /** * This query is only used to build a prepared statement so we can more easily extract * partition tokens from tables. We prepare a statement of the form SELECT * FROM keyspace.table * where x= .... This statement is never executed. */ private lazy val querySelectUsingOnlyPartitionKeys: String = { val partitionKeys = tableDef.partitionKey def quotedColumnNames(columns: Seq[ColumnDef]) = partitionKeys.map(_.columnName).map(quote) val whereClause = quotedColumnNames(partitionKeys).map(c => s"$c = :$c").mkString(" AND ") s"SELECT * FROM ${quote(keyspaceName)}.${quote(tableName)} WHERE $whereClause" } private def prepareDummyStatement(session: Session): PreparedStatement = { try { session.prepare(querySelectUsingOnlyPartitionKeys) } catch { case t: Throwable => throw new IOException(s"Failed to prepare statement $querySelectUsingOnlyPartitionKeys: " + t.getMessage, t) } } /** * Pairs each piece of data with the Cassandra Replicas which that data would be found on * @param data A source of data which can be bound to a statement by BatchStatementBuilder * @return an Iterator over the same data keyed by the replica's ip addresses */ def keyByReplicas(data: Iterator[T]): Iterator[(scala.collection.immutable.Set[InetAddress], T)] = { connector.withSessionDo { session => val stmt = prepareDummyStatement(session) val routingKeyGenerator = new RoutingKeyGenerator(tableDef, columnNames) val boundStmtBuilder = new BoundStatementBuilder(rowWriter, stmt, protocolVersion) val clusterMetadata = session.getCluster.getMetadata data.map { row => val hosts = clusterMetadata .getReplicas(Metadata.quote(keyspaceName), routingKeyGenerator.apply(boundStmtBuilder.bind(row))) .map(_.getAddress) .toSet[InetAddress] (hosts, row) } } } } /** * Helper methods for mapping a set of data to their relative locations in a Cassandra Cluster. */ object ReplicaMapper { def apply[T: RowWriterFactory]( connector: CassandraConnector, keyspaceName: String, tableName: String): ReplicaMapper[T] = { val schema = Schema.fromCassandra(connector, Some(keyspaceName), Some(tableName)) val tableDef = schema.tables.headOption .getOrElse(throw new IOException(s"Table not found: $keyspaceName.$tableName")) val selectedColumns = tableDef.partitionKey.map(_.ref).toIndexedSeq val rowWriter = implicitly[RowWriterFactory[T]].rowWriter(tableDef, selectedColumns) new ReplicaMapper[T](connector, tableDef, rowWriter) } }
rafaelbarreto87/spark-cassandra-connector
spark-cassandra-connector/src/main/scala/com/datastax/spark/connector/writer/ReplicaMapper.scala
Scala
apache-2.0
3,652
package com.twitter.finagle.redis import java.lang.{Boolean => JBoolean, Long => JLong} import com.twitter.finagle.redis.protocol._ import com.twitter.io.Buf import com.twitter.util.Future private[redis] trait StringCommands { self: BaseClient => val FutureTrue: Future[JBoolean] = Future.value(true) val FutureFalse: Future[JBoolean] = Future.value(false) /** * Appends value at the given key. If key doesn't exist, * behavior is similar to SET command * * @param key * @param value * @return length of string after append operation */ def append(key: Buf, value: Buf): Future[JLong] = doRequest(Append(key, value)) { case IntegerReply(n) => Future.value(n) } def bitCount(key: Buf): Future[JLong] = doRequest(BitCount(key, None, None)) { case IntegerReply(n) => Future.value(n) } def bitCount(key: Buf, start: Option[Int], end: Option[Int]): Future[JLong] = doRequest(BitCount(key, start, end)) { case IntegerReply(n) => Future.value(n) } def bitOp(op: Buf, dstKey: Buf, srcKeys: Seq[Buf]): Future[JLong] = doRequest(BitOp(op, dstKey, srcKeys)) { case IntegerReply(n) => Future.value(n) } /** * Decrements number stored at key by 1. * * @param key * @return value after decrement. */ def decr(key: Buf): Future[JLong] = doRequest(Decr(key)) { case IntegerReply(n) => Future.value(n) } /** * Decrements number stored at key by given amount. If key doesn't * exist, value is set to 0 before the operation * * @param key * @param amount * @return value after decrement. Error if key contains value * of the wrong type */ def decrBy(key: Buf, amount: Long): Future[JLong] = doRequest(DecrBy(key, amount)) { case IntegerReply(n) => Future.value(n) } /** * Gets the value associated with the given key * * @param key * @return value, or none if key doesn't exist */ def get(key: Buf): Future[Option[Buf]] = doRequest(Get(key)) { case BulkReply(message) => Future.value(Some(message)) case EmptyBulkReply => Future.None } /** * Returns the bit value at offset in the string value stored at key. * * @param key, offset * @return the bit value stored at offset. * @see http://redis.io/commands/getbit */ def getBit(key: Buf, offset: Int): Future[JLong] = doRequest(GetBit(key, offset)) { case IntegerReply(n) => Future.value(n) } /** * Gets the substring of the value associated with given key * * @param key * @param start * @param end * @return substring, or none if key doesn't exist */ def getRange(key: Buf, start: Long, end: Long): Future[Option[Buf]] = doRequest(GetRange(key, start, end)) { case BulkReply(message) => Future.value(Some(message)) case EmptyBulkReply => Future.None } /** * Atomically sets key to value and returns the old value stored at key. * Returns an error when key exists but does not hold a string value. * * @param key, value * @return the old value stored at key wrapped in Some, * or None when key did not exist. * @see http://redis.io/commands/getset */ def getSet(key: Buf, value: Buf): Future[Option[Buf]] = doRequest(GetSet(key, value)) { case BulkReply(message) => Future.value(Some(message)) case EmptyBulkReply => Future.value(None) } /** * Increments the number stored at key by one. * * @param key * @return the value of key after the increment. * @see http://redis.io/commands/incr */ def incr(key: Buf): Future[JLong] = doRequest(Incr(key)) { case IntegerReply(n) => Future.value(n) } /** * Increments the number stored at key by increment. * * @param key, increment * @return the value of key after the increment. 
* @see http://redis.io/commands/incrby */ def incrBy(key: Buf, increment: Long): Future[JLong] = doRequest(IncrBy(key, increment)) { case IntegerReply(n) => Future.value(n) } /** * Returns the values of all specified keys. * * @param keys * @return list of values at the specified keys. * @see http://redis.io/commands/mget */ def mGet(keys: Seq[Buf]): Future[Seq[Option[Buf]]] = doRequest(MGet(keys)) { case MBulkReply(messages) => Future { messages.map { case BulkReply(message) => Some(message) case EmptyBulkReply => None case _ => throw new IllegalStateException() }.toSeq } case EmptyMBulkReply => Future.Nil } /** * Sets the given keys to their respective values. MSET replaces existing * values with new values, just as regular SET. * * @param kv * @see http://redis.io/commands/mset */ def mSet(kv: Map[Buf, Buf]): Future[Unit] = doRequest(MSet(kv)) { case StatusReply(message) => Future.Unit } /** * Sets the given keys to their respective values. MSETNX will not perform * any operation at all even if just a single key already exists. * * @param kv * @return 1 if all keys were set, 0 if no keys were set. * @see http://redis.io/commands/msetnx */ def mSetNx(kv: Map[Buf, Buf]): Future[JBoolean] = doRequest(MSetNx(kv)) { case IntegerReply(n) => Future.value(n == 1) } /** * Works exactly like SETEX with the sole difference that the expire * time is specified in milliseconds instead of seconds. * * @param key, millis * @see http://redis.io/commands/psetex */ def pSetEx(key: Buf, millis: Long, value: Buf): Future[Unit] = doRequest(PSetEx(key, millis, value)) { case StatusReply(message) => Future.Unit } /** * Sets the given value to key. If a value already exists for the key, * the value is overwritten with the new value * * @param key * @param value */ def set(key: Buf, value: Buf): Future[Unit] = doRequest(Set(key, value)) { case StatusReply(message) => Future.Unit } /** * Sets or clears the bit at offset in the string value stored at key. * * @param key, offset, value * @return the original bit value stored at offset. * @see http://redis.io/commands/setbit */ def setBit(key: Buf, offset: Int, value: Int): Future[JLong] = doRequest(SetBit(key, offset, value)) { case IntegerReply(n) => Future.value(n) } /** * Set key to hold the string value and set key to timeout after a given * number of seconds. * * @param key, seconds, value * @see http://redis.io/commands/setex */ def setEx(key: Buf, seconds: Long, value: Buf): Future[Unit] = doRequest(SetEx(key, seconds, value)) { case StatusReply(message) => Future.Unit } /** * Set key to hold the string value with the specified expire time in seconds * only if the key does not already exist. * * @param key, millis, value * @return true if the key was set, false if condition was not met. * @see http://redis.io.commands/set */ def setExNx(key: Buf, seconds: Long, value: Buf): Future[JBoolean] = doRequest(Set(key, value, Some(InSeconds(seconds)), true, false)) { case StatusReply(_) => FutureTrue case EmptyBulkReply => FutureFalse } /** * Set key to hold the string value with the specified expire time in seconds * only if the key already exist. * * @param key, millis, value * @return true if the key was set, false if condition was not met. * @see http://redis.io.commands/set */ def setExXx(key: Buf, seconds: Long, value: Buf): Future[JBoolean] = doRequest(Set(key, value, Some(InSeconds(seconds)), false, true)) { case StatusReply(_) => FutureTrue case EmptyBulkReply => FutureFalse } /** * Set key to hold string value if key does not exist. 
In that case, it is * equal to SET. When key already holds a value, no operation is performed. * * @param key, value * @return 1 if the key was set, 0 if the key was not set. * @see http://redis.io/commands/setnx */ def setNx(key: Buf, value: Buf): Future[JBoolean] = doRequest(SetNx(key, value)) { case IntegerReply(n) => Future.value(n == 1) } /** * Set key to hold the string value with the specified expire time in milliseconds. * * @param key, millis, value * @see http://redis.io.commands/set */ def setPx(key: Buf, millis: Long, value: Buf): Future[Unit] = doRequest(Set(key, value, Some(InMilliseconds(millis)))) { case StatusReply(_) => Future.Unit } /** * Set key to hold the string value with the specified expire time in milliseconds * only if the key does not already exist. * * @param key, millis, value * @return true if the key was set, false if condition was not met. * @see http://redis.io.commands/set */ def setPxNx(key: Buf, millis: Long, value: Buf): Future[JBoolean] = doRequest(Set(key, value, Some(InMilliseconds(millis)), true, false)) { case StatusReply(_) => FutureTrue case EmptyBulkReply => FutureFalse } /** * Set key to hold the string value with the specified expire time in milliseconds * only if the key already exist. * * @param key, millis, value * @return true if the key was set, false if condition was not met. * @see http://redis.io.commands/set */ def setPxXx(key: Buf, millis: Long, value: Buf): Future[JBoolean] = doRequest(Set(key, value, Some(InMilliseconds(millis)), false, true)) { case StatusReply(_) => FutureTrue case EmptyBulkReply => FutureFalse } /** * Set key to hold the string value only if the key already exist. * * @param key, value * @return true if the key was set, false if condition was not met. * @see http://redis.io.commands/set */ def setXx(key: Buf, value: Buf): Future[JBoolean] = doRequest(Set(key, value, None, false, true)) { case StatusReply(_) => FutureTrue case EmptyBulkReply => FutureFalse } /** * Overwrites part of the string stored at key, starting at the specified * offset, for the entire length of value. * * @param key, offset, value * @return the length of the string after it was modified. * @see http://redis.io/commands/setrange */ def setRange(key: Buf, offset: Int, value: Buf): Future[JLong] = doRequest(SetRange(key, offset, value)) { case IntegerReply(n) => Future.value(n) } /** * returns the length of the string value stored at key. * * @param key * @return the length of the string at key, or 0 when key does not exist. * @see http://redis.io/commands/strlen */ def strlen(key: Buf): Future[JLong] = doRequest(Strlen(key)) { case IntegerReply(n) => Future.value(n) } }
mkhq/finagle
finagle-redis/src/main/scala/com/twitter/finagle/redis/StringCommands.scala
Scala
apache-2.0
10,799
package org.jetbrains.plugins.scala package codeInspection package collections import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression import scala.collection.immutable.ArraySeq /** * Nikolay.Tropin * 2014-05-05 */ class FilterSizeInspection extends OperationOnCollectionInspection { override def possibleSimplificationTypes: ArraySeq[SimplificationType] = ArraySeq(FilterSize) } object FilterSize extends SimplificationType { override def hint: String = ScalaInspectionBundle.message("filter.size.hint") override def getSimplification(expr: ScExpression): Option[Simplification] = expr match { // TODO infix notation? case `.sizeOrLength`(qual`.filter`(cond)) => Some(replace(expr).withText(invocationText(qual, "count", cond)).highlightFrom(qual)) case _ => None } }
JetBrains/intellij-scala
scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/collections/FilterSizeInspection.scala
Scala
apache-2.0
840
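A tiny before/after sketch (not from the plugin) of the pattern this inspection targets: filter followed by size or length is replaced with a single count call.

object FilterSizeExample extends App {
  val xs = List(1, 2, 3, 4, 5)

  // Flagged form: builds an intermediate collection just to measure its size.
  val viaFilter = xs.filter(_ % 2 == 0).size

  // Suggested replacement: counts matching elements without the intermediate allocation.
  val viaCount = xs.count(_ % 2 == 0)

  assert(viaFilter == viaCount) // both are 2
  println(viaCount)
}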
package com.avsystem.commons package redis import java.io.File import org.apache.commons.io.FileUtils import org.scalatest.Suite import scala.concurrent.Await import scala.concurrent.duration._ /** * Author: ghik * Created: 27/06/16. */ trait UsesPreconfiguredCluster extends UsesActorSystem with UsesClusterServers { this: Suite => def ports: Range = 9000 to 9005 def preconfiguredDir: File = new File("preconfiguredCluster") protected def prepareDirectory(): Unit = FileUtils.copyDirectory(preconfiguredDir, clusterDir) override protected def beforeAll(): Unit = { super.beforeAll() val clients = addresses.map(addr => new RedisConnectionClient(addr)) val commands = clients.map(client => RedisApi.Connection.Async.BinaryTyped(client)) val initFuture = Future.traverse(commands)(c => waitUntil(c.clusterInfo.map(_.stateOk), 500.millis)) Await.result(initFuture, 30.seconds) clients.foreach(_.close()) } }
AVSystem/scala-commons
commons-redis/src/test/scala/com/avsystem/commons/redis/UsesPreconfiguredCluster.scala
Scala
mit
961
package provingground.learning

import spire.algebra._
import spire.implicits._

object GramSchmidt {
  // removes from `vec` its components along the given orthonormal vectors
  def makePerpFromON[V](orthonormals: Vector[V], vec: V)(
      implicit vs: InnerProductSpace[V, Double]
  ): V =
    orthonormals match {
      case Vector() => vec
      case init :+ last =>
        val recVec    = makePerpFromON(init, vec)
        val minusProj = -1.0 * (last dot recVec)
        recVec + (minusProj *: last)
    }

  // Gram-Schmidt orthonormalization; vectors whose perpendicular component vanishes are dropped
  def orthonormal[V](
      v: Vector[V]
  )(implicit vs: InnerProductSpace[V, Double]): Vector[V] =
    v match {
      case Vector() => Vector()
      case init :+ last =>
        val onInit   = orthonormal(init)
        val perpLast = makePerpFromON(onInit, last)
        val onLast   = perpLast.normalize
        if (perpLast.norm > 0) onInit :+ onLast else onInit
    }

  def onVec(vv: Vector[Vector[Double]]) = orthonormal(vv)

  def perpVec(vv: Vector[Vector[Double]], v: Vector[Double]) =
    makePerpFromON(onVec(vv), v)
}

object MonixGramSchmidt {
  import monix.eval._

  def makePerpFromON[V](orthonormals: Vector[V], vec: V)(
      implicit vs: InnerProductSpace[V, Double]
  ): Task[V] =
    Task.eval(orthonormals.isEmpty) flatMap {
      case true => Task.now(vec)
      case false =>
        for {
          recVec <- makePerpFromON(orthonormals.init, vec)
          minusProj = -1.0 * (orthonormals.last dot recVec)
        } yield recVec + (minusProj *: orthonormals.last)
    }

  def orthonormal[V](
      v: Vector[V]
  )(implicit vs: InnerProductSpace[V, Double]): Task[Vector[V]] =
    Task.eval(v.isEmpty) flatMap {
      case true => Task.now(Vector())
      case false =>
        import v._
        for {
          onInit   <- orthonormal(init)
          perpLast <- makePerpFromON(onInit, last)
          onLast = perpLast.normalize
        } yield if (perpLast.norm > 0) onInit :+ onLast else onInit
    }

  def onVec(vv: Vector[Vector[Double]]): Task[Vector[Vector[Double]]] =
    orthonormal(vv)

  def perpVec(
      vv: Vector[Vector[Double]],
      v: Vector[Double]
  ): Task[Vector[Double]] =
    onVec(vv).flatMap(makePerpFromON(_, v))
}

object MapVS {
  // implicit def mapVS[A]: VectorSpace[Map[A, Double], Double] = MapVS()

  def compose[A](
      base: Map[A, Double],
      step: A => Map[A, Double]
  ): Map[A, Double] = {
    // val vs = MapVS[A]()
    val groups = base.map { case (x, p) => p *: step(x) }
    groups
      .map(_.toVector)
      .flatten
      .groupBy(_._1)
      .view
      .mapValues(v => v.map(_._2).sum)
      .toMap
  }
}

object FieldGramSchmidt {
  def makePerpFromON[V, F](orthonormals: Vector[V], vec: V)(
      implicit vs: InnerProductSpace[V, F],
      field: Field[F],
      roots: NRoot[F]
  ): V =
    orthonormals match {
      case Vector() => vec
      case init :+ last =>
        val recVec    = makePerpFromON(init, vec)
        val minusProj = -1.0 * (last dot recVec)
        recVec + (minusProj *: last)
    }

  def orthonormal[V, F](
      v: Vector[V]
  )(
      implicit vs: InnerProductSpace[V, F],
      field: Field[F],
      roots: NRoot[F]
  ): Vector[V] =
    v match {
      case Vector() => Vector()
      case init :+ last =>
        val onInit   = orthonormal(init)
        val perpLast = makePerpFromON(onInit, last)
        val onLast   = perpLast.normalize
        if (perpLast.norm != 0) onInit :+ onLast else onInit
    }

  def onVec[F](
      vv: Vector[Vector[F]]
  )(implicit field: Field[F], roots: NRoot[F]) = orthonormal(vv)

  def perpVec[F](
      vv: Vector[Vector[F]],
      v: Vector[F]
  )(implicit field: Field[F], roots: NRoot[F]) =
    makePerpFromON(onVec(vv), v)
}

object MonixFieldGramSchmidt {
  import monix.eval._

  def makePerpFromON[V, F](orthonormals: Vector[V], vec: V)(
      implicit vs: InnerProductSpace[V, F],
      field: Field[F],
      roots: NRoot[F]
  ): Task[V] =
    Task.eval(orthonormals.isEmpty) flatMap {
      case true => Task.now(vec)
      case false =>
        for {
          recVec <- makePerpFromON(orthonormals.init, vec)
          minusProj = -1.0 * (orthonormals.last dot recVec)
        } yield recVec + (minusProj *: orthonormals.last)
    }

  def orthonormal[V, F](
      v: Vector[V]
  )(
      implicit vs: InnerProductSpace[V, F],
      field: Field[F],
      roots: NRoot[F]
  ): Task[Vector[V]] =
    Task.eval(v.isEmpty) flatMap {
      case true => Task.now(Vector())
      case false =>
        import v._
        for {
          onInit   <- orthonormal(init)
          perpLast <- makePerpFromON(onInit, last)
          onLast = perpLast.normalize
        } yield if (perpLast.norm != 0) onInit :+ onLast else onInit
    }

  def onVec[F](
      vv: Vector[Vector[F]]
  )(implicit field: Field[F], roots: NRoot[F]): Task[Vector[Vector[F]]] =
    orthonormal(vv)

  def perpVec[F](
      vv: Vector[Vector[F]],
      v: Vector[F]
  )(implicit field: Field[F], roots: NRoot[F]): Task[Vector[F]] =
    onVec(vv).flatMap(makePerpFromON(_, v))
}
siddhartha-gadgil/ProvingGround
core/src/main/scala/provingground/learning/GramSchmidt.scala
Scala
mit
5,010
package com.github.cuzfrog.maila.server

import java.util.Date

import javax.mail.Message.RecipientType
import javax.mail.internet.{InternetAddress, MimeMessage}
import javax.mail.{Address, MessagingException, Session, Transport}

private[server] trait Sender {
  def send(recipients: Seq[String], subject: String, text: String): (Boolean, String)

  def close(): Unit
}

private[server] object Sender {
  def apply(session: Session, transport: Transport, user: String): Sender =
    new JmSender(session, transport, user)

  private class JmSender(session: Session, transport: Transport, user: String) extends Sender {
    def send(recipients: Seq[String], subject: String, text: String): (Boolean, String) = {
      val addresses: Array[Address] = recipients.map(new InternetAddress(_)).toArray
      val message = new MimeMessage(session)
      message.setFrom(new InternetAddress(user))
      message.addRecipients(RecipientType.TO, addresses)
      message.setSubject(subject)
      message.setText(text)
      message.setSentDate(new Date())
      val msg = s"Sent message[${message.getSubject}] to[${message.getAllRecipients.mkString(",")}]"
      try {
        transport.sendMessage(message, message.getAllRecipients)
        (true, s"$msg successfully.")
      } catch {
        case e: MessagingException => (false, s"$msg Failed with msg:${e.getMessage}")
      }
    }

    def close(): Unit = transport.close()
  }
}
cuzfrog/maila
src/main/scala/com/github/cuzfrog/maila/server/Sender.scala
Scala
apache-2.0
1,437
package forimpatient.chapter01

/**
  * Created by Iryna Kharaborkina on 7/25/16.
  *
  * Solution to the Chapter 01 Exercise 10 'Scala for the Impatient' by Horstmann C.S.
  *
  * What do the take, drop, takeRight, and dropRight string functions do? What advantage or
  * disadvantage do they have over using substring?
  */
object Exercise10 extends App {
  println("Chapter 01 Exercise 10")

  val str = "Exercise09"
  println(str.take(3))      // returns string, containing first n symbols; here n == 3
  println(str.drop(3))      // returns string, containing all but first n symbols; here n == 3
  println(str.dropRight(3)) // returns string, containing all but last n symbols; here n == 3
  println(str.takeRight(3)) // returns string, containing last n symbols; here n == 3
}
Kiryna/Scala-for-the-Impatient
src/forimpatient/chapter01/Exercise10.scala
Scala
apache-2.0
775
/**
 * Copyright (c) 2013, The National Archives <[email protected]>
 * https://www.nationalarchives.gov.uk
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 */
package uk.gov.nationalarchives.csv.validator.schema

import scala.util.parsing.combinator.Parsers
import scala.language.implicitConversions

/**
 * TraceableParsers provides the Traceable
 * Parser `::=` which allows you to name parsers
 * and also to trace their parse attempts.
 *
 * The `::=` symbol was chosen as it aligns with
 * the syntax of EBNF which also leads to the
 * nice coincidence that your parser definitions
 * will likely look closer to their EBNF counterpart
 * definitions.
 *
 * @author Adam Retter <[email protected]>
 */
trait TraceableParsers extends Parsers {

  val trace: Boolean //whether tracing should be enabled or not

  class TraceableParser[+T](parser: Parser[T]) extends Parser[T] {
    def apply(in: Input): ParseResult[T] = {
      if(trace) {
        val first = in.first
        val pos = in.pos
        val offset = in.offset
        val parseResult = parser.apply(in)
        println(s"""${parser.toString()}.apply for token "$first" at position $pos offset $offset returns "$parseResult"""")
        parseResult
      } else {
        parser.apply(in)
      }
    }
  }

  /**
   * Implicit which converts a name and a parser into a TraceableParser
   *
   * e.g. Given the EBNF:
   * <code><pre>MyParser ::= "SomeValue"?</pre></code>
   *
   * You would write the Scala code:
   * <code><pre>val myParser : Option[Parser[String]] = "MyParser" ::= "SomeValue"?</pre></code>
   */
  implicit def toTraceableParser(name: String) = new {
    def ::=[T](p: Parser[T]) = new TraceableParser(p.named(name))
  }
}
adamretter/csv-validator
csv-validator-core/src/main/scala/uk/gov/nationalarchives/csv/validator/schema/TraceableParsers.scala
Scala
mpl-2.0
1,906
package com.twitter.finagle.memcached.util

object ParserUtils {

  val DIGITS = "^\\d+$"

}
firebase/finagle
finagle-memcached/src/main/scala/com/twitter/finagle/memcached/util/ParserUtils.scala
Scala
apache-2.0
91
package treeline

import org.scalacheck._

object RenderingSpecification extends Properties("rendering") {
  import Prop._

  def genTree = for {
    root    <- Gen.alphaStr
    branch0 <- Gen.alphaStr
    branch1 <- Gen.alphaStr
  } yield (root, branch0, branch1)

  property("render unicode trees") = forAll(genTree) {
    case (root, b0, b1) =>
      val res = Tree()(Branch(root, Branch(b0), Branch(b1)))
      res =? """%s
               |├── %s
               |└── %s
               |""".stripMargin.format(root, b0, b1)
  }

  property("render ascii trees") = forAll(genTree) {
    case (root, b0, b1) =>
      val res = Tree().ascii(Branch(root, Branch(b0), Branch(b1)))
      res =? """%s
               |+-- %s
               |`-- %s
               |""".stripMargin.format(root, b0, b1)
  }
}
softprops/treeline-scala
src/test/scala/rendering.scala
Scala
mit
802
/* * Copyright 2022 HM Revenue & Customs * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package controllers import model.Exceptions._ import model.UniqueIdentifier import model.exchange.assessor.AssessorAvailabilityExamples._ import model.exchange.assessor.AssessorExamples import model.exchange.{ Assessor, AssessorAvailabilities } import org.mockito.ArgumentMatchers.{ eq => eqTo, _ } import org.mockito.Mockito._ import play.api.libs.json.Json import play.api.test.Helpers._ import services.AuditService import services.assessor.AssessorService import testkit.MockitoImplicits._ import testkit.UnitWithAppSpec import scala.concurrent.Future class AssessorControllerSpec extends UnitWithAppSpec { "save assessor" must { "return OK and log AssessorSaved audit event" + "when save is successful" in new TestFixture { when(mockAssessorService.saveAssessor(eqTo(AssessorExamples.UserId1), eqTo(AssessorExamples.Assessor1))).thenReturn(Future.successful(())) val request = fakeRequest(AssessorExamples.Assessor1) val response = controller.saveAssessor(AssessorExamples.UserId1)(request) status(response) mustBe OK val logDetails = Map("assessor" -> AssessorExamples.Assessor1.toString) verify(mockAuditService).logEvent(eqTo("AssessorSaved"), eqTo(logDetails))(any(), any()) } "return FAILED_DEPENDENCY " + "when there is a CannotUpdateAssessorWhenSkillsAreRemovedAndFutureAllocationExistsException" in new TestFixture { val Request = fakeRequest(AssessorExamples.Assessor1) when(mockAssessorService.saveAssessor(eqTo(AssessorExamples.UserId1), eqTo(AssessorExamples.Assessor1))).thenReturn( Future.failed(CannotUpdateAssessorWhenSkillsAreRemovedAndFutureAllocationExistsException("", ""))) val response = controller.saveAssessor(AssessorExamples.UserId1)(Request) status(response) mustBe FAILED_DEPENDENCY verify(mockAuditService, never()).logEvent(any(), any())(any(), any()) } "return CONFLICT " + "when there is a OptimisticLockException" in new TestFixture { val Request = fakeRequest(AssessorExamples.Assessor1) when(mockAssessorService.saveAssessor(eqTo(AssessorExamples.UserId1), eqTo(AssessorExamples.Assessor1))).thenReturn( Future.failed(OptimisticLockException(""))) val response = controller.saveAssessor(AssessorExamples.UserId1)(Request) status(response) mustBe CONFLICT verify(mockAuditService, never()).logEvent(any(), any())(any(), any()) } } "add availability" must { "return Ok when availability is added" in new TestFixture { when(mockAssessorService.saveAvailability(any[AssessorAvailabilities])).thenReturnAsync() val response = controller.saveAvailability()(fakeRequest(AssessorAvailabilitiesSum)) status(response) mustBe OK } } "find assessor" must { "return Assessor when is successful" in new TestFixture { when(mockAssessorService.findAssessor(eqTo(AssessorExamples.UserId1))).thenReturn(Future.successful(AssessorExamples.Assessor1)) val response = controller.findAssessor(AssessorExamples.UserId1)(fakeRequest) status(response) mustBe OK verify(mockAssessorService).findAssessor(eqTo(AssessorExamples.UserId1)) 
contentAsJson(response) mustBe Json.toJson[Assessor](AssessorExamples.Assessor1) } "return Not Found when assessor cannot be found" in new TestFixture { when(mockAssessorService.findAssessor(UserId)).thenReturn(Future.failed(AssessorNotFoundException(UserId))) val response = controller.findAssessor(UserId)(fakeRequest) status(response) mustBe NOT_FOUND verify(mockAssessorService).findAssessor(eqTo(UserId)) } } "find availability" must { "return an assessor's availability" in new TestFixture { when(mockAssessorService.findAvailability(UserId)).thenReturnAsync(AssessorAvailabilitiesSum) val response = controller.findAvailability(UserId)(fakeRequest) status(response) mustBe OK contentAsJson(response) mustBe Json.toJson[AssessorAvailabilities](AssessorAvailabilitiesSum) } "return Not Found when availability cannot be found" in new TestFixture { when(mockAssessorService.findAvailability(UserId)).thenReturn(Future.failed(AssessorNotFoundException(UserId))) val response = controller.findAvailability(UserId)(fakeRequest) status(response) mustBe NOT_FOUND } } "count submitted" must { "return zero if there are none submitted" in new TestFixture { when(mockAssessorService.countSubmittedAvailability()).thenReturnAsync(0) val response = controller.countSubmittedAvailability()(fakeRequest) status(response) mustBe OK contentAsJson(response) mustBe Json.obj("size" -> 0) } "return five if there are five submitted" in new TestFixture { when(mockAssessorService.countSubmittedAvailability()).thenReturnAsync(5) val response = controller.countSubmittedAvailability()(fakeRequest) status(response) mustBe OK contentAsJson(response) mustBe Json.obj("size" -> 5) } } "removeAssessor" must { "return CONFLICT " + "when there is CannotRemoveAssessorWhenFutureAllocationExistsException" in new TestFixture { val userId = UniqueIdentifier.randomUniqueIdentifier when(mockAssessorService.remove(eqTo(userId))).thenReturn( Future.failed(CannotRemoveAssessorWhenFutureAllocationExistsException("", ""))) val response = controller.removeAssessor(userId)(fakeRequest) status(response) mustBe CONFLICT response.futureValue verify(mockAuditService, never()).logEvent(any(), any())(any(), any()) } "return NOT_FOUND " + "when there is CannotRemoveAssessorWhenFutureAllocationExistsException" in new TestFixture { val userId = UniqueIdentifier.randomUniqueIdentifier when(mockAssessorService.remove(eqTo(userId))).thenReturn(Future.failed(AssessorNotFoundException(""))) val response = controller.removeAssessor(userId)(fakeRequest) status(response) mustBe NOT_FOUND response.futureValue verify(mockAuditService, never()).logEvent(any(), any())(any(), any()) } "return OK and log AssessorRemoved audit event" + "when assessor is removed" in new TestFixture { val userId = UniqueIdentifier.randomUniqueIdentifier when(mockAssessorService.remove(eqTo(userId))).thenReturnAsync() val response = controller.removeAssessor(userId)(fakeRequest) status(response) mustBe OK response.futureValue verify(mockAssessorService).remove(eqTo(userId)) val logDetails = Map("userId" -> userId.toString) verify(mockAuditService).logEvent(eqTo("AssessorRemoved"), eqTo(logDetails))(any(), any()) } } trait TestFixture { val mockAssessorService = mock[AssessorService] val mockAuditService = mock[AuditService] val controller = new AssessorController( stubControllerComponents(playBodyParsers = stubPlayBodyParsers(materializer)), mockAssessorService, mockAuditService ) } }
hmrc/fset-faststream
test/controllers/AssessorControllerSpec.scala
Scala
apache-2.0
7,631
package com.github.takezoe.solr.scala.sample

import com.github.takezoe.solr.scala.Order
import com.github.takezoe.solr.scala.async.AsyncSolrClient

import scala.concurrent._
import scala.concurrent.duration._
import scala.util.{Failure, Success}
import scala.concurrent.ExecutionContext.Implicits.global

object AsyncSolrClientSample extends App {

  val client = new AsyncSolrClient("http://localhost:8983/solr")

  val f1 = client.register(Map("id" -> "005", "name" -> "ThinkPad X1 Carbon", "manu" -> "Lenovo"))

  val f2 = client.withTransaction {
    for {
      _ <- client.add(Map("id" -> "006", "name" -> "Nexus7 2012", "manu" -> "ASUS"))
      _ <- client.add(Map("id" -> "007", "name" -> "Nexus7 2013", "manu" -> "ASUS"))
    } yield ()
  }

  val f3 = client.query("name:%name%")
    .fields("id", "manu", "name")
    .facetFields("manu")
    .sortBy("id", Order.asc)
    .getResultAsMap(Map("name" -> "ThinkPad X201s"))

  f3.onComplete {
    case Success(result) => {
      println("count: " + result.numFound)
      result.documents.foreach { doc =>
        println("id: " + doc("id"))
        println(" manu: " + doc.get("manu").getOrElse("<NULL>"))
        println(" name: " + doc("name"))
      }
    }
    case Failure(t) => t.printStackTrace()
  }

  val future = for {
    _ <- Future.sequence(List(f1, f2))
    _ <- f3
  } yield ()

  Await.result(future, Duration.Inf)

  client.shutdown()
}
takezoe/solr-scala-client
src/main/scala/com/github/takezoe/solr/scala/sample/AsyncSolrClientSample.scala
Scala
apache-2.0
1,450
package org.sisioh.aws4s.eb.model

import com.amazonaws.services.elasticbeanstalk.model.CreateStorageLocationResult
import org.sisioh.aws4s.PimpedType

object CreateStorageLocationResultFactory {

  def create(): CreateStorageLocationResult = new CreateStorageLocationResult()

}

class RichCreateStorageLocationResult(val underlying: CreateStorageLocationResult)
  extends AnyVal with PimpedType[CreateStorageLocationResult] {

  def s3BucketOpt: Option[String] = Option(underlying.getS3Bucket)

  def s3BucketOpt_=(value: Option[String]): Unit =
    underlying.setS3Bucket(value.orNull)

  def withS3BucketOpt(value: Option[String]): CreateStorageLocationResult =
    underlying.withS3Bucket(value.orNull)

}
sisioh/aws4s
aws4s-eb/src/main/scala/org/sisioh/aws4s/eb/model/RichCreateStorageLocationResult.scala
Scala
mit
717
package jp.ne.opt.chronoscala

import jp.ne.opt.chronoscala.Imports._
import org.scalacheck.{Prop, Properties}

object RichLocalDateTimeSpec extends Properties("RichLocalDateTime") with Gens {
  import Prop.forAll

  property("totally ordered") = forAll(for {
    a <- localDateTimeGen
    b <- localDateTimeGen
    c <- localDateTimeGen
  } yield (a, b, c)) {
    case (a, b, c) =>
      val antisymmetry = !(a <= b && b <= a) || a == b
      val transitivity = !(a <= b && b <= c) || a <= c
      val totality     = a <= b || b <= a

      antisymmetry && transitivity && totality
  }
}
opt-tech/chronoscala
shared/src/test/scala/jp/ne/opt/chronoscala/RichLocalDateTimeSpec.scala
Scala
mit
573
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.spark import javax.cache.Cache import org.apache.ignite.cache.query._ import org.apache.ignite.cluster.ClusterNode import org.apache.ignite.configuration.CacheConfiguration import org.apache.ignite.internal.processors.cache.query.QueryCursorEx import org.apache.ignite.internal.processors.query.GridQueryFieldMetadata import org.apache.ignite.lang.IgniteUuid import org.apache.ignite.spark.impl._ import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi import org.apache.ignite.spi.discovery.tcp.internal.TcpDiscoveryNode import org.apache.spark._ import org.apache.spark.rdd.RDD import org.apache.spark.sql._ import org.apache.spark.sql.types._ import scala.collection.JavaConversions._ /** * Ignite RDD. Represents Ignite cache as Spark RDD abstraction. * * @param ic Ignite context to use. * @param cacheName Cache name. * @param cacheCfg Cache configuration. * @tparam K Key type. * @tparam V Value type. */ class IgniteRDD[K, V] ( val ic: IgniteContext, val cacheName: String, val cacheCfg: CacheConfiguration[K, V], val keepBinary: Boolean ) extends IgniteAbstractRDD[(K, V), K, V] (ic, cacheName, cacheCfg, keepBinary) { /** * Computes iterator based on given partition. * * @param part Partition to use. * @param context Task context. * @return Partition iterator. */ override def compute(part: Partition, context: TaskContext): Iterator[(K, V)] = { val cache = ensureCache() val qry: ScanQuery[K, V] = new ScanQuery[K, V](part.index) val cur = cache.query(qry) TaskContext.get().addTaskCompletionListener((_) ⇒ cur.close()) new IgniteQueryIterator[Cache.Entry[K, V], (K, V)](cur.iterator(), entry ⇒ { (entry.getKey, entry.getValue) }) } /** * Gets partitions for the given cache RDD. * * @return Partitions. */ override protected[spark] def getPartitions: Array[Partition] = { ensureCache() val parts = ic.ignite().affinity(cacheName).partitions() (0 until parts).map(new IgnitePartition(_)).toArray } /** * Gets preferred locations for the given partition. * * @param split Split partition. * @return */ override protected[spark] def getPreferredLocations(split: Partition): Seq[String] = { ensureCache() if (ic.ignite().configuration().getDiscoverySpi().isInstanceOf[TcpDiscoverySpi]) { ic.ignite().affinity(cacheName).mapPartitionToPrimaryAndBackups(split.index) .map(_.asInstanceOf[TcpDiscoveryNode].socketAddresses()).flatten.map(_.getHostName).toList } else { ic.ignite().affinity(cacheName).mapPartitionToPrimaryAndBackups(split.index) .flatten(_.hostNames).toSeq } } /** * Tells whether this IgniteRDD is empty or not. * * @return Whether this IgniteRDD is empty or not. */ override def isEmpty(): Boolean = { count() == 0 } /** * Gets number of tuples in this IgniteRDD. * * @return Number of tuples in this IgniteRDD. 
*/ override def count(): Long = { val cache = ensureCache() cache.size() } /** * Runs an object SQL on corresponding Ignite cache. * * @param typeName Type name to run SQL against. * @param sql SQL query to run. * @param args Optional SQL query arguments. * @return RDD with query results. */ def objectSql(typeName: String, sql: String, args: Any*): RDD[(K, V)] = { val qry: SqlQuery[K, V] = new SqlQuery[K, V](typeName, sql) qry.setArgs(args.map(_.asInstanceOf[Object]):_*) new IgniteSqlRDD[(K, V), Cache.Entry[K, V], K, V](ic, cacheName, cacheCfg, qry, entry ⇒ (entry.getKey, entry.getValue), keepBinary) } /** * Runs an SQL fields query. * * @param sql SQL statement to run. * @param args Optional SQL query arguments. * @return `DataFrame` instance with the query results. */ def sql(sql: String, args: Any*): DataFrame = { val qry = new SqlFieldsQuery(sql) qry.setArgs(args.map(_.asInstanceOf[Object]):_*) val schema = buildSchema(ensureCache().query(qry).asInstanceOf[QueryCursorEx[java.util.List[_]]].fieldsMeta()) val rowRdd = new IgniteSqlRDD[Row, java.util.List[_], K, V]( ic, cacheName, cacheCfg, qry, list ⇒ Row.fromSeq(list), keepBinary) ic.sqlContext.createDataFrame(rowRdd, schema) } /** * Saves values from given RDD into Ignite. A unique key will be generated for each value of the given RDD. * * @param rdd RDD instance to save values from. */ def saveValues(rdd: RDD[V]) = { rdd.foreachPartition(it ⇒ { val ig = ic.ignite() ensureCache() val locNode = ig.cluster().localNode() val node: Option[ClusterNode] = ig.cluster().forHost(locNode).nodes().find(!_.eq(locNode)) val streamer = ig.dataStreamer[Object, V](cacheName) try { it.foreach(value ⇒ { val key = affinityKeyFunc(value, node.orNull) streamer.addData(key, value) }) } finally { streamer.close() } }) } /** * Saves values from given RDD into Ignite. A unique key will be generated for each value of the given RDD. * * @param rdd RDD instance to save values from. * @param f Transformation function. */ def saveValues[T](rdd: RDD[T], f: (T, IgniteContext) ⇒ V) = { rdd.foreachPartition(it ⇒ { val ig = ic.ignite() ensureCache() val locNode = ig.cluster().localNode() val node: Option[ClusterNode] = ig.cluster().forHost(locNode).nodes().find(!_.eq(locNode)) val streamer = ig.dataStreamer[Object, V](cacheName) try { it.foreach(t ⇒ { val value = f(t, ic) val key = affinityKeyFunc(value, node.orNull) streamer.addData(key, value) }) } finally { streamer.close() } }) } /** * Saves values from the given key-value RDD into Ignite. * * @param rdd RDD instance to save values from. * @param overwrite Boolean flag indicating whether the call on this method should overwrite existing * values in Ignite cache. * @param skipStore Sets flag indicating that write-through behavior should be disabled for data streaming. */ def savePairs(rdd: RDD[(K, V)], overwrite: Boolean = false, skipStore: Boolean = false) = { rdd.foreachPartition(it ⇒ { val ig = ic.ignite() // Make sure to deploy the cache ensureCache() val streamer = ig.dataStreamer[K, V](cacheName) try { streamer.allowOverwrite(overwrite) streamer.skipStore(skipStore) it.foreach(tup ⇒ { streamer.addData(tup._1, tup._2) }) } finally { streamer.close() } }) } /** * Saves values from the given RDD into Ignite. * * @param rdd RDD instance to save values from. * @param f Transformation function. * @param overwrite Boolean flag indicating whether the call on this method should overwrite existing * values in Ignite cache. 
* @param skipStore Sets flag indicating that write-through behavior should be disabled for data streaming. */ def savePairs[T](rdd: RDD[T], f: (T, IgniteContext) ⇒ (K, V), overwrite: Boolean, skipStore: Boolean) = { rdd.foreachPartition(it ⇒ { val ig = ic.ignite() // Make sure to deploy the cache ensureCache() val streamer = ig.dataStreamer[K, V](cacheName) try { streamer.allowOverwrite(overwrite) streamer.skipStore(skipStore) it.foreach(t ⇒ { val tup = f(t, ic) streamer.addData(tup._1, tup._2) }) } finally { streamer.close() } }) } /** * Saves values from the given RDD into Ignite. * * @param rdd RDD instance to save values from. * @param f Transformation function. */ def savePairs[T](rdd: RDD[T], f: (T, IgniteContext) ⇒ (K, V)): Unit = { savePairs(rdd, f, overwrite = false, skipStore = false) } /** * Removes all values from the underlying Ignite cache. */ def clear(): Unit = { ensureCache().removeAll() } /** * Returns `IgniteRDD` that will operate with binary objects. This method * behaves similar to [[org.apache.ignite.IgniteCache#withKeepBinary]]. * * @return New `IgniteRDD` instance for binary objects. */ def withKeepBinary[K1, V1](): IgniteRDD[K1, V1] = { new IgniteRDD[K1, V1]( ic, cacheName, cacheCfg.asInstanceOf[CacheConfiguration[K1, V1]], true) } /** * Builds spark schema from query metadata. * * @param fieldsMeta Fields metadata. * @return Spark schema. */ private def buildSchema(fieldsMeta: java.util.List[GridQueryFieldMetadata]): StructType = { new StructType(fieldsMeta.map(i ⇒ new StructField(i.fieldName(), IgniteRDD.dataType(i.fieldTypeName(), i.fieldName()), nullable = true)) .toArray) } /** * Generates affinity key for given cluster node. * * @param value Value to generate key for. * @param node Node to generate key for. * @return Affinity key. */ private def affinityKeyFunc(value: V, node: ClusterNode): IgniteUuid = { val aff = ic.ignite().affinity[IgniteUuid](cacheName) Stream.from(1, Math.max(1000, aff.partitions() * 2)) .map(_ ⇒ IgniteUuid.randomUuid()).find(node == null || aff.mapKeyToNode(_).eq(node)) .getOrElse(IgniteUuid.randomUuid()) } } object IgniteRDD { /** * Default decimal type. */ private[spark] val DECIMAL = DecimalType(DecimalType.MAX_PRECISION, 3) /** * Gets Spark data type based on type name. * * @param typeName Type name. * @return Spark data type. */ def dataType(typeName: String, fieldName: String): DataType = typeName match { case "java.lang.Boolean" ⇒ BooleanType case "java.lang.Byte" ⇒ ByteType case "java.lang.Short" ⇒ ShortType case "java.lang.Integer" ⇒ IntegerType case "java.lang.Long" ⇒ LongType case "java.lang.Float" ⇒ FloatType case "java.lang.Double" ⇒ DoubleType case "java.math.BigDecimal" ⇒ DECIMAL case "java.lang.String" ⇒ StringType case "java.util.Date" ⇒ DateType case "java.sql.Date" ⇒ DateType case "java.sql.Timestamp" ⇒ TimestampType case "[B" ⇒ BinaryType case _ ⇒ StructType(new Array[StructField](0)) } /** * Converts java.util.Date to java.sql.Date as j.u.Date not supported by Spark SQL. * * @param input Any value. * @return If input is java.util.Date returns java.sql.Date representation of given value, otherwise returns unchanged value. */ def convertIfNeeded(input: Any): Any = if (input == null) input else { input match { case timestamp: java.sql.Timestamp ⇒ timestamp //Spark SQL doesn't support java.util.Date see - https://spark.apache.org/docs/latest/sql-programming-guide.html#data-types case date: java.util.Date ⇒ new java.sql.Date(date.getTime) case _ ⇒ input } } }
NSAmelchev/ignite
modules/spark/src/main/scala/org/apache/ignite/spark/IgniteRDD.scala
Scala
apache-2.0
12,975
package org.jetbrains.plugins.scala.lang.completion3

import com.intellij.codeInsight.completion.CompletionType
import com.intellij.codeInsight.lookup.LookupElementPresentation
import com.intellij.openapi.vfs.VfsUtil
import org.jetbrains.plugins.scala.codeInsight.ScalaCodeInsightTestBase
import org.junit.Assert

/**
 * @author Alefas
 * @since 23.03.12
 */
class ScalaLookupRenderingTest extends ScalaCodeInsightTestBase {
  def testJavaVarargs() {
    val javaFileText =
      """
        |package a;
        |
        |public class Java {
        | public static void foo(int... x) {}
        |}
      """.stripMargin('|').replaceAll("\\r", "").trim()
    val fileText =
      """
        |import a.Java
        |class A {
        | Java.fo<caret>
        |}
      """.stripMargin('|').replaceAll("\\r", "").trim()
    val myVFile = getSourceRootAdapter.createChildDirectory(null, "a").createChildData(null, "Java.java")
    VfsUtil.saveText(myVFile, javaFileText)

    configureFromFileTextAdapter("dummy.scala", fileText)
    val (activeLookup, _) = complete(1, CompletionType.BASIC)

    val resultText =
      """
        |foo(x: Int*)
      """.stripMargin('|').replaceAll("\\r", "").trim()

    val result = activeLookup.filter(_.getLookupString == "foo").map(p => {
      val presentation: LookupElementPresentation = new LookupElementPresentation
      p.renderElement(presentation)
      presentation.getItemText + presentation.getTailText
    }).sorted.mkString("\\n")

    Assert.assertEquals(resultText, result)
  }
}
triggerNZ/intellij-scala
test/org/jetbrains/plugins/scala/lang/completion3/ScalaLookupRenderingTest.scala
Scala
apache-2.0
1,511
package models

import java.sql.Connection

case class CreateItem(
  localeId: Long,
  siteId: Long,
  categoryId: Long,
  itemName: String,
  taxId: Long,
  currencyId: Long,
  price: BigDecimal,
  listPrice: Option[BigDecimal],
  costPrice: BigDecimal,
  description: String,
  isCoupon: Boolean
)(
  implicit itemRepo: ItemRepo,
  siteRepo: SiteRepo
) {
  def save(hide: Boolean)(implicit conn: Connection) {
    itemRepo.createItem(this, hide)
  }

  def site(implicit conn: Connection) = siteRepo(siteId)
}
ruimo/store2
app/models/CreateItem.scala
Scala
apache-2.0
497
package dhg.ccg.parse.pcfg

import dhg.ccg.cat._
import dhg.ccg.rule._
import dhg.ccg.tagdict.TagDictionary
import dhg.util._
import scalaz._
import Scalaz._
import dhg.ccg.prob._
import dhg.ccg.parse._
import dhg.ccg.util._

trait PcfgWeighterI extends Serializable {
  def logWeight(tree: CcgTreeI,
    logRootDist: IndirectSparseVec[Double], // t -> p
    logBinyDist: Array[IndirectSparseVec[IndirectSparseVec[Double]]], // t -> u -> v -> p
    logUnryDist: IndirectSparseVec[IndirectSparseVec[Double]], // t -> u -> p
    logTermDist: Array[Vec[Double]], // t -> w -> p
    logPmixDist: Array[Array[Double]]) // t -> p
    : Double
}

class SimplePcfgWeighterI() { // extends PcfgWeighterI {

  def logWeight(tree: CcgTreeI,
    logRootDist: IndirectSparseVec[Double], // t -> p
    logBinyDist: Array[IndirectSparseVec[IndirectSparseVec[Double]]], // t -> u -> v -> p
    logUnryDist: IndirectSparseVec[IndirectSparseVec[Double]], // t -> u -> p
    logTermDist: Array[Vec[Double]], // t -> w -> p
    logPmixDist: Array[Array[Double]]) // t -> p
    : Double = {
    def p(tree: CcgTreeI): Double = tree match {
      case CcgBinodeI(ij, ik, kj) => logBinyDist(ij)(ik.cat)(kj.cat) + logPmixDist(ij)(0) + p(ik) + p(kj)
      case CcgUnodeI(ij, sub)     => logUnryDist(ij)(sub.cat) + logPmixDist(ij)(1) + p(sub)
      case CcgLeafI(ij, word)     => logTermDist(ij)(word) + logPmixDist(ij)(2)
    }
    logRootDist(tree.cat) + p(tree)
    //assert(r.nonZero && !r.isNaN, f"pcfgWeight($tree) = ${r.logValue} ${r.logValue > Double.NegativeInfinity}")
  }
}
dhgarrette/2015-ccg-parsing
src/main/scala/dhg/ccg/parse/pcfg/PcfgWeighterI.scala
Scala
apache-2.0
1,811
package io.surfkit.clientlib import scala.scalajs.js import js.annotation._ import js.JSConverters._ package components { import org.scalajs.dom.Event import org.scalajs.dom.CanvasRenderingContext2D trait ChartDataSet extends js.Object { var label: String = js.native var fillColor: String = js.native var strokeColor: String = js.native var pointColor: String = js.native var pointStrokeColor: String = js.native var pointHighlightFill: String = js.native var pointHighlightStroke: String = js.native var highlightFill: String = js.native var highlightStroke: String = js.native var data: js.Array[Double] = js.native } object ChartDataSet { def apply( label: String = null, fillColor: String = null, strokeColor: String = null, pointColor: String = null, pointStrokeColor: String = null, pointHighlightFill: String = null, pointHighlightStroke: String = null, highlightFill: String = null, highlightStroke: String = null, data: Seq[Double] = Nil ): ChartDataSet = { js.Dynamic.literal( label = label, fillColor = fillColor, strokeColor = strokeColor, pointColor = pointColor, pointStrokeColor = pointStrokeColor, pointHighlightFill = pointHighlightFill, pointHighlightStroke = pointHighlightStroke, highlightFill = highlightFill, highlightStroke = highlightStroke, data = data.toJSArray ).asInstanceOf[ChartDataSet] } } trait LinearChartData extends js.Object { var labels: js.Array[String] = js.native var datasets: js.Array[ChartDataSet] = js.native } object LinearChartData { def apply( labels: Seq[String] = null, datasets: Seq[ChartDataSet] = null ): LinearChartData = { js.Dynamic.literal( labels = labels.toJSArray, datasets = datasets.toJSArray ).asInstanceOf[LinearChartData] } } trait CircularChartData extends js.Object { var value: Double = js.native var color: String = js.native var highlight: String = js.native var label: String = js.native } object CircularChartData { def apply( value: Double = 1, color: String = null, highlight: String = null, label: String = null ): CircularChartData = { js.Dynamic.literal( value = value, color = color, highlight = highlight, label = label ).asInstanceOf[CircularChartData] } } trait ChartSettings extends js.Object { var animation: Boolean = js.native var animationSteps: Double = js.native var animationEasing: String = js.native var showScale: Boolean = js.native var scaleOverride: Boolean = js.native var scaleLineColor: String = js.native var scaleLineWidth: Double = js.native var scaleShowLabels: Boolean = js.native var scaleLabel: String = js.native var scaleIntegersOnly: Boolean = js.native var scaleBeginAtZero: Boolean = js.native var scaleFontFamily: String = js.native var scaleFontSize: Double = js.native var scaleFontStyle: String = js.native var scaleFontColor: String = js.native var responsive: Boolean = js.native var maintainAspectRatio: Boolean = js.native var showTooltips: Boolean = js.native var tooltipEvents: js.Array[String] = js.native var tooltipFillColor: String = js.native var tooltipFontFamily: String = js.native var tooltipFontSize: Double = js.native var tooltipFontStyle: String = js.native var tooltipFontColor: String = js.native var tooltipTitleFontFamily: String = js.native var tooltipTitleFontSize: Double = js.native var tooltipTitleFontStyle: String = js.native var tooltipTitleFontColor: String = js.native var tooltipYPadding: Double = js.native var tooltipXPadding: Double = js.native var tooltipCaretSize: Double = js.native var tooltipCornerRadius: Double = js.native var tooltipXOffset: Double = js.native var tooltipTemplate: String = 
js.native var multiTooltipTemplate: String = js.native var onAnimationProgress: js.Function0[Any] = js.native var onAnimationComplete: js.Function0[Any] = js.native } object ChartSettings { def apply( animation: Boolean = true, animationSteps: Double = 60, animationEasing: String = "easeOutQuart", showScale: Boolean = true, scaleOverride: Boolean = false, scaleLineColor: String = "rgba(0,0,0,0.1)", scaleLineWidth: Double = 1, scaleShowLabels: Boolean = true, scaleLabel: String = "<%value%>", scaleIntegersOnly: Boolean = true, scaleBeginAtZero: Boolean = false, scaleFontFamily: String = "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif", scaleFontSize: Double = 12, scaleFontStyle: String = "normal", scaleFontColor: String = "#666", responsive: Boolean = true, maintainAspectRatio: Boolean = false, showTooltips: Boolean = true, tooltipEvents: Seq[String] = Seq("mousemove", "touchstart", "touchmove"), tooltipFillColor: String = "rgba(0,0,0,0.8)", tooltipFontFamily: String = "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif", tooltipFontSize: Double = 14, tooltipFontStyle: String = "normal", tooltipFontColor: String = "#fff", tooltipTitleFontFamily: String = "'Helvetica Neue', 'Helvetica', 'Arial', sans-serif", tooltipTitleFontSize: Double = 14, tooltipTitleFontStyle: String = "bold", tooltipTitleFontColor: String = "#fff", tooltipYPadding: Double = 6, tooltipXPadding: Double = 6, tooltipCaretSize: Double = 8, tooltipCornerRadius: Double = 6, tooltipXOffset: Double = 10, tooltipTemplate: String = "<%if (label){%><%=label%>: <%}%><%= value %>", multiTooltipTemplate: String = "<%= value %>", onAnimationProgress: js.Function0[Any] = () => {}, onAnimationComplete: js.Function0[Any] = () => {} ): ChartSettings = { js.Dynamic.literal( animation = animation, animationSteps = animationSteps, animationEasing = animationEasing, showScale = showScale, scaleOverride = scaleOverride, scaleLineColor = scaleLineColor, scaleLineWidth = scaleLineWidth, scaleShowLabels = scaleShowLabels, scaleLabel = scaleLabel, scaleIntegersOnly = scaleIntegersOnly, scaleBeginAtZero = scaleBeginAtZero, scaleFontFamily = scaleFontFamily, scaleFontSize = scaleFontSize, scaleFontStyle = scaleFontStyle, scaleFontColor = scaleFontColor, responsive = responsive, maintainAspectRatio = maintainAspectRatio, showTooltips = showTooltips, tooltipEvents = tooltipEvents.toJSArray, tooltipFillColor = tooltipFillColor, tooltipFontFamily = tooltipFontFamily, tooltipFontSize = tooltipFontSize, tooltipFontStyle = tooltipFontStyle, tooltipFontColor = tooltipFontColor, tooltipTitleFontFamily = tooltipTitleFontFamily, tooltipTitleFontSize = tooltipTitleFontSize, tooltipTitleFontStyle = tooltipTitleFontStyle, tooltipTitleFontColor = tooltipTitleFontColor, tooltipYPadding = tooltipYPadding, tooltipXPadding = tooltipXPadding, tooltipCaretSize = tooltipCaretSize, tooltipCornerRadius = tooltipCornerRadius, tooltipXOffset = tooltipXOffset, tooltipTemplate = tooltipTemplate, multiTooltipTemplate = multiTooltipTemplate, onAnimationProgress = onAnimationProgress, onAnimationComplete = onAnimationComplete ).asInstanceOf[ChartSettings] } } trait ChartOptions extends js.Object { var scaleShowGridLines: Boolean = js.native var scaleGridLineColor: String = js.native var scaleGridLineWidth: Double = js.native var legendTemplate: String = js.native } object ChartOptions { def apply( scaleShowGridLines: Boolean = false, scaleGridLineColor: String = null, scaleGridLineWidth: Double = 1.0, legendTemplate: String = null ): ChartOptions = { js.Dynamic.literal( 
scaleShowGridLines = scaleShowGridLines, scaleGridLineColor = scaleGridLineColor, scaleGridLineWidth = scaleGridLineWidth, legendTemplate = legendTemplate ).asInstanceOf[ChartOptions] } } trait PointsAtEvent extends js.Object { var value: Double = js.native var label: String = js.native var datasetLabel: String = js.native var strokeColor: String = js.native var fillColor: String = js.native var highlightFill: String = js.native var highlightStroke: String = js.native var x: Double = js.native var y: Double = js.native } trait ChartInstance extends js.Object { var clear: js.Function0[Unit] = js.native var stop: js.Function0[Unit] = js.native var resize: js.Function0[Unit] = js.native var destroy: js.Function0[Unit] = js.native var toBase64Image: js.Function0[String] = js.native var generateLegend: js.Function0[String] = js.native } trait LinearInstance extends ChartInstance { var getPointsAtEvent: js.Function1[Event, js.Array[PointsAtEvent]] = js.native var update: js.Function0[Unit] = js.native var addData: js.Function2[js.Array[Double], String, Unit] = js.native var removeData: js.Function0[Unit] = js.native } trait CircularInstance extends ChartInstance { var getSegmentsAtEvent: js.Function1[Event, js.Array[js.Any]] = js.native var update: js.Function0[Unit] = js.native var addData: js.Function2[CircularChartData, Double, Unit] = js.native var removeData: js.Function1[Double, Unit] = js.native var segments: js.Array[CircularChartData] = js.native } trait LineChartOptions extends ChartOptions { var scaleShowHorizontalLines: Boolean = js.native var scaleShowVerticalLines: Boolean = js.native var bezierCurve: Boolean = js.native var bezierCurveTension: Double = js.native var pointDot: Boolean = js.native var pointDotRadius: Double = js.native var pointDotStrokeWidth: Double = js.native var pointHitDetectionRadius: Double = js.native var datasetStroke: Boolean = js.native var datasetStrokeWidth: Double = js.native var datasetFill: Boolean = js.native var responsive: Boolean = js.native var maintainAspectRatio: Boolean = js.native } object LineChartOptions { def apply( scaleShowHorizontalLines: Boolean = true, scaleShowVerticalLines: Boolean = true, bezierCurve: Boolean = true, bezierCurveTension: Double = 1.0, pointDot: Boolean = true, pointDotRadius: Double = 2.0, pointDotStrokeWidth: Double = 1.0, pointHitDetectionRadius: Double = 2.0, datasetStroke: Boolean = true, datasetStrokeWidth: Double = 1.0, datasetFill: Boolean = true, responsive: Boolean = true, maintainAspectRatio: Boolean = false ): LineChartOptions = { js.Dynamic.literal( scaleShowHorizontalLines = scaleShowHorizontalLines, scaleShowVerticalLines = scaleShowVerticalLines, bezierCurve = bezierCurve, bezierCurveTension = bezierCurveTension, pointDot = pointDot, pointDotRadius = pointDotRadius, pointDotStrokeWidth = pointDotStrokeWidth, pointHitDetectionRadius = pointHitDetectionRadius, datasetStroke = datasetStroke, datasetStrokeWidth = datasetStrokeWidth, datasetFill = datasetFill, responsive = responsive, maintainAspectRatio = maintainAspectRatio ).asInstanceOf[LineChartOptions] } } trait BarChartOptions extends ChartOptions { var scaleBeginAtZero: Boolean = js.native var scaleShowHorizontalLines: Boolean = js.native var scaleShowVerticalLines: Boolean = js.native var barShowStroke: Boolean = js.native var barStrokeWidth: Double = js.native var barValueSpacing: Double = js.native var barDatasetSpacing: Double = js.native var responsive: Boolean = js.native var maintainAspectRatio: Boolean = js.native } object BarChartOptions 
{ def apply( scaleBeginAtZero: Boolean = true, scaleShowHorizontalLines: Boolean = true, scaleShowVerticalLines: Boolean = true, barShowStroke: Boolean = true, barStrokeWidth: Double = 2.0, barValueSpacing: Double = 2.0, barDatasetSpacing: Double = 2.0, responsive: Boolean = true, maintainAspectRatio: Boolean = false ): BarChartOptions = { js.Dynamic.literal( scaleBeginAtZero = scaleBeginAtZero, scaleShowHorizontalLines = scaleShowHorizontalLines, scaleShowVerticalLines = scaleShowVerticalLines, barShowStroke = barShowStroke, barValueSpacing = barValueSpacing, barStrokeWidth = barStrokeWidth, barDatasetSpacing = barDatasetSpacing, responsive = responsive, maintainAspectRatio = maintainAspectRatio ).asInstanceOf[BarChartOptions] } } trait RadarChartOptions extends js.Object { var scaleShowLine: Boolean = js.native var angleShowLineOut: Boolean = js.native var scaleShowLabels: Boolean = js.native var scaleBeginAtZero: Boolean = js.native var angleLineColor: String = js.native var angleLineWidth: Double = js.native var pointLabelFontFamily: String = js.native var pointLabelFontStyle: String = js.native var pointLabelFontSize: Double = js.native var pointLabelFontColor: String = js.native var pointDot: Boolean = js.native var pointDotRadius: Double = js.native var pointDotStrokeWidth: Double = js.native var pointHitDetectionRadius: Double = js.native var datasetStroke: Boolean = js.native var datasetStrokeWidth: Double = js.native var datasetFill: Boolean = js.native var legendTemplate: String = js.native } object RadarChartOptions { def apply( scaleShowLine: Boolean = true, angleShowLineOut: Boolean = true, scaleShowLabels: Boolean = false, scaleBeginAtZero: Boolean = true, angleLineColor: String = "rgba(0,0,0,.1)", angleLineWidth: Double = 1, pointLabelFontFamily: String = "'Arial'", pointLabelFontStyle: String = "normal", pointLabelFontSize: Double = 10, pointLabelFontColor: String = "#666", pointDot: Boolean = true, pointDotRadius: Double = 3, pointDotStrokeWidth: Double = 1, pointHitDetectionRadius: Double = 20, datasetStroke: Boolean = true, datasetStrokeWidth: Double = 2, datasetFill: Boolean = true, legendTemplate: String = "<ul class=\\"<%=name.toLowerCase()%>-legend\\"><% for (var i=0; i<datasets.length; i++){%><li><span style=\\"background-color:<%=datasets[i].strokeColor%>\\"></span><%if(datasets[i].label){%><%=datasets[i].label%><%}%></li><%}%></ul>" ): RadarChartOptions = { js.Dynamic.literal( scaleShowLine = scaleShowLine, angleShowLineOut = angleShowLineOut, scaleShowLabels = scaleShowLabels, scaleBeginAtZero = scaleBeginAtZero, angleLineColor = angleLineColor, angleLineWidth = angleLineWidth, pointLabelFontFamily = pointLabelFontFamily, pointLabelFontStyle = pointLabelFontStyle, pointLabelFontSize = pointLabelFontSize, pointLabelFontColor = pointLabelFontColor, pointDot = pointDot, pointDotRadius = pointDotRadius, pointDotStrokeWidth = pointDotStrokeWidth, pointHitDetectionRadius = pointHitDetectionRadius, datasetStroke = datasetStroke, datasetStrokeWidth = datasetStrokeWidth, datasetFill = datasetFill, legendTemplate = legendTemplate ).asInstanceOf[RadarChartOptions] } } trait PolarAreaChartOptions extends js.Object { var scaleShowLabelBackdrop: Boolean = js.native var scaleBackdropColor: String = js.native var scaleBeginAtZero: Boolean = js.native var scaleBackdropPaddingY: Double = js.native var scaleBackdropPaddingX: Double = js.native var scaleShowLine: Boolean = js.native var segmentShowStroke: Boolean = js.native var segmentStrokeColor: String = js.native var 
segmentStrokeWidth: Double = js.native var animationSteps: Double = js.native var animationEasing: String = js.native var animateRotate: Boolean = js.native var animateScale: Boolean = js.native var legendTemplate: String = js.native } object PolarAreaChartOptions { def apply ( scaleShowLabelBackdrop: Boolean = true, scaleBackdropColor: String = "rgba(255,255,255,0.75)", scaleBeginAtZero: Boolean = true, scaleBackdropPaddingY: Double = 2, scaleBackdropPaddingX: Double = 2, scaleShowLine: Boolean = true, segmentShowStroke: Boolean = true, segmentStrokeColor: String = "#fff", segmentStrokeWidth: Double = 2, animationSteps: Double = 100, animationEasing: String = "easeOutBounce", animateRotate: Boolean = true, animateScale: Boolean = false, legendTemplate: String = "<ul class=\\"<%=name.toLowerCase()%>-legend\\"><% for (var i=0; i<segments.length; i++){%><li><span style=\\"background-color:<%=segments[i].fillColor%>\\"></span><%if(segments[i].label){%><%=segments[i].label%><%}%></li><%}%></ul>" ):PolarAreaChartOptions = { js.Dynamic.literal( scaleShowLabelBackdrop = scaleShowLabelBackdrop, scaleBackdropColor = scaleBackdropColor, scaleBeginAtZero = scaleBeginAtZero, scaleBackdropPaddingY = scaleBackdropPaddingY, scaleBackdropPaddingX = scaleBackdropPaddingX, scaleShowLine = scaleShowLine, segmentShowStroke = segmentShowStroke, segmentStrokeColor = segmentStrokeColor, segmentStrokeWidth = segmentStrokeWidth, animationEasing = animationEasing, animateRotate = animateRotate, animateScale = animateScale, legendTemplate = legendTemplate ).asInstanceOf[PolarAreaChartOptions] } } trait PieChartOptions extends js.Object { var segmentShowStroke: Boolean = js.native var segmentStrokeColor: String = js.native var segmentStrokeWidth: Double = js.native var percentageInnerCutout: Double = js.native var animationSteps: Double = js.native var animationEasing: String = js.native var animateRotate: Boolean = js.native var animateScale: Boolean = js.native var legendTemplate: String = js.native } object PieChartOptions { def apply( segmentShowStroke: Boolean = true, segmentStrokeColor: String = "#fff", segmentStrokeWidth: Double = 2, percentageInnerCutout: Double = 0, // Default is 50 for doughnut animationSteps: Double = 100, animationEasing: String = "easeOutBounce", animateRotate: Boolean = true, animateScale: Boolean = false, legendTemplate: String = "<ul class=\\"<%=name.toLowerCase()%>-legend\\"><% for (var i=0; i<segments.length; i++){%><li><span style=\\"background-color:<%=segments[i].fillColor%>\\"></span><%if(segments[i].label){%><%=segments[i].label%><%}%></li><%}%></ul>" ):PieChartOptions = { js.Dynamic.literal( segmentShowStroke = segmentShowStroke, segmentStrokeColor = segmentStrokeColor, segmentStrokeWidth = segmentStrokeWidth, percentageInnerCutout = percentageInnerCutout, animationSteps = animationSteps, animationEasing = animationEasing, animateRotate = animateRotate, animateScale = animateScale, legendTemplate = legendTemplate ).asInstanceOf[PieChartOptions] } } class Chart protected() extends js.Object { def this(context: CanvasRenderingContext2D) = this() def Line(data: LinearChartData, options: LineChartOptions = null): LinearInstance = js.native def Bar(data: LinearChartData, options: BarChartOptions = null): LinearInstance = js.native def Radar(data: LinearChartData, options: RadarChartOptions = null): LinearInstance = js.native def PolarArea(data: js.Array[CircularChartData], options: PolarAreaChartOptions = null): CircularInstance = js.native def Pie(data: 
js.Array[CircularChartData], options: PieChartOptions = null): CircularInstance = js.native def Doughnut(data: js.Array[CircularChartData], options: PieChartOptions = null): CircularInstance = js.native } object Chart extends js.Object { var defaults: js.Any = js.native } }
saileshs/scalajs-chart
src/main/scala/io/surfkit/clientlib/Chart.scala
Scala
mit
19,977
package pl.touk.nussknacker.ui.security.oauth2

import java.net.URI
import java.nio.charset.{Charset, StandardCharsets}
import java.security.PublicKey

import com.typesafe.config.Config
import pl.touk.nussknacker.engine.util.Implicits.SourceIsReleasable
import pl.touk.nussknacker.ui.security.CertificatesAndKeys
import pl.touk.nussknacker.ui.security.api.AuthenticationConfiguration
import pl.touk.nussknacker.ui.security.oauth2.ProfileFormat.ProfileFormat
import sttp.model.{HeaderNames, MediaType, Uri}

import scala.concurrent.duration.{FiniteDuration, HOURS}
import scala.io.Source
import scala.util.Using

case class OAuth2Configuration(usersFile: URI,
                               authorizeUri: URI,
                               clientSecret: String,
                               clientId: String,
                               profileUri: URI,
                               profileFormat: Option[ProfileFormat],
                               accessTokenUri: URI,
                               redirectUri: Option[URI],
                               implicitGrantEnabled: Boolean = false,
                               jwt: Option[JwtConfiguration],
                               accessTokenParams: Map[String, String] = Map.empty,
                               authorizeParams: Map[String, String] = Map.empty,
                               headers: Map[String, String] = Map.empty,
                               authorizationHeader: String = HeaderNames.Authorization,
                               accessTokenRequestContentType: String = MediaType.ApplicationJson.toString(),
                               defaultTokenExpirationTime: FiniteDuration = FiniteDuration(1, HOURS),
                               anonymousUserRole: Option[String] = None
                              ) extends AuthenticationConfiguration {

  override def name: String = OAuth2Configuration.name

  def authorizeUrl: Option[URI] = Option(
    Uri(authorizeUri)
      .param("client_id", clientId)
      .param("redirect_uri", redirectUri.map(_.toString))
      .params(authorizeParams))
    .map(_.toJavaUri)

  def authSeverPublicKey: Option[PublicKey] = Option.empty

  def idTokenNonceVerificationRequired: Boolean = jwt.exists(_.idTokenNonceVerificationRequired)
}

object OAuth2Configuration {

  import AuthenticationConfiguration._
  import JwtConfiguration.jwtConfigurationVR
  import pl.touk.nussknacker.engine.util.config.CustomFicusInstances._
  import net.ceedubs.ficus.readers.ArbitraryTypeReader._
  import net.ceedubs.ficus.readers.EnumerationReader._

  val name = "OAuth2"

  def create(config: Config): OAuth2Configuration = config.as[OAuth2Configuration](authenticationConfigPath)
}

object ProfileFormat extends Enumeration {
  type ProfileFormat = Value
  val GITHUB = Value("github")
  val OIDC = Value("oidc")
}

trait JwtConfiguration {
  def accessTokenIsJwt: Boolean

  def userinfoFromIdToken: Boolean

  def authServerPublicKey: Option[PublicKey]

  def idTokenNonceVerificationRequired: Boolean

  def audience: Option[String]
}

object JwtConfiguration {

  import net.ceedubs.ficus.readers.ValueReader
  import net.ceedubs.ficus.Ficus._
  import net.ceedubs.ficus.readers.ArbitraryTypeReader._
  import pl.touk.nussknacker.engine.util.config.ConfigEnrichments._

  implicit val jwtConfigurationVR: ValueReader[JwtConfiguration] = ValueReader.relative(_.rootAs[JwtConfig])

  private case class JwtConfig(accessTokenIsJwt: Boolean = false,
                               userinfoFromIdToken: Boolean = false,
                               audience: Option[String],
                               publicKey: Option[String],
                               publicKeyFile: Option[String],
                               certificate: Option[String],
                               certificateFile: Option[String],
                               idTokenNonceVerificationRequired: Boolean = false) extends JwtConfiguration {

    def authServerPublicKey: Some[PublicKey] = {
      val charset: Charset = StandardCharsets.UTF_8

      def getContent(content: Option[String], file: Option[String]): Option[String] =
        content.orElse(file map { path =>
          Using.resource(Source.fromFile(path, StandardCharsets.UTF_8.name))(_.mkString)
        })

      getContent(publicKey, publicKeyFile).map(CertificatesAndKeys.publicKeyFromString(_, charset)) orElse
        getContent(certificate, certificateFile).map(CertificatesAndKeys.publicKeyFromStringCertificate(_, charset)) match {
        case x@Some(_) => x
        case _ => throw new Exception("one of the: 'publicKey', 'publicKeyFile', 'certificate', 'certificateFile' fields should be provided in the authentication.jwt configuration")
      }
    }
  }
}
TouK/nussknacker
security/src/main/scala/pl/touk/nussknacker/ui/security/oauth2/OAuth2Configuration.scala
Scala
apache-2.0
4,746
package de.tototec.sbuild.eclipse.plugin.container

import org.eclipse.core.runtime.IPath
import org.eclipse.core.runtime.NullProgressMonitor
import org.eclipse.jdt.core.ClasspathContainerInitializer
import org.eclipse.jdt.core.IClasspathContainer
import org.eclipse.jdt.core.IJavaProject
import org.eclipse.jdt.core.JavaCore

import de.tototec.sbuild.eclipse.plugin.Logger.debug

class SBuildClasspathContainerInitializer extends ClasspathContainerInitializer {

  override def initialize(containerPath: IPath, project: IJavaProject): Unit = {
    debug(s"${project.getProject.getName()}: initialize(containerPath=${containerPath},project=${project.getProject.getName})")
    setClasspathContainer(containerPath, project)
  }

  override def canUpdateClasspathContainer(containerPath: IPath, project: IJavaProject): Boolean = true

  override def requestClasspathContainerUpdate(containerPath: IPath, project: IJavaProject, containerSuggestion: IClasspathContainer) {
    debug(s"requestClasspathContainerUpdate(containerPath=${containerPath},project=${project.getProject.getName})")
    setClasspathContainer(containerPath, project)
  }

  def setClasspathContainer(containerPath: IPath, project: IJavaProject) {
    val container = new SBuildClasspathContainer(containerPath, project)
    JavaCore.setClasspathContainer(containerPath, Array(project), Array(container), new NullProgressMonitor())
  }
}
SBuild-org/sbuild-eclipse-plugin
de.tototec.sbuild.eclipse.plugin/src/main/scala/de/tototec/sbuild/eclipse/plugin/container/SBuildClasspathContainerInitializer.scala
Scala
apache-2.0
1,455
package com.sidemash.redson.scalatest

import org.scalatest._

abstract class UnitSpec extends FlatSpec with Matchers with
  OptionValues with Inside with Inspectors
sidemash/redson
src/test/scala/com/sidemash/redson/scalatest/UnitSpec.scala
Scala
mit
165
/* * This file is part of AckCord, licensed under the MIT License (MIT). * * Copyright (c) 2019 Katrix * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package ackcord import java.time.{Instant, OffsetDateTime} import ackcord.data._ /** * Base trait normal messages. */ sealed trait APIMessage { /** * The current state of the cache. Contains both a snapshot of how the * cache looks like after this message (the current one), and a snapshot for * how the cache looked like before this message. */ def cache: CacheState } object APIMessage { /** * Sent to the client when Discord is ready to serve requests. No requests * should be sent before this has been received. */ case class Ready(cache: CacheState) extends APIMessage /** * Sent to the client when a previously interrupted connection is resumed. */ case class Resumed(cache: CacheState) extends APIMessage /** * Trait that covers all channel messages */ sealed trait ChannelMessage extends APIMessage { /** * The channel that was acted upon. */ def channel: Channel } /** * Sent to the client when a new channel is created. * @param channel The channel that was created. */ case class ChannelCreate(channel: GuildChannel, cache: CacheState) extends ChannelMessage /** * Sent to the client when a channel is edited or updated. * @param channel The channel that was edited. This will always be a * guild channel */ case class ChannelUpdate(channel: GuildChannel, cache: CacheState) extends ChannelMessage /** * Sent to the client when a channel is deleted. The current snapshot will * not contain the channel. * @param channel The channel that was deleted. */ case class ChannelDelete(channel: GuildChannel, cache: CacheState) extends ChannelMessage /** * Sent to the client when a message is pinned or unpinned in a text * channel. This is not sent when a pinned message is deleted. * @param channel The channel where the change happened * @param mostRecent The time the most recent pinned message was pinned */ case class ChannelPinsUpdate(channel: TChannel, mostRecent: Option[OffsetDateTime], cache: CacheState) extends ChannelMessage /** * Trait that covers all guild messages. */ sealed trait GuildMessage extends APIMessage { /** * The guild that was acted upon. */ def guild: Guild } /** * Sent to the client after the client connects to the gateway, when a * previously unavailable guild becomes available, and when the client * joins a new guild. 
* @param guild The created guild object */ case class GuildCreate(guild: Guild, cache: CacheState) extends GuildMessage /** * Sent to the client when the guild object is updated. * @param guild The updated guild. */ case class GuildUpdate(guild: Guild, cache: CacheState) extends GuildMessage /** * Sent to the client either if a guild becomes unavailable due to and * outage, or if the client leaves or is kicked from a guild. * @param guild The deleted guild * @param unavailable If an outage caused this event */ case class GuildDelete(guild: Guild, unavailable: Boolean, cache: CacheState) extends GuildMessage /** * Sent to the client when an user is banned from a guild. If you need the * [[ackcord.data.GuildMember]] object of the user, you can find it in [[cache.previous]]. * @param guild The guild the user was banned from. * @param user The banned user. */ case class GuildBanAdd(guild: Guild, user: User, cache: CacheState) extends GuildMessage /** * Sent to the client when an user is unbanned from a guild. * @param guild The guild where the user was previously banned. * @param user The previously banned user. */ case class GuildBanRemove(guild: Guild, user: User, cache: CacheState) extends GuildMessage /** * Sent to the client when the emojis of a guild have been updated. If you * need the old emojis, you can find them in [[cache.previous]]. * @param guild The guild where the update occurred. * @param emojis The new emojis. */ case class GuildEmojiUpdate(guild: Guild, emojis: Seq[Emoji], cache: CacheState) extends GuildMessage /** * Sent to the client when the integrations of a guild were updated. You * have to fetch the integrations yourself. * @param guild The guild where the update occurred. */ case class GuildIntegrationsUpdate(guild: Guild, cache: CacheState) extends GuildMessage /** * Sent to the client when a user joins the guild. * @param member The new member * @param guild The joined guild */ case class GuildMemberAdd(member: GuildMember, guild: Guild, cache: CacheState) extends GuildMessage /** * Sent to the client when a user leaves the guild (or is kicked or banned). * If you need the [[ackcord.data.GuildMember]], you can find it in [[cache.previous]]. * @param user The user that left * @param guild The guild the user left */ case class GuildMemberRemove(user: User, guild: Guild, cache: CacheState) extends GuildMessage /** * Sent to the client when a guild member is updated. The fields seen here * are all the fields that can change. Looking at the users [[ackcord.data.GuildMember]] * for changes is pointless. * @param guild The guild of the guild member * @param roles Thew new roles for the guild member * @param user The user of the updated guild member * @param nick Nick of the user if one was set */ case class GuildMemberUpdate( guild: Guild, roles: Seq[Role], user: User, nick: Option[String], cache: CacheState ) extends GuildMessage /** * Sent to the client if the client requests to get all members * (even offline ones) for large guilds using [[ackcord.gateway.RequestGuildMembers]]. * @param guild The guild requested for. * @param members The guild members in this chunk. */ case class GuildMembersChunk(guild: Guild, members: Seq[GuildMember], cache: CacheState) extends GuildMessage /** * Sent to the client when a new role is created. * @param guild The guild of the new role * @param role The new role */ case class GuildRoleCreate(guild: Guild, role: Role, cache: CacheState) extends GuildMessage /** * Sent to the client when a role is updated. 
* @param guild The guild of the updated role * @param role The updated role */ case class GuildRoleUpdate(guild: Guild, role: Role, cache: CacheState) extends GuildMessage /** * Sent to the client when a role is deleted * @param guild The guild of the deleted role * @param roleId The deleted role. */ case class GuildRoleDelete(guild: Guild, roleId: Role, cache: CacheState) extends GuildMessage /** * Trait that covers all message messages. */ sealed trait MessageMessage extends APIMessage { /** * The message that was acted upon. */ def message: Message } /** * Sent to the client when a message is created (posted). * @param message The sent message */ case class MessageCreate(message: Message, cache: CacheState) extends MessageMessage /** * Sent to the client when a message is updated. * @param message The new message. The check changes, the old message can * be found in [[cache.previous]]. */ case class MessageUpdate(message: Message, cache: CacheState) extends MessageMessage /** * Sent to the client when a message is deleted. * @param message The deleted message. * @param channel The channel of the message. */ case class MessageDelete(message: Message, channel: TChannel, cache: CacheState) extends MessageMessage with ChannelMessage /** * Sent to the client when multiple messages are deleted at the same time. * Often this is performed by a bot. * @param messages The deleted messages * @param channel The channel of the deleted messages */ case class MessageDeleteBulk(messages: Seq[Message], channel: TChannel, cache: CacheState) extends ChannelMessage /** * Sent to the client when a user adds a reaction to a message. * @param user The user that added the reaction. * @param channel The channel of the message. * @param message The message the user added an reaction to. * @param emoji The emoji the user reacted with */ case class MessageReactionAdd( user: User, channel: TChannel, message: Message, emoji: PartialEmoji, cache: CacheState ) extends MessageMessage with ChannelMessage /** * Sent to the client when a user removes a reaction from a message. * @param user The user that removed the reaction. * @param channel The channel of the message. * @param message The message the user removed an reaction from. * @param emoji The emoji the user reacted with */ case class MessageReactionRemove( user: User, channel: TChannel, message: Message, emoji: PartialEmoji, cache: CacheState ) extends MessageMessage with ChannelMessage /** * Sent to the client when a user removes all reactions from a message. * The emojis of the message can be found in [[cache.previous]]. * @param channel The channel of the message. * @param message The message the user removed the reactions from. */ case class MessageReactionRemoveAll(channel: TChannel, message: Message, cache: CacheState) extends MessageMessage with ChannelMessage /** * Sent to the client when the presence of a user updates. 
* @param guild The guild where the update took place * @param user The user of the presence * @param roleIds The roles of the user * @param presence The new presence */ case class PresenceUpdate( guild: Guild, user: User, roleIds: Seq[RoleId], presence: Presence, cache: CacheState ) extends GuildMessage /** * Sent to the client when a user starts typing in a channel * @param channel The channel where the typing happened * @param user The user that began typing * @param timestamp When user started typing */ case class TypingStart(channel: TChannel, user: User, timestamp: Instant, cache: CacheState) extends ChannelMessage /** * Sent to the client when a user object is updated. * @param user The new user. */ case class UserUpdate(user: User, cache: CacheState) extends APIMessage /** * Sent to the client when a user joins/leaves/moves voice channels * @param voiceState New voice states */ case class VoiceStateUpdate(voiceState: VoiceState, cache: CacheState) extends APIMessage /** * Sent a guilds voice server is updated. Also used when connecting to a voice channel. * @param token The voice connection token * @param guild The guild of the update * @param endpoint The voice server */ case class VoiceServerUpdate(token: String, guild: Guild, endpoint: String, cache: CacheState) extends GuildMessage /** * Sent to the client when guilds webhooks are updated. * @param guild The guild of the updated webhook * @param channel The channel for the webhook */ case class WebhookUpdate(guild: Guild, channel: GuildChannel, cache: CacheState) extends GuildMessage with ChannelMessage }
Katrix-/AckCord
core/src/main/scala/ackcord/APIMessage.scala
Scala
mit
12,562
// Copyright (c) 2010 Sean C. Rhea <[email protected]> // All rights reserved. // // See the file LICENSE included in this distribution for details. package org.srhea.scalaqlite import org.scalatest.FlatSpec import org.scalatest.Matchers import scala.util.{ Failure, Try } class SqliteDbSpec extends FlatSpec with Matchers { val db = new SqliteDb(":memory:") "database connections" should "work in multithreaded mode" in { val db2 = new SqliteDb(":memory:", SqliteDb.BaseFlags | Sqlite3C.SQLITE_OPEN_NOMUTEX) db2.execute("CREATE TABLE foo (i INTEGER, f DOUBLE, t TEXT);") db2.execute("INSERT INTO foo (i, f, t) VALUES (1, 2.0, 'foo');") db2.foreachRow("SELECT count(*) FROM foo;") (_ should equal (SqlLong(1) :: Nil)) } "CREATE TABLE" should "not throw any exceptions" in { db.execute("CREATE TABLE foo (i INTEGER, f DOUBLE, t TEXT);") } "INSERT" should "add rows to the table" in { db.execute("INSERT INTO foo (i, f, t) VALUES (1, 2.0, 'foo');") db.execute("INSERT INTO foo (i, f, t) VALUES (3, NULL, 'bar');") db.foreachRow("SELECT count(*) FROM foo;") { row => row(0) should equal (SqlLong(2)) } } "a prepared statement INSERT" should "add rows to the table" in { db.prepare("INSERT INTO foo (i, f, t) VALUES (?, ?, ?);") { stmt => stmt.execute(SqlLong(5), SqlDouble(10.0), SqlText("foobar")) stmt.execute(SqlLong(5), SqlDouble(11.0), SqlText("notfoobar")) } db.foreachRow("SELECT count(*) FROM foo WHERE i > 4") { row => row(0) should equal (SqlLong(2)) } } "SELECT *" should "output all the rows" in { var list: List[String] = Nil db.foreachRow("SELECT * FROM foo;") { row => list = row.map(_.toString).mkString(" ") :: list } list.reverse.mkString("\\n") should equal ("1 2.0 foo\\n3 NULL bar\\n5 10.0 foobar\\n5 11.0 notfoobar") } "SqliteResultSet.map" should "work" in { val s = db.query("SELECT * FROM foo;") { _.map(_.mkString(" ")).mkString("\\n") } s should equal ("1 2.0 foo\\n3 NULL bar\\n5 10.0 foobar\\n5 11.0 notfoobar") } "SqliteResultSet.filter" should "work" in { db.foreachRow("SELECT * FROM foo") { row => if (row(2).toString == "bar") row.mkString(" ") should equal ("3 NULL bar") } } "doubles" should "have full precision" in { db.foreachRow("SELECT 1234567890123.0;") { row => row(0).toDouble should equal (1234567890123.0) } } "longs" should "have full precision" in { val low = Integer.MIN_VALUE - 1L val high = Integer.MAX_VALUE + 1L db.foreachRow("SELECT " + low + ";") { row => row(0).isInstanceOf[SqlLong] should equal (true) row(0).toLong should equal (low) } db.foreachRow("SELECT " + high + ";") { row => row(0).isInstanceOf[SqlLong] should equal (true) row(0).toLong should equal (high) } } "values that fit in an int" should "be returned as an int" in { db.foreachRow("SELECT " + Integer.MIN_VALUE + ";") { row => row(0).isInstanceOf[SqlLong] should equal (true) } db.foreachRow("SELECT " + Integer.MAX_VALUE + ";") { row => row(0).isInstanceOf[SqlLong] should equal (true) } } "values that don't fit in an int" should "throw an exception on toInt" in { intercept[SqlException] { db.foreachRow("SELECT " + (Integer.MAX_VALUE + 1L) + ";") { row => row(0).toInt } } } "Non-ascii characters" should "be bound correctly" in { val str = "百" db.execute("CREATE TABLE non_ascii_test (str TEXT)") db.execute("INSERT INTO non_ascii_test VALUES (?1)", SqlText(str)) db.foreachRow("SELECT str FROM non_ascii_test") { case Seq(SqlText(s)) => s should equal(str) s.getBytes should equal(Array[Byte](-25, -103, -66)) } } "Prepared statements" should "properly reset themselves" in { db.execute("CREATE TABLE bar (i INTEGER, d 
DOUBLE);") db.execute("INSERT INTO bar (i, d) VALUES (1, 2.0);") db.execute("INSERT INTO bar (i, d) VALUES (1, 3.0);") db.execute("INSERT INTO bar (i, d) VALUES (1, 4.0);") db.execute("INSERT INTO bar (i, d) VALUES (2, 5.0);") db.prepare("SELECT * FROM bar WHERE i = ?;") { stmt => stmt.query(SqlLong(1)) { i => i.hasNext should equal(true) i.next()(1).toDouble should equal (2.0) } stmt.query(SqlLong(2)) { i => i.hasNext should equal(true) i.next()(1).toDouble should equal (5.0) } } } "Bind columns " should " have correct type affinity" in { val barBlob = SqlBlob("bar".map(_.toByte)) val barText = SqlText("bar") val barBlobString = "X'" + "bar".map(c => "%02X".format(c.toByte)).mkString("") + "'" db.getRows("SELECT 1 AS x WHERE ?1 = ?2", barBlob, barText).length should equal (0) db.getRows("SELECT 1 AS x WHERE ?1 = " + barBlobString, barBlob).length should equal (1) db.getRows("SELECT 1 AS x WHERE ?1 = 'bar'", barBlob).length should equal (0) db.getRows("SELECT 1 AS x WHERE ?1 = 'bar'", barText).length should equal (1) db.execute("CREATE TEMP TABLE blob_test (b BLOB, t TEXT)") db.execute("INSERT INTO blob_test (b, t) VALUES (?1, ?2)", barBlob, barText) db.getRows("SELECT COUNT(*) FROM blob_test")(0)(0).toLong should equal(1) db.getRows("SELECT COUNT(*) FROM blob_test WHERE b = " + barBlobString)(0)(0).toLong should equal(1) db.getRows("SELECT COUNT(*) FROM blob_test WHERE b = 'bar'")(0)(0).toLong should equal(0) db.getRows("SELECT COUNT(*) FROM blob_test WHERE t = " + barBlobString)(0)(0).toLong should equal(0) db.getRows("SELECT COUNT(*) FROM blob_test WHERE t = 'bar'")(0)(0).toLong should equal(1) val types = db.mapRows("SELECT TYPEOF(b), TYPEOF(t) FROM blob_test")(_.map(_.toString).mkString(", ")) types.mkString should equal ("blob, text") db.execute("DROP TABLE blob_test") } "Sql nulls" should "work" in { db.getRows("SELECT NULL").head.head should equal (SqlNull) } "Pattern matching" should "work for longs and nulls" in { db.foreachRow("SELECT 1, NULL") { case Seq(SqlLong(i), SqlNull) => i should equal (1) } } "Inserting nulls" should "work" in { db.execute("CREATE TABLE null_test (x, y)") db.execute("INSERT INTO null_test (x, y) VALUES (?, ?)", SqlLong(1), SqlNull) db.execute("INSERT INTO null_test (x, y) VALUES (?, ?)", SqlLong(2)) db.execute("INSERT INTO null_test (x, y) VALUES (?, ?)", SqlNull, SqlLong(3)) db.getRows("SELECT * FROM null_test WHERE x = 1").head should equal (Seq(SqlLong(1), SqlNull)) db.getRows("SELECT * FROM null_test WHERE x = 2").head should equal (Seq(SqlLong(2), SqlNull)) db.getRows("SELECT * FROM null_test WHERE y IS NOT NULL").head should equal (Seq(SqlNull, SqlLong(3))) } "Error messages" should "contain accurate descriptions" in { db.execute("CREATE TABLE error_test (x PRIMARY KEY)") db.execute("INSERT INTO error_test VALUES (1)") val Failure(SqlException(msg)) = Try(db.execute("INSERT INTO error_test VALUES (1)")) msg should include ("Abort due to constraint violation") msg should include ("UNIQUE constraint failed: error_test.x") val Failure(SqlException(msg2)) = Try(db.foreachRow("SELECT * FROM blah_blah")(_ => 1)) } }
meraki/scalaqlite
src/test/scala/SqliteDbSpec.scala
Scala
mit
7,461
package com.typesafe.sbt.packager.universal import com.typesafe.sbt.packager._ import com.typesafe.sbt.packager.permissions import org.scalatest._ import java.io.File import java.nio.file.{ Path, Paths, Files } import java.nio.file.attribute.PosixFilePermission._ import scala.collection.JavaConversions._ class ZipHelperSpec extends WordSpec with Matchers with BeforeAndAfterEach with BeforeAndAfterAll { var tmp: Path = _ val toDelete = scala.collection.mutable.ListBuffer[Path]() override def beforeEach { tmp = Files createTempDirectory "_sbt-native-packager" toDelete += tmp } override def afterAll { toDelete foreach { dir => scala.util.Try { Files.walkFileTree(dir, new DeleteDirectoryVisitor) } } } "The ZipHelper.zip" should { "create a zip with a single file" taggedAs (LinuxTag, WindowsTag) in { zipSingleFile(ZipHelper.zip) } // ignores empty directories "create a zip with nested directories" taggedAs (LinuxTag, WindowsTag) ignore { zipNestedFile(ZipHelper.zip) } "create a zip with nested directories containing file" taggedAs (LinuxTag, WindowsTag) in { zipNestedDirsWithFiles(ZipHelper.zip) } "create directories if necessary" taggedAs (LinuxTag, WindowsTag) in { createNecessaryDirectories(ZipHelper.zip) } // works only on some systems "preserve the executable bit" taggedAs (LinuxTag, WindowsTag) ignore { preserveExecutableBit(ZipHelper.zip) } } "The ZipHelper.zipNIO" should { "create a zip with a single file" taggedAs (LinuxTag, WindowsTag) in { zipSingleFile(ZipHelper.zipNIO) } "create a zip with nested directories" taggedAs (LinuxTag, WindowsTag) in { zipNestedFile(ZipHelper.zipNIO) } "create a zip with nested directories containing file" taggedAs (LinuxTag, WindowsTag) in { zipNestedDirsWithFiles(ZipHelper.zipNIO) } "create directories if necessary" taggedAs (LinuxTag, WindowsTag) in { createNecessaryDirectories(ZipHelper.zipNIO) } // never works "preserve the executable bit" taggedAs (LinuxTag, WindowsTag) ignore { preserveExecutableBit(ZipHelper.zipNIO) } } "The ZipHelper.zipNative" should { "create a zip with a single file" taggedAs (LinuxTag) in { zipSingleFile(ZipHelper.zipNative) } "create a zip with nested directories" taggedAs (LinuxTag) in { zipNestedFile(ZipHelper.zipNative) } "create a zip with nested directories containing file" taggedAs (LinuxTag) in { zipNestedDirsWithFiles(ZipHelper.zipNative) } "create directories if necessary" taggedAs (LinuxTag) in { createNecessaryDirectories(ZipHelper.zipNative) } // never works "preserve the executable bit" taggedAs (LinuxTag) ignore { preserveExecutableBit(ZipHelper.zipNative) } } /* ========================================================== */ /* ========================================================== */ /* ========================================================== */ private type Zipper = (Traversable[(File, String)], File) => Unit private def zipSingleFile(zipper: Zipper) { val out = tmp resolve "single.zip" val file = Files createFile (tmp resolve "single.txt") zipper(List(file.toFile -> "single.txt"), out.toFile) ZipHelper.withZipFilesystem(out.toFile, false) { system => val zippedFile = system getPath "single.txt" Files exists zippedFile should be(true) } } private def zipNestedFile(zipper: Zipper) { // setup val out = tmp resolve "nested.zip" val dir = tmp resolve "dir" val nested = dir resolve "nested" Files createDirectories nested zipper(List(nested.toFile -> "dir/nested"), out.toFile) ZipHelper.withZipFilesystem(out.toFile, false) { system => val zDir = system getPath "dir" Files exists zDir should be(true) Files 
isDirectory zDir should be(true) val zNested = zDir resolve "nested" Files exists zNested should be(true) Files isDirectory zNested should be(true) } } private def zipNestedDirsWithFiles(zipper: Zipper) { // setup val out = tmp resolve "nested-containing.zip" val dir = tmp resolve "dir" val file = dir resolve "file.txt" Files createDirectories dir Files createFile file zipper(List(file.toFile -> "dir/file.txt"), out.toFile) ZipHelper.withZipFilesystem(out.toFile, false) { system => val zDir = system getPath "dir" Files exists zDir should be(true) Files isDirectory zDir should be(true) val zFile = zDir resolve "file.txt" Files exists zFile should be(true) Files isDirectory zFile should be(false) } } private def createNecessaryDirectories(zipper: Zipper) { val out = tmp resolve "dir-creation.zip" val file = tmp resolve "dir-file.txt" Files createFile file zipper(List(file.toFile -> "dir/file.txt"), out.toFile) ZipHelper.withZipFilesystem(out.toFile, false) { system => val zDir = system getPath "dir" Files exists zDir should be(true) Files isDirectory zDir should be(true) val zFile = zDir resolve "file.txt" Files exists zFile should be(true) Files isDirectory zFile should be(false) } } private def preserveExecutableBit(zipper: Zipper) { val out = tmp resolve "exec.zip" val exec = tmp resolve "exec" Files createFile exec Files.setPosixFilePermissions(exec, permissions("0755")) val perms = Files getPosixFilePermissions exec perms should contain only (OWNER_READ, OWNER_WRITE, OWNER_EXECUTE, GROUP_READ, GROUP_EXECUTE, OTHERS_READ, OTHERS_EXECUTE) zipper(List(exec.toFile -> "exec"), out.toFile) Files exists out should be(true) val unzipped = tmp resolve "unzipped-exec" ZipHelper.withZipFilesystem(out.toFile, false) { system => val zippedFile = system getPath "exec" Files exists zippedFile should be(true) Files.copy(zippedFile, unzipped) } // checking permissions val unzippedPerms = Files getPosixFilePermissions unzipped unzippedPerms should contain only (OWNER_READ, OWNER_WRITE, OWNER_EXECUTE, GROUP_READ, GROUP_EXECUTE, OTHERS_READ, OTHERS_EXECUTE) } }
benmccann/sbt-native-packager
src/test/scala/com/typesafe/sbt/packager/universal/ZipHelperSpec.scala
Scala
bsd-2-clause
6,299
package demesne.index import scala.reflect._ import akka.actor.{ ActorLogging, ActorRef, Props } import akka.cluster.Cluster import akka.cluster.pubsub.DistributedPubSub import akka.event.LoggingReceive import akka.persistence.{ PersistentActor, SnapshotOffer } import cats.syntax.either._ import omnibus.core.syntax.clazz._ import omnibus.identifier.{ Id, Identifying } //import demesne.EventLike object IndexAggregateProtocol { sealed trait Event { type TID = Id[_] def sourceId: TID } // //todo does this work? test whether the extractor results in proper Id[E] type // object Event { // type Aux[E] = Event { type TID = Id[E] } // // def unapply[E]( e: Event.Aux[E] ): Option[Id[E]] = { // Some( e.sourceId.asInstanceOf[Id[E]] ) // } // } /** * Index key to identfier recorded. */ case class Recorded( override val sourceId: Recorded#TID, key: Any, id: Any, value: Any ) extends Event case class Withdrawn( override val sourceId: Withdrawn#TID, key: Option[Any], id: Any ) extends Event case class KeyRevised( override val sourceId: KeyRevised#TID, oldKey: Any, newKey: Any ) extends Event case class ValueRevised( override val sourceId: ValueRevised#TID, key: Any, oldValue: Any, newValue: Any ) extends Event } object IndexAggregate { /** * Create an Akka Props for the [[IndexAggregate]] actor corresponding to a specific key-to-identifier index. */ def props[K: ClassTag, I: ClassTag, V: ClassTag]( topic: String ): Props = Props( new IndexAggregate[K, I, V]( topic ) ) def mapTo[T]( v: Any )( implicit tag: ClassTag[T] ): T = { val boxedClass = { val c = tag.runtimeClass if (c.isPrimitive) toBoxed( c ) else c } require( boxedClass ne null ) boxedClass.cast( v ).asInstanceOf[T] } private val toBoxed: Map[Class[_], Class[_]] = Map( classOf[Boolean] -> classOf[java.lang.Boolean], classOf[Byte] -> classOf[java.lang.Byte], classOf[Char] -> classOf[java.lang.Character], classOf[Short] -> classOf[java.lang.Short], classOf[Int] -> classOf[java.lang.Integer], classOf[Long] -> classOf[java.lang.Long], classOf[Float] -> classOf[java.lang.Float], classOf[Double] -> classOf[java.lang.Double], classOf[Unit] -> classOf[scala.runtime.BoxedUnit] ) } //todo maybe leverage agent projection? /** * [[IndexAggregate]] maintains the logical index for an Aggregate Root. Index keys to identifier values are * [[demesne.index.Directive.Record]]ed. Recorded events are published via a distrubuted pub/sub mechanism to a relay who * makes sure the index is recorded in a local Index Akka Agent for easier access. * Created by damonrolfs on 10/26/14. */ class IndexAggregate[K: ClassTag, I: ClassTag, V: ClassTag]( topic: String ) extends PersistentActor with ActorLogging { outer => import akka.cluster.pubsub.DistributedPubSubMediator.Publish import demesne.index.{ IndexAggregateProtocol => P, Directive => D } implicit val identifying = { val id = IndexIdentifier.make[K, I, V]( topic ) Identifying.pure[State, IndexIdentifier]( zeroValueFn = id, nextValueFn = () => id, valueFromRepFn = _ => id ) } val tid: Id[State] = identifying.next val KeyType: ClassTag[K] = classTag[K] val IdType: ClassTag[I] = classTag[I] val ValueType: ClassTag[V] = classTag[V] /** * Distributed pub/sub channel used to deliver news of aggregate root indexing. 
*/ val mediator: ActorRef = DistributedPubSub( context.system ).mediator // persistenceId must include cluster role to support multiple masters override lazy val persistenceId: String = { val root = Cluster( context.system ).selfRoles .find { _.startsWith( "index-" ) } .map { _ + "-master" } .getOrElse { "index-master" } root + "/" + topic } type State = Map[K, IndexedValue[I, V]] private var state: State = Map.empty[K, IndexedValue[I, V]] /** * Update the state with the new index. */ private def updateState( event: Any ): Unit = { log.debug( "IndexAggregate[{}]: BEFORE updateState: state:[{}]", self.path, state.mkString( "\n", "\n", "\n" ) ) event match { case P.Recorded( _, KeyType( key ), IdType( id ), ValueType( value ) ) => { val iValue = IndexedValue[I, V]( id, value ) log.debug( "IndexAggregate[{}] RECORDED: {} -> {}", self.path, key, iValue ) state += (key -> iValue) } case P.Withdrawn( _, Some( KeyType( key ) ), _ ) if state contains key => { log.debug( "IndexAggregate[{}] WITHDRAWN via KEY: {}", self.path, key ) state -= key } case P.Withdrawn( _, None, IdType( id ) ) if state.exists { case ( _, IndexedValue( i, _ ) ) => i == id } => { log.debug( "IndexAggregate[{}] WITHDRAWN via ID: {}", self.path, id ) val key = state collectFirst { case ( k, IndexedValue( i, _ ) ) if i == id => k } key match { case Some( k ) => { log.debug( "IndexAggregate removed key:[{}]", k ) state -= k } case None => log.debug( "IndexAggregate could not find identifier [{}] to withdraw", id ) } } case P.KeyRevised( _, KeyType( oldKey ), KeyType( newKey ) ) if state contains oldKey => { val value = state( oldKey ) state += (newKey -> value) state -= oldKey log.debug( "IndexAggregate[{}] REVISED: {} to {}", self.path, oldKey, newKey ) } case P.ValueRevised( _, KeyType( key ), ValueType( oldValue ), ValueType( newValue ) ) if state contains key => { val iValue = state( key ) state += (key -> iValue.copy( value = newValue )) log.debug( "IndexAggregate[{}] REVISED Key:[{}] VALUE: {} to {}", self.path, key, oldValue, newValue ) } case e: P.Event => { log.warning( "IndexAggregate[{}]: asked to update for unrecognized event: [{}]", self.path, e ) } } log.debug( "IndexedAggregate[{}]: AFTER updateState: state:[{}]", self.path, state.mkString( "\n", "\n", "\n" ) ) } /** * Akka Persistence handler used to rehydrate aggregate from event journal. */ override val receiveRecover: Receive = LoggingReceive { case e: P.Recorded => updateState( e ) case e: P.Withdrawn => updateState( e ) case e: P.KeyRevised => updateState( e ) case e: P.ValueRevised => updateState( e ) case SnapshotOffer( _, snapshot ) => state = snapshot.asInstanceOf[State] } /** * Akka Persistence handler used to receive command when the aggregate actor is active. * Record commands are processed asynchronously to update the index with a new logical key to identifier mapping. */ override def receiveCommand: Receive = LoggingReceive { // Record commands are processed asynchronously to update the index with a new logical key to identifier mapping. case D.Record( KeyType( k ), IdType( i ), ValueType( v ) ) => { persist( P.Recorded( sourceId = tid, key = k, id = i, value = v ) ) { e => updateState( e ) mediator ! Publish( topic = topic, msg = e ) } } case D.Withdraw( IdType( i ), Some( KeyType( k ) ) ) if state contains k => { persist( P.Withdrawn( sourceId = tid, key = Option( k ), id = i ) ) { e => updateState( e ) mediator ! 
Publish( topic = topic, msg = e ) } } case D.Withdraw( IdType( evtId ), None ) if state.exists { case ( _, IndexedValue( id, _ ) ) => id == evtId } => { persist( P.Withdrawn( sourceId = tid, key = None, id = evtId ) ) { e => updateState( e ) mediator ! Publish( topic = topic, msg = e ) } } case D.ReviseKey( KeyType( oldKey ), KeyType( newKey ) ) if state contains oldKey => { persist( P.KeyRevised( sourceId = tid, oldKey = oldKey, newKey = newKey ) ) { e => updateState( e ) mediator ! Publish( topic = topic, msg = e ) } } case D.ReviseValue( KeyType( key ), ValueType( oldValue ), ValueType( newValue ) ) if state contains key => { persist( P.ValueRevised( sourceId = tid, key = key, oldValue = oldValue, newValue = newValue ) ) { e => updateState( e ) mediator ! Publish( topic = topic, msg = e ) } } case D.ReviseValue( KeyType( key ), ValueType( _ ), ValueType( _ ) ) => { log.warning( "IndexAggregate[{}]: UNHANDLED ReviseValue missing key: key=[{}] state=[{}]", self.path, ( key, KeyType.runtimeClass ), state.mkString( "\n", "\n", "\n" ) ) } case av @ D.AlterValue( KeyType( key ) ) if state contains key => { val event = for { oldIndexedValue <- Either.fromOption( state.get( key ), new java.util.NoSuchElementException( s"IndexAggregate does not contain a state entry for key:[${key}]" ) ) newValue <- Either catchNonFatal { av.alter( oldIndexedValue.value ).asInstanceOf[V] } } yield P.ValueRevised( sourceId = tid, key = key, oldValue = oldIndexedValue.value, newValue = newValue ) event match { case Right( evt ) => { persist( evt ) { e => updateState( e ) mediator ! Publish( topic = topic, msg = e ) } } case Left( ex ) => log.error( ex, "IndexAggregate[{}]: key:[{}] alteration failed", self.path, key ) } } // Index actors dependent on the aggregate issue a WaitForStart message case WaitingForStart => { log.debug( "recd WaitForStart: sending Started to {}", sender() ) sender() ! Started } case D.Ignore => {} } override def unhandled( message: Any ): Unit = { message match { case _: akka.persistence.RecoveryCompleted => () case D.Withdraw( id, k ) => { log.warning( s"IndexAggregate[${self.path}] UNHANDLED: [${message}] " + s"id:[${id}] type:[${IdType.runtimeClass.safeSimpleName}] " + s"key:[${k.toString}] key-class:[${k.getClass.safeSimpleName}] " + s"state:[${state}]" ) } case D.Record( k, i, v ) => { log.warning( s"topic:[${topic}] + tid:[${tid}] ~> actor:[${self.path}] UNHANDLED [${message}] - " + "verify AggregateRootType.indexes types match Record: key-types:[{}] id-types:[{}] value-types[{}]", ( KeyType.runtimeClass, k.getClass ), ( IdType.runtimeClass, i.getClass ), ( ValueType, v.getClass ) ) } case Directive.ReviseKey( oldKey, newKey ) => { log.warning( "IndexAggregate[{}] UNHANDLED KEY REVISION [{}] - " + s"verify AggregateRootType indexes() type parameterization old:[${oldKey.toString}] new:[${newKey.toString}] :" + "[{}] identifier:[{}]", ( topic, self.path ), message, tid ) } case Directive.ReviseValue( _, _, _ ) => { log.warning( "IndexAggregate[{}] UNHANDLED VALUE REVISION:[{}] - " + "verify AggregateRootType indexes type parameterization identifier:[{}]", ( topic, self.path ), message, tid ) } case _ => log.warning( "IndexAggregate[{}] identifier:[{}] UNHANDLED message:[{}]", self.path, tid, message ) } } }
dmrolfs/demesne
core/src/main/scala/demesne/index/IndexAggregate.scala
Scala
apache-2.0
11,779
package com.mentatlabs.nsa
package scalac
package options

/* -Xno-patmat-analysis
 * ====================
 * 2.10.0 - 2.12.0: Don't perform exhaustivity/unreachability analysis. Also, ignore @switch annotation.
 */
case object ScalacXNoPatmatAnalysis
  extends ScalacOptionBoolean("-Xno-patmat-analysis", ScalacVersions.`2.10.0`)
mentat-labs/sbt-nsa
nsa-core/src/main/scala/com/mentatlabs/nsa/scalac/options/advanced/ScalacXNoPatmatAnalysis.scala
Scala
bsd-3-clause
335
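For context, a plain sbt setting equivalent to the flag this option object wraps (an assumption about downstream use; the sbt-nsa DSL itself is not shown in the record above).

// Raw compiler flag, applicable to Scala 2.10.0 - 2.12.0 per the comment above.
scalacOptions += "-Xno-patmat-analysis"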
package org.psliwa.idea.composerJson.intellij.codeAssist.composer

import com.intellij.codeInsight.intention.IntentionAction
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiFile
import org.psliwa.idea.composerJson.ComposerBundle
import org.psliwa.idea.composerJson.composer._
import org.psliwa.idea.composerJson.composer.command.DefaultPackagesInstaller
import org.psliwa.idea.composerJson.composer.model.PackageName
import org.psliwa.idea.composerJson.intellij.PsiElements._
import org.psliwa.idea.composerJson.intellij.codeAssist.composer.NotInstalledPackages._

private object InstallPackagesAction extends IntentionAction {
  override def getText: String = ComposerBundle.message("inspection.quickfix.installNotInstalledPackages")

  override def getFamilyName: String = ComposerBundle.message("inspection.group")

  override def invoke(project: Project, editor: Editor, file: PsiFile): Unit = {
    val documentManager = FileDocumentManager.getInstance()

    for {
      document <- Option(documentManager.getDocument(file.getVirtualFile))
    } yield documentManager.saveDocument(document)

    val installedPackages = InstalledPackages.forFile(file.getVirtualFile)
    val packages = for {
      jsonFile <- ensureJsonFile(file).toList
      topValue <- Option(jsonFile.getTopLevelValue).toList
      packageName <- getNotInstalledPackageProperties(topValue, installedPackages).map(
        property => PackageName(property.getName)
      )
    } yield packageName

    if (packages.nonEmpty) {
      new DefaultPackagesInstaller(project, file).install(packages)
    }
  }

  override def startInWriteAction(): Boolean = false

  override def isAvailable(project: Project, editor: Editor, file: PsiFile): Boolean = true
}
psliwa/idea-composer-plugin
src/main/scala/org/psliwa/idea/composerJson/intellij/codeAssist/composer/InstallPackagesAction.scala
Scala
mit
1,884
package net.resonious.sburb.entities

import net.resonious.sburb.game.After
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.resonious.sburb.abstracts.Vector3
import net.resonious.sburb.Sburb
import net.minecraft.entity.player.EntityPlayer
import net.minecraft.entity.Entity
import cpw.mods.fml.common.registry.IEntityAdditionalSpawnData
import net.minecraft.client.renderer.entity.Render
import net.minecraft.client.renderer.Tessellator
import net.minecraft.world.World
import org.lwjgl.opengl.GL11
import net.minecraft.nbt.NBTTagCompound
import net.minecraft.util.ResourceLocation
import scala.math._
import scala.util.Random
import net.resonious.sburb.game.SburbProperties
import net.minecraft.util.ResourceLocation
import net.minecraft.client.Minecraft
import net.minecraft.client.renderer.RenderHelper
import net.resonious.sburb.packets.ActivePacket
import net.resonious.sburb.abstracts.PacketPipeline
import com.xcompwiz.mystcraft.api.impl.InternalAPI
import com.xcompwiz.mystcraft.world.agedata.AgeData
import net.minecraftforge.common._
import cpw.mods.fml.common.registry._
import io.netty.buffer.ByteBuf

abstract class Portal(world: World) extends Entity(world) with IEntityAdditionalSpawnData {
  var targetPos: Vector3[Int] = new Vector3[Int](0, 50, 0)
  var targetDim: Int = 0
  var color: Vector3[Float] = new Vector3[Float](1, 1, 1)
  val warpRadius: Double = 1.25

  def setColorFromString(colorStr: String) = {
    color = colorStr match {
      case "Black"  => new Vector3[Float](0f, 0f, 0f)
      case "Red"    => new Vector3[Float](1f, 0f, 0f)
      case "Green"  => new Vector3[Float](0f, 1f, 0f)
      case "Blue"   => new Vector3[Float](0f, 0f, 1f)
      case "Yellow" => new Vector3[Float](1f, 0.9f, 0f)
      case "White"  => new Vector3[Float](0f, 0f, 0f)
      case what => {
        if (what != "any") Sburb log "Unexpected color "+what+" for portal!"
        new Vector3[Float](rand.nextFloat, rand.nextFloat, rand.nextFloat)
      }
    }
  }

  def setColorFromWorld(): Portal = {
    if (InternalAPI.dimension.isMystcraftAge(world.provider.dimensionId)) {
      val age = AgeData.getAge(world.provider.dimensionId, Sburb.isClient)
      val colorStr = age.cruft.get("sburbcolor")
      setColorFromString(colorStr.toString.replace("\"", ""))
    } else
      setColorFromString("any")

    this
  }

  override def onCollideWithPlayer(player: EntityPlayer): Unit = {
    if (Sburb.isServer) {
      (posX - player.posX, posZ - player.posZ) match {
        case (x, y) =>
          if (sqrt(x*x+y*y) <= warpRadius) {
            val props = SburbProperties of player
            if (!props.serverMode.activated)
              Sburb.warpPlayer(player, targetDim, targetPos)
          }
      }
    }
  }

  override def writeSpawnData(buf: ByteBuf): Unit = {
    buf.writeFloat(color.r)
    buf.writeFloat(color.g)
    buf.writeFloat(color.b)
  }

  override def readSpawnData(buf: ByteBuf): Unit = {
    color.r = buf.readFloat
    color.g = buf.readFloat
    color.b = buf.readFloat
  }

  override def writeEntityToNBT(comp: NBTTagCompound): Unit = {
    comp.setInteger("targetDim", targetDim)
    comp.setInteger("targetPosX", targetPos.x)
    comp.setInteger("targetPosY", targetPos.y)
    comp.setInteger("targetPosZ", targetPos.z)
    comp.setFloat("colorR", color.r)
    comp.setFloat("colorG", color.g)
    comp.setFloat("colorB", color.b)
  }

  override def readEntityFromNBT(comp: NBTTagCompound): Unit = {
    targetDim = comp.getInteger("targetDim")
    targetPos.x = comp.getInteger("targetPosX")
    targetPos.y = comp.getInteger("targetPosY")
    targetPos.z = comp.getInteger("targetPosZ")
    color.r = comp.getFloat("colorR")
    color.g = comp.getFloat("colorG")
    color.b = comp.getFloat("colorB")
  }
}
Resonious/mcsburb
src/main/scala/net/resonious/sburb/entities/Portal.scala
Scala
mit
3,799
/* * Copyright (c) 2012 Miles Sabin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package shapeless /** * Generic Zipper for any type with a representation via `Generic`. * * @author Miles Sabin */ case class Zipper[C, L <: HList, R <: HList, P](prefix : L, suffix : R, parent : P) { import Zipper._ import Nat._ type Self = Zipper[C, L, R, P] /** Move the cursor one place to the right. Available only if not already at the rightmost element. */ def right(implicit right : Right[Self]) : right.Out = right(this) /** Move the cursor one place to the left. Available only if not already at the leftmost element. */ def left(implicit left : Left[Self]) : left.Out = left(this) /** Moves the cursor to the leftmost position. */ def first(implicit first : First[Self]) : first.Out = first(this) /** Moves the cursor to the rightmost position. */ def last(implicit last : Last[Self]) : last.Out = last(this) /** Move the cursor ''n'' places to the right. Requires an explicit type argument. Available only if there are * ''n'' places to the right of the cursor. */ def rightBy[N <: Nat](implicit rightBy : RightBy[Self, N]) = rightBy(this) /** Move the cursor ''n'' places to the right. Available only if there are ''n'' places to the right of the cursor. */ def rightBy[N <: Nat](n : N)(implicit rightBy : RightBy[Self, N]) = rightBy(this) /** Move the cursor ''n'' places to the left. Requires an explicit type argument. Available only if there are * ''n'' places to the left of the cursor. */ def leftBy[N <: Nat](implicit leftBy : LeftBy[Self, N]) = leftBy(this) /** Move the cursor ''n'' places to the left. Available only if there are ''n'' places to the right of the cursor. */ def leftBy[N <: Nat](n : N)(implicit leftBy : LeftBy[Self, N]) = leftBy(this) /** Move the cursor to the first element of type `T` to the right. Available only if there is an element of type `T` * to the right of the cursor. */ def rightTo[T](implicit rightTo : RightTo[Self, T]) = rightTo(this) /** Move the cursor to the first element of type `T` to the left. Available only if there is an element of type `T` * to the left of the cursor. */ def leftTo[T](implicit leftTo : LeftTo[Self, T]) = leftTo(this) /** Moves the cursor up to the next level. The element at the new cursor position will be updated with the * reification of the current level. */ def up(implicit up : Up[Self]) : up.Out = up(this) /** Moves the cursor down to the next level, placing it at the first element on the left. Available only if the * element current at the cursor has a representation via `Generic`. */ def down(implicit down : Down[Self]) : down.Out = down(this) /** Moves the cursor to root of this Zipper. */ def root(implicit root : Root[Self]) : root.Out = root(this) /** Returns the element at the cursor. Available only if the underlying `HList` is non-empty. */ def get(implicit get : Get[Self]) : get.Out = get(this) /** Replaces the element at the cursor. Available only if the underlying `HList` is non-empty. 
*/ def put[E](e : E)(implicit put : Put[Self, E]) : put.Out = put(this, e) /** Inserts a new element to the left of the cursor. */ def insert[E](e : E)(implicit insert : Insert[Self, E]) : insert.Out = insert(this, e) /** Removes the element at the cursor. Available only if the underlying `HList` is non-empty. */ def delete(implicit delete : Delete[Self]) : delete.Out = delete(this) /** Reifies the current level of this `Zipper`. */ def reify(implicit reify : Reify[Self]) : reify.Out = reify(this) } object Zipper { def apply[C, CL <: HList](c : C)(implicit gen : GenericAux[C, CL]) : Zipper[C, HNil, CL, None.type] = Zipper[C, HNil, CL, None.type](HNil, gen.to(c), None) /** Enhances values of any type with a representation via `Generic` with a method supporting conversion to a `Zipper`. */ class ToZipper[C](c : C) { def toZipper[CL <: HList](implicit gen : GenericAux[C, CL]) = Zipper(c) } implicit def toZipper[C](c : C) = new ToZipper(c) trait ZipperOp0[Z] { type Out def apply(z : Z) : Out } trait ZipperOp1[Z, T] { type Out def apply(z : Z, t : T) : Out } trait ZipperOp0Nat[Z, N <: Nat] { type Out def apply(z : Z) : Out } trait ZipperOp0T[Z, T] { type Out def apply(z : Z) : Out } trait Right[Z] extends ZipperOp0[Z] object Right { implicit def right[C, L <: HList, RH, RT <: HList, P] = new Right[Zipper[C, L, RH :: RT, P]] { type Out = Zipper[C, RH :: L, RT, P] def apply(z : Zipper[C, L, RH :: RT, P]) = Zipper(z.suffix.head :: z.prefix, z.suffix.tail, z.parent) } } trait Left[Z] extends ZipperOp0[Z] object Left { implicit def left[C, LH, LT <: HList, R <: HList, P] = new Left[Zipper[C, LH :: LT, R, P]] { type Out = Zipper[C, LT, LH :: R, P] def apply(z : Zipper[C, LH :: LT, R, P]) = Zipper(z.prefix.tail, z.prefix.head :: z.suffix, z.parent) } } trait First[Z] extends ZipperOp0[Z] object First { implicit def first[C, L <: HList, R <: HList, RP <: HList, P](implicit rp : ReversePrependAux[L, R, RP]) = new First[Zipper[C, L, R, P]] { type Out = Zipper[C, HNil, RP, P] def apply(z : Zipper[C, L, R, P]) = Zipper(HNil, z.prefix reverse_::: z.suffix, z.parent) } } trait Last[Z] extends ZipperOp0[Z] object Last { implicit def last[C, L <: HList, R <: HList, RP <: HList, P](implicit rp : ReversePrependAux[R, L, RP]) = new Last[Zipper[C, L, R, P]] { type Out = Zipper[C, RP, HNil, P] def apply(z : Zipper[C, L, R, P]) = Zipper(z.suffix reverse_::: z.prefix, HNil, z.parent) } } trait RightBy[Z, N <: Nat] extends ZipperOp0Nat[Z, N] object RightBy { import HList._ implicit def rightBy[C, L <: HList, R <: HList, P, N <: Nat, LP <: HList, RS <: HList] (implicit split : SplitAux[R, N, LP, RS], reverse : ReversePrepend[LP, L]) = new RightBy[Zipper[C, L, R, P], N] { type Out = Zipper[C, reverse.Out, RS, P] def apply(z : Zipper[C, L, R, P]) = { val (p, s) = z.suffix.split[N] Zipper(p reverse_::: z.prefix, s, z.parent) } } } trait LeftBy[Z, N <: Nat] extends ZipperOp0Nat[Z, N] object LeftBy { import HList._ implicit def leftBy[C, L <: HList, R <: HList, P, N <: Nat, RP <: HList, LS <: HList] (implicit split : SplitAux[L, N, RP, LS], reverse : ReversePrepend[RP, R]) = new LeftBy[Zipper[C, L, R, P], N] { type Out = Zipper[C, LS, reverse.Out, P] def apply(z : Zipper[C, L, R, P]) = { val (p, s) = z.prefix.split[N] Zipper(s, p reverse_::: z.suffix, z.parent) } } } trait RightTo[Z, T] extends ZipperOp0T[Z, T] object RightTo { import HList._ implicit def rightTo[C, L <: HList, R <: HList, P, T, LP <: HList, RS <: HList] (implicit split : SplitLeftAux[R, T, LP, RS], reverse : ReversePrepend[LP, L]) = new RightTo[Zipper[C, 
L, R, P], T] { type Out = Zipper[C, reverse.Out, RS, P] def apply(z : Zipper[C, L, R, P]) = { val (p, s) = z.suffix.splitLeft[T] Zipper(p reverse_::: z.prefix, s, z.parent) } } } trait LeftTo[Z, T] extends ZipperOp0T[Z, T] object LeftTo { import HList._ implicit def leftTo[C, L <: HList, R <: HList, P, T, RP <: HList, R0 <: HList] (implicit split : SplitLeftAux[L, T, RP, R0], reverse : ReversePrepend[RP, R], cons : IsHCons[R0]) = new LeftTo[Zipper[C, L, R, P], T] { type Out = Zipper[C, cons.T, cons.H :: reverse.Out, P] def apply(z : Zipper[C, L, R, P]) = { val (p, s) = z.prefix.splitLeft[T] Zipper(s.tail, s.head :: (p reverse_::: z.suffix), z.parent) } } } trait Up[Z] extends ZipperOp0[Z] object Up { implicit def up[C, L <: HList, R <: HList, P] (implicit rz : Reify[Zipper[C, L, R, Some[P]]] { type Out = C }, pp : Put[P, C]) = new Up[Zipper[C, L, R, Some[P]]] { type Out = pp.Out def apply(z : Zipper[C, L, R, Some[P]]) = pp(z.parent.get, z.reify) } } trait Down[Z] extends ZipperOp0[Z] object Down { implicit def down[C, L <: HList, RH, RT <: HList, P, RHL <: HList](implicit gen : GenericAux[RH, RHL]) = new Down[Zipper[C, L, RH :: RT, P]] { type Out = Zipper[RH, HNil, RHL, Some[Zipper[C, L, RH :: RT, P]]] def apply(z : Zipper[C, L, RH :: RT, P]) = Zipper(HNil, gen.to(z.suffix.head), Some(z)) } } trait Root[Z] extends ZipperOp0[Z] object Root extends { implicit def rootRoot[C, L <: HList, R <: HList] = new Root[Zipper[C, L, R, None.type]] { type Out = Zipper[C, L, R, None.type] def apply(z : Zipper[C, L, R, None.type]) = z } implicit def nonRootRoot[C, L <: HList, R <: HList, P, U] (implicit up : Up[Zipper[C, L, R, Some[P]]] { type Out = U }, pr : Root[U]) = new Root[Zipper[C, L, R, Some[P]]] { type Out = pr.Out def apply(z : Zipper[C, L, R, Some[P]]) = pr(z.up) } } trait Get[Z] extends ZipperOp0[Z] object Get { implicit def get[C, L <: HList, RH, RT <: HList, P] = new Get[Zipper[C, L, RH :: RT, P]] { type Out = RH def apply(z : Zipper[C, L, RH :: RT, P]) = z.suffix.head } } trait Put[Z, E] extends ZipperOp1[Z, E] trait LowPriorityPut { implicit def put[C, L <: HList, RH, RT <: HList, P, E, CL <: HList] (implicit gen : GenericAux[C, CL], rp : ReversePrependAux[L, E :: RT, CL]) = new Put[Zipper[C, L, RH :: RT, P], E] { type Out = Zipper[C, L, E :: RT, P] def apply(z : Zipper[C, L, RH :: RT, P], e : E) = Zipper(z.prefix, e :: z.suffix.tail, z.parent) } } object Put extends LowPriorityPut { implicit def hlistPut[C <: HList, L <: HList, RH, RT <: HList, P, E, CL <: HList] (implicit rp : ReversePrependAux[L, E :: RT, CL]) = new Put[Zipper[C, L, RH :: RT, P], E] { type Out = Zipper[CL, L, E :: RT, P] def apply(z : Zipper[C, L, RH :: RT, P], e : E) = Zipper(z.prefix, e :: z.suffix.tail, z.parent) } } trait Insert[Z, E] extends ZipperOp1[Z, E] object Insert { implicit def hlistInsert[C <: HList, L <: HList, R <: HList, P, E, CL <: HList] (implicit rp : ReversePrependAux[E :: L, R, CL]) = new Insert[Zipper[C, L, R, P], E] { type Out = Zipper[CL, E :: L, R, P] def apply(z : Zipper[C, L, R, P], e : E) = Zipper(e :: z.prefix, z.suffix, z.parent) } } trait Delete[Z] extends ZipperOp0[Z] object Delete { implicit def hlistDelete[C <: HList, L <: HList, RH, RT <: HList, P, CL <: HList] (implicit rp : ReversePrependAux[L, RT, CL]) = new Delete[Zipper[C, L, RH :: RT, P]] { type Out = Zipper[CL, L, RT, P] def apply(z : Zipper[C, L, RH :: RT, P]) = Zipper(z.prefix, z.suffix.tail, z.parent) } } trait Reify[Z] extends ZipperOp0[Z] object Reify { implicit def reify[C, L <: HList, R <: HList, P, CL <: HList] 
(implicit gen : GenericAux[C, CL], rp : ReversePrependAux[L, R, CL]) = new Reify[Zipper[C, L, R, P]] { type Out = C def apply(z : Zipper[C, L, R, P]) = gen.from(z.prefix reverse_::: z.suffix) } } }
non/shapeless
core/src/main/scala/shapeless/zipper.scala
Scala
apache-2.0
12,052
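A hedged usage sketch for the Zipper above, assuming GenericAux derivation is available for a simple case class; Person and its values are illustrative names, not from the repository.

import shapeless._
import Zipper._

case class Person(name: String, age: Int)

// Move the cursor one step right, replace the element under it, then reify back to a Person.
val p = Person("Joe", 35)
val older: Person = p.toZipper.right.put(36).reify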
package com.arcusys.learn.models.Gradebook

import org.joda.time.DateTime

/**
 * Activity attempt model for response
 */
case class StatementAttemptResponse(id: Int,
  date: DateTime,
  grade: Int,
  userResponse: String,
  correctResponse: String)
ViLPy/Valamis
learn-portlet/src/main/scala/com/arcusys/learn/models/Gradebook/StatementAttemptResponse.scala
Scala
lgpl-3.0
250
package pl.writeonly.son2.impl

import java.io.FileNotFoundException

import pl.writeonly.son2.funs.streamers.StreamerPipeForeach
import pl.writeonly.son2.jack.core.FormatsJack
import pl.writeonly.son2.jack.glue.CreatorConverterJack
import pl.writeonly.scalaops.specs.BlackSpec

class StreamerYamlFeatureSpec extends BlackSpec {
  info("StreamerPipe with ProviderYaml")

  val given = () => new StreamerPipeForeach(CreatorConverterJack(FormatsJack.YAML).get)
  val outName = (name: String) => Features.outputPathname(Types.STREAMER, name, FormatsJack.YAML)

  feature(classOf[StreamerYamlFeatureSpec].getSimpleName) {
    scenario("Apply with null pathname") {
      Given("converter FileJson2Yaml")
      val streamer = given()
      val name: String = null
      When("should produce null when consume null")
      val caught = intercept[NullPointerException] {
        streamer.convertFile(name, name)
      }
      Then("null == message")
      val message = caught.getMessage
      assert(null == message)
    }
    scenario("Apply with empty pathname") {
      Given("converter FileJson2Yaml")
      val streamer = given()
      When("should produce empty when consume empty")
      assertThrows[FileNotFoundException] {
        streamer.convertFile("", "")
      }
    }
    scenario("Apply with pathname") {
      Given("converter FileJson2Yaml")
      val streamer = given()
      val in = Features.inputPathname
      val out = outName("pathname")
      When("should produce null when consume null")
      streamer.convertFile(in, out)
    }
    scenario("Apply with uri") {
      Given("converter FileJson2Yaml")
      val streamer = given()
      val in = Features.inputURI
      val out = Features.toURI(outName("uri"))
      When("should produce null when consume null")
      streamer.convertFile(in, out)
    }
    scenario("Apply with streamer") {
      Given("converter FileJson2Yaml")
      val streamer = given()
      val in = Features.inputFile
      val out = Features.toFile(outName("streamer"))
      When("should produce out when consume in")
      streamer.convertFile(in, out)
    }
  }
}
writeonly/son2
scallions-clis/scallions-main/src/test/scala/pl/writeonly/son2/impl/StreamerYamlFeatureSpec.scala
Scala
apache-2.0
2,136
package com.codahale.jersey.params

import javax.ws.rs.WebApplicationException
import javax.ws.rs.core.Response
import javax.ws.rs.core.Response.Status

/**
 * An abstract base class from which to build parameter classes.
 */
abstract class AbstractParam[A](val input: String) {
  val value: A = try {
    parse(input)
  } catch {
    case e => throw new WebApplicationException(onError(input, e))
  }

  /**
   * Given a string representation, parse it and return an instance of the
   * parameter type.
   */
  protected def parse(input: String): A

  /**
   * Given a string representation which was unable to be parsed and the
   * exception thrown, produce a Response to be sent to the client.
   *
   * By default, generates a 400 Bad Request with a plain text entity generated
   * by renderError.
   */
  protected def onError(input: String, e: Throwable): Response = {
    Response.status(status).entity(renderError(input, e)).build
  }

  /**
   * Given a string representation which was unable to be parsed, produce a
   * Status for the Response to be sent to the client.
   */
  protected def status: Response.Status = Status.BAD_REQUEST

  /**
   * Given a string representation which was unable to be parsed and the
   * exception thrown, produce a plain text entity to be sent to the client.
   */
  protected def renderError(input: String, e: Throwable): String = {
    "Invalid parameter: %s (%s)".format(input, e.getMessage)
  }

  override def toString = value.toString
}
codahale/jersey-scala
src/main/scala/com/codahale/jersey/params/AbstractParam.scala
Scala
mit
1,492
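A sketch of a concrete parameter type built on the AbstractParam base class above; IntParam and the resource-method comment are hypothetical.

// Hypothetical subclass: parses the raw request string into an Int,
// letting AbstractParam translate parse failures into a 400 Bad Request.
class IntParam(s: String) extends AbstractParam[Int](s) {
  protected def parse(input: String): Int = input.toInt
}
// e.g. in a resource method: def get(@QueryParam("count") count: IntParam) = count.value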
package com.github.akiomik.leap_scala

import com.leapmotion.leap.Gesture.State

object StaticState extends StaticState

trait StaticState {
  val Invalid = State.STATE_INVALID
  val Start   = State.STATE_START
  val Stop    = State.STATE_STOP
  val Update  = State.STATE_UPDATE
}
akiomik/leap-scala
src/main/scala/com/github/akiomik/leap_scala/StaticState.scala
Scala
mit
282
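An illustrative pattern match over the aliased gesture states defined above; the describe helper is hypothetical.

import com.leapmotion.leap.Gesture

// Hypothetical helper mapping a gesture's state to a label via the StaticState aliases.
def describe(g: Gesture): String = g.state match {
  case StaticState.Start  => "started"
  case StaticState.Update => "in progress"
  case StaticState.Stop   => "finished"
  case _                  => "invalid"
}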
package lila.chat

import lila.user.User
import chess.Color

sealed trait Line {
  def text: String
  def author: String
  def deleted: Boolean
  def isSystem = author == systemUserId
  def isHuman = !isSystem
  def humanAuthor = isHuman option author
}

case class UserLine(
    username: String,
    text: String,
    troll: Boolean,
    deleted: Boolean) extends Line {

  def author = username

  def userId = User normalize username

  def delete = copy(deleted = true)

  def isVisible = !troll && !deleted
}

case class PlayerLine(
    color: Color,
    text: String) extends Line {
  def deleted = false
  def author = color.name
}

object Line {

  import lila.db.BSON
  import reactivemongo.bson.{ BSONHandler, BSONString }
  import org.apache.commons.lang3.StringEscapeUtils.unescapeHtml4

  private val invalidLine = UserLine("", "[invalid character]", troll = false, deleted = true)

  def userLineBSONHandler(encoded: Boolean) = new BSONHandler[BSONString, UserLine] {
    def read(bsonStr: BSONString) = strToUserLine {
      if (encoded) unescapeHtml4(bsonStr.value) else bsonStr.value
    } | invalidLine
    def write(x: UserLine) = BSONString(userLineToStr(x))
  }

  def lineBSONHandler(encoded: Boolean) = new BSONHandler[BSONString, Line] {
    def read(bsonStr: BSONString) = strToLine {
      if (encoded) unescapeHtml4(bsonStr.value) else bsonStr.value
    } | invalidLine
    def write(x: Line) = BSONString(lineToStr(x))
  }

  private val UserLineRegex = """^([\w-]{2,})(.)(.+)$""".r

  def strToUserLine(str: String): Option[UserLine] = str match {
    case UserLineRegex(username, " ", text) => UserLine(username, text, troll = false, deleted = false).some
    case UserLineRegex(username, "!", text) => UserLine(username, text, troll = true, deleted = false).some
    case UserLineRegex(username, "?", text) => UserLine(username, text, troll = false, deleted = true).some
    case _ => none
  }

  def userLineToStr(x: UserLine) = {
    val sep =
      if (x.troll) "!"
      else if (x.deleted) "?"
      else " "
    s"${x.username}$sep${x.text}"
  }

  def strToLine(str: String): Option[Line] = strToUserLine(str) orElse {
    str.headOption flatMap Color.apply map { color =>
      PlayerLine(color, str drop 2)
    }
  }

  def lineToStr(x: Line) = x match {
    case u: UserLine   => userLineToStr(u)
    case p: PlayerLine => s"${p.color.letter} ${p.text}"
  }
}
clarkerubber/lila
modules/chat/src/main/Line.scala
Scala
agpl-3.0
2,422
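A small round-trip sketch (values are illustrative) of the single-character separator encoding implemented by userLineToStr and strToUserLine above.

val line = UserLine("thibault", "hello", troll = false, deleted = false)
Line.userLineToStr(line)             // "thibault hello"  (" " visible, "!" troll, "?" deleted)
Line.strToUserLine("thibault hello") // Some(UserLine("thibault", "hello", false, false))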
package com.shorrockin.narrator

/**
 * sent from the master to a slave to register the work which needs
 * to be performed.
 */
case class RegisterWork(source:(String, String, Int), target:Slave, workload:Seq[Workload])

/**
 * sent from the slave back to the master to indicate that it is ready
 * to start executing the workload.
 */
case class ReadyToStart(source:Slave)

/**
 * sent from the master to the slave to indicate that we should start
 * executing all pending workloads.
 */
case class StartWork()
shorrockin/narrator
src/main/scala/Messages.scala
Scala
apache-2.0
512
package demo.components

import japgolly.scalajs.react._

import scala.scalajs.js

object CallbackDebug {

  trait Print[T] {
    def print(t: T): String
  }

  trait PrintLower {
    final implicit def PrintAny[T]: Print[T] = new Print[T]{
      override def print(t: T): String =
        if (t == js.undefined) "undefined" else t.toString
    }
  }

  object Print extends PrintLower {
    def apply[T: Print](t: T): String =
      implicitly[Print[T]].print(t)

    implicit def PrintEvent[E <: ReactEvent]: Print[E] = new Print[E] {
      override def print(e: E): String = {
        val d = e.asInstanceOf[js.Dynamic]
        val u = js.undefined.asInstanceOf[js.Dynamic]

        val event =
          if (d.clipboardData != u) "ReactClipboardEvent"
          else if (d.data != u) "ReactCompositionEvent"
          else if (d.dataTransfer != u) "ReactDragEvent"
          else if (d.relatedTarget != u) "ReactFocusEvent"
          else if (d.locale != u) "ReactKeyboardEvent"
          else if (d.buttons != u) "ReactMouseEvent"
          else if (d.touches != u) "ReactTouchEvent"
          else if (d.detail != u) "ReactUIEvent"
          else if (d.deltaZ != u) "ReactWheelEvent"
          else "ReactEvent"

        val t = e.target.asInstanceOf[js.Dynamic]
        val target =
          if (t.value != u) "I"
          else if (t.offsetTop != u) "H"
          else ""

        s"$event$target: t.value: ${t.value}, t.offsetTop: ${t.offsetTop}"
      }
    }
  }

  private def base(name: String, params: String*): Callback =
    Callback.info(s"Event handler: $name(${params.mkString(", ")})")

  def f0(name: String): Callback =
    base(name)

  def f1[T1: Print](name: String): js.UndefOr[T1 => Callback] =
    (_1: T1) => base(name, Print(_1))

  def f2[T1: Print, T2: Print](name: String): js.UndefOr[(T1, T2) => Callback] =
    (_1: T1, _2: T2) => base(name, Print(_1), Print(_2))

  def f3[T1: Print, T2: Print, T3: Print](name: String): js.UndefOr[(T1, T2, T3) => Callback] =
    (_1: T1, _2: T2, _3: T3) => base(name, Print(_1), Print(_2), Print(_3))
}
elacin/scalajs-react-components
demo/src/main/scala/demo/components/CallbackDebug.scala
Scala
apache-2.0
2,235
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.hmrc.domain

import play.api.libs.json.{Reads, Writes}

case class SaUtr(utr: String) extends TaxIdentifier with SimpleName {
  override def toString = utr

  def value = utr

  val name = "sautr"
}

object SaUtr extends (String => SaUtr) {
  implicit val saUtrWrite: Writes[SaUtr] = new SimpleObjectWrites[SaUtr](_.value)
  implicit val saUtrRead: Reads[SaUtr] = new SimpleObjectReads[SaUtr]("utr", SaUtr.apply)
}
hmrc/domain
src/main/scala/uk/gov/hmrc/domain/SaUtr.scala
Scala
apache-2.0
1,033
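A short usage sketch; the serialised JSON shape shown in the comment is inferred from SimpleObjectWrites(_.value), which lives elsewhere in the library, so treat it as an assumption rather than something this entry states:

import play.api.libs.json.Json
import uk.gov.hmrc.domain.SaUtr

object SaUtrSketch {
  def demo(): Unit = {
    val utr = SaUtr("1234567890")
    println(utr.value)          // 1234567890
    println(utr.name)           // sautr
    println(Json.toJson(utr))   // presumably "1234567890" (a bare JSON string), given SimpleObjectWrites(_.value)
  }
}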
package mayton.primes import java.awt.Color import java.awt.image.BufferedImage import java.io.FileOutputStream import java.nio.file.Files import javax.imageio.ImageIO object PrimeSpace { def main(args: Array[String]): Unit = { val SIZE = 1024 val PIXEL = 4 val CYAN = 0x12c2e8 val RED = 0xd22d96 val image = new BufferedImage(SIZE, SIZE, BufferedImage.TYPE_INT_RGB) val g2d = image.createGraphics for(i <- 0 to SIZE / PIXEL) { for(j <- 0 to SIZE / PIXEL) { g2d.setColor(if (PrimeLib.mutuallyPrime(i,j)) Color.RED else Color.CYAN) g2d.fillRect(i * PIXEL, j * PIXEL, PIXEL, PIXEL) } } ImageIO.write( image, "PNG", new FileOutputStream(s"out/mutually-prime-${SIZE}x${SIZE}.png")) } }
Mark-Kovalyov/primegen-experiments
mayton/mtn-primelib/src/main/scala/mayton/primes/PrimeSpace.scala
Scala
gpl-3.0
780
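PrimeLib.mutuallyPrime is not included in this entry; a plausible gcd-based definition (an assumption, not the library's actual code) that would make the plot colour coprime coordinate pairs red is:

object PrimeLib {
  /** Two integers are mutually (co)prime when their greatest common divisor is 1. */
  def mutuallyPrime(a: Int, b: Int): Boolean = {
    @annotation.tailrec
    def gcd(x: Int, y: Int): Int = if (y == 0) x else gcd(y, x % y)
    gcd(a, b) == 1
  }
}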
package helpers import javax.inject.{Inject, Singleton} import play.api.Logger import twitter4j.{OEmbedRequest, Status, Twitter, TwitterFactory} import twitter4j.conf.ConfigurationBuilder class TwitterAdapter( consumerKey: String, secretKey: String, accessToken: String, accessTokenSecret: String, cacheDurationInMilli: Long = 5 * 60 * 1000, cache: Cache ) { private val twitter = new TwitterFactory( new ConfigurationBuilder() .setOAuthConsumerKey(consumerKey) .setOAuthConsumerSecret(secretKey) .setOAuthAccessToken(accessToken) .setOAuthAccessTokenSecret(accessTokenSecret) .build() ).getInstance() def getLatestTweet(screenName: String): () => Option[Status] = cache.mayBeCached[Option[Status]]( gen = () => { val z: java.util.Iterator[Status] = twitter.getUserTimeline(screenName).iterator if (z.hasNext) Some(z.next) else None }, expirationInMillis = Some(cacheDurationInMilli) ) def getLatestTweetEmbed( screenName: String, omitScript: Boolean = true, maxWidth: Option[Int] = None ): () => Option[(String, java.time.Instant)] = cache.mayBeCached[Option[(String, java.time.Instant)]]( gen = () => getLatestTweet(screenName)().map { st => val tweetId = st.getId val req = new OEmbedRequest(tweetId, "https://twitter.com/" + screenName + "/status/" + tweetId) req.setOmitScript(omitScript) req.setHideMedia(false) maxWidth.foreach { mw => req.setMaxWidth(mw) } twitter.getOEmbed(req).getHtml -> java.time.Instant.ofEpochMilli(st.getCreatedAt.getTime) }, expirationInMillis = Some(cacheDurationInMilli) ) } @Singleton class TwitterAdapterRepo @Inject() ( cache: Cache ) { val logger = Logger(getClass) def apply( consumerKey: String, secretKey: String, accessToken: String, accessTokenSecret: String, cacheDurationInMilli: Long = 5 * 60 * 1000 ) = new TwitterAdapter( consumerKey, secretKey, accessToken, accessTokenSecret, cacheDurationInMilli, cache ) }
ruimo/store2
app/helpers/TwitterAdapter.scala
Scala
apache-2.0
2,023
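Wiring sketch, assuming a helpers.Cache instance is available (its mayBeCached contract is project-specific and not shown here); the credential strings and screen name are placeholders:

import helpers.{ Cache, TwitterAdapter }

object TwitterAdapterSketch {
  def latestEmbed(cache: Cache): Option[(String, java.time.Instant)] = {
    val adapter = new TwitterAdapter(
      consumerKey       = "ck",   // placeholder credentials
      secretKey         = "cs",
      accessToken       = "at",
      accessTokenSecret = "ats",
      cache             = cache
    )
    // Each getter returns a thunk; the trailing () forces the (possibly cached) lookup.
    adapter.getLatestTweetEmbed("ruimo", maxWidth = Some(500))()
  }
}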
package com.softwaremill.codebrag.common.config import org.scalatest.FlatSpec import org.scalatest.matchers.ShouldMatchers import com.typesafe.config.{ConfigParseOptions, ConfigResolveOptions, ConfigFactory} class ConfigWithDefaultSpec extends FlatSpec with ShouldMatchers { case class Spec[T](path: String, default: T, expectedValue: T, get: (String, T) => T) val configName: String = "test.conf" val config = new ConfigWithDefault { def rootConfig = { ConfigFactory.load(configName, ConfigParseOptions.defaults.setAllowMissing(false), ConfigResolveOptions.defaults) } } val booleans = List( new Spec("codebrag.booleanTrue", false, true, config.getBoolean), new Spec("codebrag.booleanFalse", true, false, config.getBoolean), new Spec("codebrag.booleanNonExists", false, false, config.getBoolean), new Spec("codebrag.booleanNonExists", true, true, config.getBoolean) ) val ints = List( new Spec("codebrag.int10", 0, 10, config.getInt), new Spec("codebrag.int0", 10, 0, config.getInt), new Spec("codebrag.intNotExist", 10, 10, config.getInt) ) val strings = List( new Spec("codebrag.stringTest", "wrong", "test", config.getString), new Spec("codebrag.stringEmpty", "wrong", "", config.getString), new Spec("codebrag.stringNotExists", "defaultString", "defaultString", config.getString) ) val optionalStrings = List( new Spec("codebrag.optionalStringDefined", None, Some("defined"), config.getOptionalString), new Spec("codebrag.optionalStringUndefined", None, None, config.getOptionalString) ) for (spec <- booleans) { doTest(spec) } for (spec <- ints) { doTest(spec) } for (spec <- strings) { doTest(spec) } for (spec <- optionalStrings) { doTest(spec) } def doTest[T](spec: Spec[T]) { s"Value from from $configName (path:${spec.path})" should s"be ${spec.expectedValue} (with default as ${spec.default})" in { //given (spec) //when val actual = spec.get(spec.path, spec.default) //then actual should be(spec.expectedValue) } } }
softwaremill/codebrag
codebrag-common/src/test/scala/com/softwaremill/codebrag/common/config/ConfigWithDefaultSpec.scala
Scala
agpl-3.0
2,105
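The spec only passes against a matching test.conf on the test classpath; that file is not part of this entry, but its contents can be inferred from the expectations above. A reconstruction, expressed via ConfigFactory.parseString so it stays checkable Scala:

import com.typesafe.config.ConfigFactory

object AssumedTestConf {
  // Keys ending in NonExists/NotExist/Undefined are deliberately absent,
  // so the spec falls back to the supplied default (or None).
  val config = ConfigFactory.parseString(
    """codebrag {
      |  booleanTrue  = true
      |  booleanFalse = false
      |  int10 = 10
      |  int0  = 0
      |  stringTest  = "test"
      |  stringEmpty = ""
      |  optionalStringDefined = "defined"
      |}""".stripMargin)
}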
package com.twitter.zipkin.receiver.kafka import com.twitter.zipkin.thriftscala.{Span => ThriftSpan} import com.twitter.util.{Closable, CloseAwaitably, FuturePool, Future, Time} import java.util.concurrent.{TimeUnit, Executors} import java.util.Properties import kafka.consumer.{Consumer, ConsumerConfig, ConsumerConnector} import kafka.serializer.{Decoder, StringDecoder} object KafkaProcessor{ type KafkaDecoder = Decoder[Option[List[ThriftSpan]]] val defaultKeyDecoder = new StringDecoder() def apply[T]( topics:Map[String, Int], config: Properties, process: Seq[ThriftSpan] => Future[Unit], keyDecoder: Decoder[T], valueDecoder: KafkaDecoder ): KafkaProcessor[T] = new KafkaProcessor(topics, config, process, keyDecoder, valueDecoder) } class KafkaProcessor[T]( topics: Map[String, Int], config: Properties, process: Seq[ThriftSpan] => Future[Unit], keyDecoder: Decoder[T], valueDecoder: KafkaProcessor.KafkaDecoder ) extends Closable with CloseAwaitably { private[this] val processorPool = { val consumerConnector: ConsumerConnector = Consumer.create(new ConsumerConfig(config)) val threadCount = topics.foldLeft(0) { case (sum, (_, a)) => sum + a } val pool = Executors.newFixedThreadPool(threadCount) for { (topic, streams) <- consumerConnector.createMessageStreams(topics, keyDecoder, valueDecoder) stream <- streams } pool.submit(new KafkaStreamProcessor(stream, process)) pool } def close(deadline: Time): Future[Unit] = closeAwaitably { FuturePool.unboundedPool { processorPool.shutdown() processorPool.awaitTermination(deadline.inMilliseconds, TimeUnit.MILLISECONDS) } } }
travisbrown/zipkin
zipkin-receiver-kafka/src/main/scala/com/twitter/zipkin/receiver/kafka/KafkaProcessor.scala
Scala
apache-2.0
1,694
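Construction sketch; the value decoder is passed in rather than guessed, since its concrete class isn't shown in this entry, and the properties are the usual kafka 0.8 high-level-consumer settings:

import java.util.Properties
import com.twitter.util.Future
import com.twitter.zipkin.receiver.kafka.KafkaProcessor

object KafkaProcessorSketch {
  def build(valueDecoder: KafkaProcessor.KafkaDecoder): KafkaProcessor[String] = {
    val props = new Properties()
    props.put("zookeeper.connect", "localhost:2181")
    props.put("group.id", "zipkin")

    KafkaProcessor(
      topics       = Map("zipkin" -> 1),          // one consumer thread for the "zipkin" topic
      config       = props,
      process      = _ => Future.Done,            // storage callback; a no-op here
      keyDecoder   = KafkaProcessor.defaultKeyDecoder,
      valueDecoder = valueDecoder
    )
  }
}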
/* * Copyright (c) 2021, salesforce.com, inc. * All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause */ package com.krux.hyperion.client import scala.jdk.CollectionConverters._ import com.amazonaws.services.datapipeline.DataPipeline import com.amazonaws.services.datapipeline.model.{CreatePipelineRequest, InvalidRequestException, PipelineObject, PutPipelineDefinitionRequest, Tag} import org.slf4j.LoggerFactory import com.krux.hyperion.DataPipelineDefGroup import com.krux.hyperion.PipelineLifeCycle.Status import com.krux.stubborn.policy.ExponentialBackoffAndJitter import com.krux.stubborn.Retryable import scala.collection.compat.immutable.LazyList case class UploadPipelineObjectsTrans( client: DataPipeline, pipelineDef: DataPipelineDefGroup, override val maxRetry: Int ) extends Transaction[Option[Unit], AwsClientForId] with Retryable with ExponentialBackoffAndJitter { val log = LoggerFactory.getLogger(getClass) val parameterObjects = pipelineDef.toAwsParameters val keyObjectsMap = pipelineDef.toAwsPipelineObjects private def createAndUploadObjects(name: String, objects: Seq[PipelineObject]): Option[String] = { val pipelineId = client .createPipeline( new CreatePipelineRequest() .withUniqueId(name) .withName(name) .withTags( pipelineDef.tags.toSeq .map { case (k, v) => new Tag().withKey(k).withValue(v.getOrElse("")) } .asJava ) ) .retry() .getPipelineId log.info(s"Created pipeline $pipelineId ($name)") log.info(s"Uploading pipeline definition to $pipelineId") pipelineDef.pipelineLifeCycle.onCreated(pipelineId, name, Status.Success) try { val putDefinitionResult = client .putPipelineDefinition( new PutPipelineDefinitionRequest() .withPipelineId(pipelineId) .withPipelineObjects(objects.asJava) .withParameterObjects(parameterObjects.asJava) ) .retry() putDefinitionResult.getValidationErrors.asScala .flatMap(err => err.getErrors.asScala.map(detail => s"${err.getId}: $detail")) .foreach(log.error) putDefinitionResult.getValidationWarnings.asScala .flatMap(err => err.getWarnings.asScala.map(detail => s"${err.getId}: $detail")) .foreach(log.warn) if (putDefinitionResult.getErrored) { log.error(s"Failed to upload pipeline definition to pipeline $pipelineId") log.error(s"Deleting the just created pipeline $pipelineId") AwsClientForId(client, Set(pipelineId), maxRetry).deletePipelines() //Pipeline Creation Failed. Update pipelineLifeCycle. pipelineDef.pipelineLifeCycle.onUploaded(name, pipelineId, Status.Fail) None } else if (putDefinitionResult.getValidationErrors.isEmpty && putDefinitionResult.getValidationWarnings.isEmpty) { log.info("Successfully created pipeline") //Pipeline Created Successfully. Update pipelineLifeCycle. pipelineDef.pipelineLifeCycle.onUploaded(name, pipelineId, Status.Success) Option(pipelineId) } else { log.warn("Successful with warnings") //Pipeline Created with warnings. Update pipelineLifeCycle. pipelineDef.pipelineLifeCycle.onUploaded(name, pipelineId, Status.SuccessWithWarnings) Option(pipelineId) } } catch { case e: InvalidRequestException => log.error(s"InvalidRequestException (${e.getErrorCode}): ${e.getErrorMessage}") log.error("Deleting the just created pipeline") AwsClientForId(client, Set(pipelineId), maxRetry).deletePipelines() //Pipeline Creation Failed. Update pipelineLifeCycle. 
pipelineDef.pipelineLifeCycle.onUploaded(name, pipelineId, Status.Fail) None } } def action() = AwsClientForId( client, keyObjectsMap .to(LazyList) // there is no need to keep perform createAndUploadObojects if one failed .map { case (key, objects) => log.info(s"Creating pipeline and uploading ${objects.size} objects") createAndUploadObjects(pipelineDef.nameForKey(key), objects) } .takeWhile(_.nonEmpty) .flatten .toSet, maxRetry ) def validate(result: AwsClientForId) = result.pipelineIds.size == keyObjectsMap.size def rollback(result: AwsClientForId) = result.deletePipelines() }
realstraw/hyperion
core/src/main/scala/com/krux/hyperion/client/UploadPipelineObjectsTrans.scala
Scala
bsd-3-clause
4,546
package ui.shader.builder import ui.shader.builder.types.GlType import ui.shader.builder.value.{GlValue, GlVec4Val} class GlAssign[T <: GlType](val variable: GlVar[T], val expr: GlValue[T]) extends GlCommand { override def toGlsl: String = { variable.toGlsl + " = " + expr.toGlsl + ";" } } object GlAssign { def apply[T <: GlType](variable: GlVar[T], expr: GlValue[T]): GlAssign[T] = { new GlAssign[T](variable, expr) } def init[T <: GlType](variable: GlVar[T], expr: GlValue[T]): GlAssign[T] = { new GlAssignInit[T](variable, expr) } def incr[T <: GlType](variable: GlVar[T], expr: GlValue[T]): GlAssign[T] = { new GlAssignIncr[T](variable, expr) } def decr[T <: GlType](variable: GlVar[T], expr: GlValue[T]): GlAssign[T] = { new GlAssignDecr[T](variable, expr) } } class GlAssignInit[T <: GlType](variable: GlVar[T], expr: GlValue[T]) extends GlAssign[T](variable, expr) { override def toGlsl: String = { s"${variable.glType.toGlsl} ${variable.toGlsl} = ${expr.toGlsl};" } } class GlAssignIncr[T <: GlType](variable: GlVar[T], expr: GlValue[T]) extends GlAssign[T](variable, expr) { override def toGlsl: String = { variable.toGlsl + " += " + expr.toGlsl + ";" } } class GlAssignDecr[T <: GlType](variable: GlVar[T], expr: GlValue[T]) extends GlAssign[T](variable, expr) { override def toGlsl: String = { variable.toGlsl + " -= " + expr.toGlsl + ";" } }
gvatn/play-scalajs-webgl-spark
client/src/main/scala/ui/shader/builder/GlAssign.scala
Scala
mit
1,704
package japgolly.scalajs.react.internal.monocle import japgolly.scalajs.react.extra.{StateSnapshot, StateSnapshotF} import japgolly.scalajs.react.util.NotAllowed import scala.annotation.nowarn @nowarn("cat=unused") trait MonocleExtStateSnapshot { import MonocleExtStateSnapshot._ @inline final implicit def MonocleReactExt_StateSnapshotNR(x: StateSnapshot.type): ObjectWithoutReuse.type = ObjectWithoutReuse @inline final implicit def MonocleReactExt_StateSnapshotWR(x: StateSnapshot.withReuse.type): ObjectWithReuse.type = ObjectWithReuse @inline final implicit def MonocleReactExt_StateSnapshot[F[_], A[_], S](x: StateSnapshotF[F, A, S]): Instance[F, A, S] = new Instance(x) @inline final implicit def MonocleReactExt_StateSnapshotWR[F[_], A[_], S](x: StateSnapshotF.InstanceMethodsWithReuse[F, A, S]): InstanceWithReuse[F, A, S] = new InstanceWithReuse(x) } object MonocleExtStateSnapshot { object ObjectWithoutReuse { def zoomL[S, T](lens: monocle.Lens[S, T]) = StateSnapshot.zoom(lens.get)(lens.replace) } object ObjectWithReuse { def zoomL[S, T](lens: monocle.Lens[S, T]) = StateSnapshot.withReuse.zoom(lens.get)(lens.replace) } final class Instance[F[_], A[_], S](private val self: StateSnapshotF[F, A, S]) extends AnyVal { def setStateL[T](l: monocle.Lens[S, T]): T => F[Unit] = b => self.setState(l.replace(b)(self.value)) def modStateL[T](l: monocle.Lens[S, T])(f: T => T): F[Unit] = self.setState(l.modify(f)(self.value)) /** THIS WILL VOID REUSABILITY. * * The resulting `StateSnapshot[T]` will not be reusable. */ def xmapStateL[T](iso: monocle.Iso[S, T]): StateSnapshotF[F, A, T] = self.xmapState(iso.get)(iso.reverseGet) /** THIS WILL VOID REUSABILITY. * * The resulting `StateSnapshot[T]` will not be reusable. */ def zoomStateL[T](lens: monocle.Lens[S, T]): StateSnapshotF[F, A, T] = self.zoomState(lens.get)(lens.replace) /** THIS WILL VOID REUSABILITY. * * The resulting `StateSnapshot[T]` will not be reusable. */ def zoomStateO[T](o: monocle.Optional[S, T]): Option[StateSnapshotF[F, A, T]] = self.zoomStateOption(o.getOption)(o.replace) } final class InstanceWithReuse[F[_], A[_], S](private val self: StateSnapshotF.InstanceMethodsWithReuse[F, A, S]) extends AnyVal { @deprecated("This ability doesn't work. See https://github.com/japgolly/scalajs-react/issues/721 for an explanation, and https://japgolly.github.io/scalajs-react/#examples/state-snapshot-2 for the alternative.", "1.7.1") def xmapStateL(no: NotAllowed) = no.result @deprecated("This ability doesn't work. See https://github.com/japgolly/scalajs-react/issues/721 for an explanation, and https://japgolly.github.io/scalajs-react/#examples/state-snapshot-2 for the alternative.", "1.7.1") def zoomStateL(no: NotAllowed) = no.result @deprecated("This ability doesn't work. See https://github.com/japgolly/scalajs-react/issues/721 for an explanation, and https://japgolly.github.io/scalajs-react/#examples/state-snapshot-2 for the alternative.", "1.7.1") def zoomStateO(no: NotAllowed) = no.result } }
japgolly/scalajs-react
extraExtMonocle3/src/main/scala/japgolly/scalajs/react/internal/monocle/MonocleExtStateSnapshot.scala
Scala
apache-2.0
3,207
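A sketch of what zoomStateL buys over plain zoomState: a monocle Lens supplies both the getter and the setter. It assumes the extension implicits above are in scope (however the build exposes them) and that the Settings snapshot comes from a component; per the scaladoc warnings, the zoomed snapshot is no longer reusable.

import japgolly.scalajs.react.extra.StateSnapshot
import monocle.macros.GenLens

object ZoomSketch {
  case class Settings(theme: String, volume: Int)
  val volumeL = GenLens[Settings](_.volume)

  // Narrow a snapshot of the whole Settings state down to just the volume field.
  def volumeSnapshot(settings: StateSnapshot[Settings]) =
    settings.zoomStateL(volumeL)
}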
package com.nutomic.ensichat.core.messages.body import java.nio.ByteBuffer import com.nutomic.ensichat.core.routing.Address import com.nutomic.ensichat.core.util.BufferUtils private[core] object RouteRequest { val Type = 2 /** * Constructs [[RouteRequest]] instance from byte array. */ def read(array: Array[Byte]): RouteRequest = { val b = ByteBuffer.wrap(array) val requested = new Address(BufferUtils.getByteArray(b, Address.Length)) val origSeqNum = BufferUtils.getUnsignedShort(b) val originMetric = BufferUtils.getUnsignedShort(b) val targSeqNum = b.getInt() new RouteRequest(requested, origSeqNum, targSeqNum, originMetric) } } private[core] case class RouteRequest(requested: Address, originSeqNum: Int, targSeqNum: Int, originMetric: Int) extends MessageBody { override def protocolType = RouteRequest.Type override def contentType = -1 override def write: Array[Byte] = { val b = ByteBuffer.allocate(length) b.put(requested.bytes) BufferUtils.putUnsignedShort(b, originSeqNum) BufferUtils.putUnsignedShort(b, originMetric) b.putInt(targSeqNum) b.array() } override def length = 8 + Address.Length }
Nutomic/ensichat
core/src/main/scala/com/nutomic/ensichat/core/messages/body/RouteRequest.scala
Scala
mpl-2.0
1,196
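Serialisation round-trip sketch. Both classes are private[core], so this would have to live under com.nutomic.ensichat.core; the all-ones address is just filler of the right length.

package com.nutomic.ensichat.core.messages.body

import com.nutomic.ensichat.core.routing.Address

object RouteRequestSketch {
  def demo(): Unit = {
    val destination = new Address(Array.fill[Byte](Address.Length)(1.toByte))
    val request     = RouteRequest(destination, originSeqNum = 42, targSeqNum = 7, originMetric = 3)
    val wire        = request.write              // Address.Length + 8 bytes
    val parsed      = RouteRequest.read(wire)    // recovers the address bytes and 42 / 7 / 3
    assert(parsed.originSeqNum == 42 && parsed.targSeqNum == 7 && parsed.originMetric == 3)
  }
}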
object i0 { inline def i1[i2[-i3]](implicit i1: i4[Int]): Unit = { val i2: i2[i1] = new i0(0) def this(i3: String) = new i1(10 => i3.i3()) } }
som-snytt/dotty
tests/fuzzy/dea0cf7fd832a6f39963ddda6ffd89b336d18808.scala
Scala
apache-2.0
142
package org.loom.scene

import org.loom.geometry._
import java.awt._

/**
 * Something the scene can animate and render each frame:
 * `update` advances the object's state, `draw` paints it onto the frame's graphics context.
 */
trait Drawable {

    def update(): Unit
    def draw (g2D: Graphics2D): Unit

}
brogan/Loom
src/org/loom/scene/Drawable.scala
Scala
gpl-3.0
145
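A minimal implementer of the trait, assuming the usual pattern of a scene loop calling update() once per frame and then draw() with that frame's Graphics2D:

import java.awt.{ Color, Graphics2D }
import org.loom.scene.Drawable

class BouncingSquare(var x: Int, y: Int) extends Drawable {
  private var dx = 3

  def update(): Unit = {           // advance one frame
    x += dx
    if (x < 0 || x > 380) dx = -dx
  }

  def draw(g2D: Graphics2D): Unit = {
    g2D.setColor(Color.ORANGE)
    g2D.fillRect(x, y, 20, 20)
  }
}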
/* * Copyright 2014 Timothy Danford * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.bdgenomics.scheduling import java.util.UUID /** * This is a little tricky -- the Provider isn't an EventSource in its own right, * it's a component to another EventSource (in this case, the Scheduler). Therefore, * it provides routines for constructing Events corresponding to the user's decisions, * but it doesn't live as a first-class EventSource on its own. */ trait Provider { def createResource( history : EventHistory, params : Parameters, component : Component ) : Event def shutdownResource( history : EventHistory, params : Parameters, resource : Resource ) : Event } class CloudProvider extends Provider { def createId() : String = UUID.randomUUID().toString override def createResource(history: EventHistory, params: Parameters, component: Component): Event = { val startupTime = history.currentTime + params.sampleResourceStartupTime() StartupEvent(startupTime, Resource(createId(), component, startupTime)) } override def shutdownResource(history: EventHistory, params: Parameters, resource: Resource): Event = ShutdownEvent(history.currentTime, resource) }
tdanford/scheduling-simulator
scheduling-core/src/main/scala/org/bdgenomics/scheduling/Provider.scala
Scala
apache-2.0
1,721
package com.github.luzhuomi.regex.deriv.diagnosis import scala.collection.Map._ import com.github.luzhuomi.regex.deriv.RE._ import com.github.luzhuomi.regex.deriv.Common._ import com.github.luzhuomi.regex.deriv.Parse._ import com.github.luzhuomi.regex.deriv.diagnosis.Ambiguity._ object Universality { def allDerivs(sigma:List[Char],r:RE):List[RE] = { def go(sofar:List[RE],rs:List[RE]):List[RE] = rs match { case Nil => sofar case _ => { val rsp = nub(rs.flatMap((r:RE) => { for { l <- sigma } yield simp(deriv(r,l)) }).filter((r:RE) => !sofar.contains(r))) go(nub(sofar++rs),rsp) } } go(Nil,List(r)) } def universal(sigma:List[Char],r:RE)(implicit m:PosEps[RE]):Boolean = { allDerivs(ascii,r) forall(m.posEps(_)) } val ascii : List[Char] = (0 to 255).map(_.toChar).toList }
luzhuomi/scala-deriv
src/main/scala/com/github/luzhuomi/regex/deriv/diagnosis/Universality.scala
Scala
apache-2.0
838