| code (string, 5-1M) | repo_name (string, 5-109) | path (string, 6-208) | language (string, 1 class) | license (string, 15 classes) | size (int64, 5-1M) |
---|---|---|---|---|---|
package org.sisioh.aws4s.s3.model
import java.util.Date
import com.amazonaws.services.s3.model.{ MultipartUpload, Owner }
import org.sisioh.aws4s.PimpedType
object RichMultipartUploadFactory {
def create(): MultipartUpload = new MultipartUpload()
}
class RichMultipartUpload(val underlying: MultipartUpload)
extends AnyVal with PimpedType[MultipartUpload] {
def keyOpt: Option[String] = Option(underlying.getKey)
def keyOpt_=(value: Option[String]): Unit =
underlying.setKey(value.orNull)
// ---
def uploadIdOpt: Option[String] = Option(underlying.getUploadId)
def uploadIdOpt_=(value: Option[String]): Unit =
underlying.setUploadId(value.orNull)
// ---
def ownerOpt: Option[Owner] = Option(underlying.getOwner)
def ownerOpt_=(value: Option[Owner]): Unit =
underlying.setOwner(value.orNull)
// ---
def initiatorOpt: Option[Owner] = Option(underlying.getInitiator)
def initiatorOpt_=(value: Option[Owner]): Unit =
underlying.setInitiator(value.orNull)
// ---
def storageClassOpt: Option[String] = Option(underlying.getStorageClass)
def storageClassOpt_=(value: Option[String]): Unit =
underlying.setStorageClass(value.orNull)
// ---
def initiatedOpt: Option[Date] = Option(underlying.getInitiated)
def initiatedOpt_=(value: Option[Date]): Unit =
underlying.setInitiated(value.orNull)
}
| everpeace/aws4s | aws4s-s3/src/main/scala/org/sisioh/aws4s/s3/model/RichMultipartUpload.scala | Scala | mit | 1,357 |
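A minimal usage sketch for the wrapper above (not part of the repository): it constructs the rich wrapper explicitly rather than relying on aws4s's implicit enrichment, and the key and owner values are illustrative.

```scala
import com.amazonaws.services.s3.model.Owner
import org.sisioh.aws4s.s3.model.{ RichMultipartUpload, RichMultipartUploadFactory }

object RichMultipartUploadSketch extends App {
  // Wrap a plain SDK object in the value class defined above.
  val rich = new RichMultipartUpload(RichMultipartUploadFactory.create())

  println(rich.keyOpt) // None: the SDK getter returns null until a key is set

  // Option-typed setters translate None back to null for the SDK.
  rich.keyOpt = Some("backups/2016-01-01.tar.gz")
  rich.ownerOpt = Some(new Owner("owner-id", "owner-display-name"))

  println(rich.keyOpt)   // Some(backups/2016-01-01.tar.gz)
  println(rich.ownerOpt) // Some(...) containing the owner just set
}
```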
package com.rasterfoundry.database
import com.rasterfoundry.datamodel._
import com.rasterfoundry.datamodel.{AuthFailure, PageRequest, Project, ToolRun}
import cats.Applicative
import cats.implicits._
import doobie._
import doobie.implicits._
import doobie.implicits.javasql._
import doobie.postgres.implicits._
import java.sql.Timestamp
import java.util.UUID
object MapTokenDao extends Dao[MapToken] {
val tableName = "map_tokens"
val selectF =
sql"""
SELECT
id, created_at, created_by, modified_at,
owner, name, project_id, toolrun_id
FROM
""" ++ tableF
def insert(
newMapToken: MapToken.Create,
user: User
): ConnectionIO[MapToken] = {
val id = UUID.randomUUID()
val now = new Timestamp(new java.util.Date().getTime())
val ownerId = util.Ownership.checkOwner(user, newMapToken.owner)
sql"""
INSERT INTO map_tokens
(id, created_at, created_by, modified_at, owner, name, project_id, toolrun_id)
VALUES
(${id}, ${now}, ${user.id}, ${now}, ${ownerId}, ${newMapToken.name},
${newMapToken.project}, ${newMapToken.toolRun})
""".update.withUniqueGeneratedKeys[MapToken](
"id",
"created_at",
"created_by",
"modified_at",
"owner",
"name",
"project_id",
"toolrun_id"
)
}
def authorize(
mapTokenId: UUID,
user: User,
actionType: ActionType
): ConnectionIO[Boolean] =
for {
mapTokenO <- MapTokenDao.query.filter(mapTokenId).selectOption
projAuthed = (
mapTokenO flatMap { _.project } map { (projectId: UUID) =>
{
ProjectDao.authorized(
user,
ObjectType.Project,
projectId,
actionType
)
}
}
).getOrElse(Applicative[ConnectionIO].pure(AuthFailure[Project]()))
toolRunAuthed = (
mapTokenO flatMap { _.toolRun } map { (toolRunId: UUID) =>
{
ToolRunDao.authorized(
user,
ObjectType.Analysis,
toolRunId,
actionType
)
}
}
).getOrElse(Applicative[ConnectionIO].pure(AuthFailure[ToolRun]()))
authTuple <- (projAuthed, toolRunAuthed).tupled
} yield { authTuple._1.toBoolean || authTuple._2.toBoolean }
def listAuthorizedMapTokens(
user: User,
mapTokenParams: CombinedMapTokenQueryParameters,
page: PageRequest
): ConnectionIO[PaginatedResponse[MapToken]] = {
val authedProjectsIO = ProjectDao.authQuery(user, ObjectType.Project).list
val authedAnalysesIO = ToolRunDao.authQuery(user, ObjectType.Analysis).list
for {
projAndAnalyses <- (authedProjectsIO, authedAnalysesIO).tupled
(authedProjects, authedAnalyses) = projAndAnalyses
projIdsF: Option[Fragment] = (authedProjects map { _.id }).toNel map {
Fragments.in(fr"project_id", _)
}
analysesIdsF: Option[Fragment] = (authedAnalyses map { _.id }).toNel map {
Fragments.in(fr"toolrun_id", _)
}
authFilterF: Fragment = projIdsF orElse analysesIdsF getOrElse Fragment.empty
mapTokens <- MapTokenDao.query
.filter(mapTokenParams)
.filter(authFilterF)
.page(page)
} yield { mapTokens }
}
def update(mapToken: MapToken, id: UUID): ConnectionIO[Int] = {
val updateTime = new Timestamp((new java.util.Date()).getTime)
val idFilter = fr"id = ${id}"
(sql"""
UPDATE map_tokens
SET
modified_at = ${updateTime},
owner = ${mapToken.owner},
name = ${mapToken.name},
project_id = ${mapToken.project},
toolrun_id = ${mapToken.toolRun}
""" ++ Fragments.whereAndOpt(Some(idFilter))).update.run
}
def create(
user: User,
owner: Option[String],
name: String,
project: Option[UUID],
toolRun: Option[UUID]
): ConnectionIO[MapToken] = {
val ownerId = util.Ownership.checkOwner(user, owner)
val newMapToken = MapToken.Create(name, project, toolRun, Some(ownerId))
insert(newMapToken, user)
}
def checkProject(
projectId: UUID
)(mapToken: UUID): ConnectionIO[Option[MapToken]] =
query
.filter(fr"project_id=${projectId}")
.filter(fr"id=${mapToken}")
.selectOption
def checkAnalysis(
analysisId: UUID
)(mapToken: UUID): ConnectionIO[Option[MapToken]] =
query
.filter(fr"toolrun_id=${analysisId}")
.filter(fr"id=${mapToken}")
.selectOption
}
| raster-foundry/raster-foundry | app-backend/db/src/main/scala/MapTokenDao.scala | Scala | apache-2.0 | 4,558 |
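`authorize` above only builds a `ConnectionIO[Boolean]`; nothing touches the database until it is interpreted through a doobie `Transactor`. A hedged sketch of a caller, assuming a `Transactor[IO]` named `xa` and a `User` value are already available and that `ActionType.View` is one of the datamodel's action types (both assumptions, not shown in this file):

```scala
import java.util.UUID

import cats.effect.IO
import com.rasterfoundry.database.MapTokenDao
import com.rasterfoundry.datamodel.{ActionType, User}
import doobie.Transactor
import doobie.implicits._

def canViewMapToken(xa: Transactor[IO], user: User, mapTokenId: UUID): IO[Boolean] =
  MapTokenDao
    .authorize(mapTokenId, user, ActionType.View) // pure description, nothing runs yet
    .transact(xa)                                 // interpret against the database
```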
/*
* Copyright (C) 2012 The Regents of The University California.
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shark.memstore2.column
import java.nio.ByteBuffer
import java.nio.ByteOrder
/**
* Reads a nullable column. Expects the byte buffer to contain as first element
* the null count, followed by the null indices, and finally the non nulls.
* Reading of non nulls is delegated by setting the buffer position to the first
* non null.
*/
class NullableColumnIterator(buffer: ByteBuffer) extends ColumnIterator {
private var _d: ByteBuffer = _
private var _nullCount: Int = _
private var _nulls = 0
private var _isNull = false
private var _currentNullIndex: Int = _
private var _pos = 0
private var _delegate: ColumnIterator = _
override def init() {
_d = buffer.duplicate()
_d.order(ByteOrder.nativeOrder())
_nullCount = _d.getInt()
_currentNullIndex = if (_nullCount > 0) _d.getInt() else Integer.MAX_VALUE
_pos = 0
// Move the buffer position to the non-null region.
buffer.position(buffer.position() + 4 + _nullCount * 4)
_delegate = ColumnIterator.newNonNullIterator(buffer)
}
override def next() {
if (_pos == _currentNullIndex) {
_nulls += 1
if (_nulls < _nullCount) {
_currentNullIndex = _d.getInt()
}
_isNull = true
} else {
_isNull = false
_delegate.next()
}
_pos += 1
}
override def hasNext: Boolean = (_nulls < _nullCount) || _delegate.hasNext
def current: Object = if (_isNull) null else _delegate.current
}
| mattyb149/blinkdb | src/main/scala/shark/memstore2/column/NullableColumnIterator.scala | Scala | apache-2.0 | 2,106 |
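To make the layout described in the class comment concrete, here is a small illustration (not from the repository) of the header region `init()` consumes: an `Int` null count followed by one `Int` per null position. The encoding of the non-null values that follow depends on `ColumnIterator.newNonNullIterator` and is deliberately left out.

```scala
import java.nio.{ByteBuffer, ByteOrder}

object NullHeaderLayoutSketch extends App {
  // Logical column [10, null, 30, null] has nulls at positions 1 and 3.
  val nullIndices = Array(1, 3)

  val header = ByteBuffer
    .allocate(4 + nullIndices.length * 4)
    .order(ByteOrder.nativeOrder())

  header.putInt(nullIndices.length)       // null count, read first by init()
  nullIndices.foreach(i => header.putInt(i)) // null positions, consumed one at a time in next()

  // NullableColumnIterator skips these 4 + nullCount * 4 bytes before handing
  // the buffer to the delegate iterator that decodes the non-null values.
  println(s"header occupies ${header.position()} bytes")
}
```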
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.rethink.config
import com.datamountaineer.kcql.WriteModeEnum
import com.datamountaineer.streamreactor.connect.rethink.TestBase
import org.mockito.MockitoSugar
import scala.collection.JavaConverters._
/**
* Created by [email protected] on 21/06/16.
* stream-reactor-maven
*/
class TestReThinkSinkSettings extends TestBase with MockitoSugar {
"should create a RethinkSetting for INSERT with all fields" in {
val config = ReThinkSinkConfig(getProps)
val settings = ReThinkSinkSettings(config)
val routes = settings.kcql.head
routes.getSource shouldBe TOPIC
routes.getTarget shouldBe TABLE
routes.getWriteMode shouldBe WriteModeEnum.INSERT
val conflict = settings.conflictPolicy(TABLE)
conflict shouldBe ReThinkConfigConstants.CONFLICT_ERROR
routes.getIgnoredFields.isEmpty shouldBe true
}
"should create a RethinkSetting for UPSERT with fields selection with RETRY" in {
val config = ReThinkSinkConfig(getPropsUpsertSelectRetry)
val settings = ReThinkSinkSettings(config)
val routes = settings.kcql.head
routes.getSource shouldBe TOPIC
routes.getTarget shouldBe TABLE
routes.getWriteMode shouldBe WriteModeEnum.UPSERT
val conflict = settings.conflictPolicy(TABLE)
conflict shouldBe ReThinkConfigConstants.CONFLICT_REPLACE
routes.getIgnoredFields.isEmpty shouldBe true
val fields = routes.getFields.asScala.toList
fields.size shouldBe 2
}
}
| datamountaineer/stream-reactor | kafka-connect-rethink/src/test/scala/com/datamountaineer/streamreactor/connect/rethink/config/TestReThinkSinkSettings.scala | Scala | apache-2.0 | 2,092 |
/*
* =========================================================================================
* Copyright © 2013-2017 the kamon project <http://kamon.io/>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
* =========================================================================================
*/
package kamon.netty
import io.netty.bootstrap.ServerBootstrap
import io.netty.buffer.{ByteBuf, Unpooled}
import io.netty.channel.epoll.{EpollEventLoopGroup, EpollServerSocketChannel}
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.socket.SocketChannel
import io.netty.channel.socket.nio.NioServerSocketChannel
import io.netty.channel.{ChannelFutureListener, _}
import io.netty.handler.codec.http._
import io.netty.handler.stream.ChunkedWriteHandler
import io.netty.util.CharsetUtil
class NioEventLoopBasedServer(port: Int) {
val bossGroup = new NioEventLoopGroup(1)
val workerGroup = new NioEventLoopGroup
val b = new ServerBootstrap
b.group(bossGroup, workerGroup)
.channel(classOf[NioServerSocketChannel])
.childHandler(new HttpServerInitializer)
val channel: Channel = b.bind(port).sync.channel
def close(): Unit = {
channel.close
bossGroup.shutdownGracefully()
workerGroup.shutdownGracefully()
}
}
class EpollEventLoopBasedServer(port: Int) {
val bossGroup = new EpollEventLoopGroup(1)
val workerGroup = new EpollEventLoopGroup
val b = new ServerBootstrap
b.group(bossGroup, workerGroup)
.channel(classOf[EpollServerSocketChannel])
.childHandler(new HttpServerInitializer)
val channel: Channel = b.bind(port).sync.channel
def close(): Unit = {
channel.close
bossGroup.shutdownGracefully()
workerGroup.shutdownGracefully()
}
}
object Servers {
def withNioServer[A](port:Int = 9001)(thunk: Int => A): A = {
val server = new NioEventLoopBasedServer(port)
try thunk(port) finally server.close()
}
def withEpollServer[A](port:Int = 9001)(thunk: Int => A): A = {
val server = new EpollEventLoopBasedServer(port)
try thunk(port) finally server.close()
}
}
private class HttpServerInitializer extends ChannelInitializer[SocketChannel] {
override def initChannel(ch: SocketChannel): Unit = {
val p = ch.pipeline
p.addLast(new HttpRequestDecoder(4096, 8192, 8192))
p.addLast(new HttpResponseEncoder())
p.addLast(new ChunkedWriteHandler)
p.addLast(new HttpServerHandler)
}
}
private class HttpServerHandler extends ChannelInboundHandlerAdapter {
private val ContentOk = Array[Byte]('H', 'e', 'l', 'l', 'o', ' ', 'W', 'o', 'r', 'l', 'd')
private val ContentError = Array[Byte]('E', 'r', 'r', 'o', 'r')
override def channelRead(ctx: ChannelHandlerContext, msg: scala.Any): Unit = {
if (msg.isInstanceOf[HttpRequest]) {
val request = msg.asInstanceOf[HttpRequest]
val isKeepAlive = HttpHeaders.isKeepAlive(request)
if (request.getUri.contains("/error")) {
val response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR, Unpooled.wrappedBuffer(ContentError))
response.headers.set("Content-Type", "text/plain")
response.headers.set("Content-Length", response.content.readableBytes)
val channelFuture = ctx.write(response)
addCloseListener(isKeepAlive)(channelFuture)
} else if (request.getUri.contains("/fetch-in-chunks")) {
val response = new DefaultHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK)
HttpHeaders.setTransferEncodingChunked(response)
response.headers.set("Content-Type", "text/plain")
ctx.write(response)
.addListener(new ChannelFutureListener {
override def operationComplete(cf: ChannelFuture): Unit =
writeChunk(cf.channel()).addListener(new ChannelFutureListener {
override def operationComplete(cf: ChannelFuture): Unit =
writeChunk(cf.channel()).addListener(new ChannelFutureListener {
override def operationComplete(cf: ChannelFuture) =
writeChunk(cf.channel()).addListener(new ChannelFutureListener {
override def operationComplete(cf: ChannelFuture) =
(writeLastContent _).andThen(addCloseListener(isKeepAlive))(cf.channel())})})})})
} else {
val response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK, Unpooled.wrappedBuffer(ContentOk))
response.headers.set("Content-Type", "text/plain")
response.headers.set("Content-Length", response.content.readableBytes)
val channelFuture = ctx.write(response)
addCloseListener(isKeepAlive)(channelFuture)
}
}
}
override def channelReadComplete(ctx: ChannelHandlerContext): Unit =
ctx.flush()
override def exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable): Unit =
ctx.close()
private def writeChunk(channel: Channel, content: ByteBuf = Unpooled.wrappedBuffer(ContentOk)): ChannelFuture = {
channel.writeAndFlush(new DefaultHttpContent(Unpooled.copiedBuffer("chunkkkkkkkkkkkkk", CharsetUtil.UTF_8)))
}
private def writeLastContent(channel: Channel): ChannelFuture = {
channel.writeAndFlush(new DefaultLastHttpContent(Unpooled.EMPTY_BUFFER))
}
private def addCloseListener(isKeepAlive: Boolean)(f: ChannelFuture): Unit = {
if (!isKeepAlive) f.addListener(ChannelFutureListener.CLOSE)
}
}
| kamon-io/kamon-netty | src/test/scala/kamon/netty/Servers.scala | Scala | apache-2.0 | 5,962 |
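`Servers.withNioServer` above is a loan-pattern helper: it starts the server, passes the port to the test body, and always shuts the server down. A minimal usage sketch (the HTTP client used to exercise the endpoints is left out and the URL is illustrative):

```scala
import kamon.netty.Servers

object ServersUsageSketch extends App {
  // Boots the Nio-based HTTP server on the default port 9001,
  // runs the body, and closes the server afterwards.
  Servers.withNioServer() { port =>
    // e.g. issue GET http://localhost:<port>/fetch-in-chunks with any HTTP client
    println(s"test server listening on port $port")
  }
}
```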
package org.jetbrains.plugins.scala.failed.annotator
import org.jetbrains.plugins.scala.PerfCycleTests
import org.jetbrains.plugins.scala.base.ScalaLightCodeInsightFixtureTestAdapter
import org.junit.experimental.categories.Category
/**
* @author Roman.Shein
* @since 28.03.2016.
*/
@Category(Array(classOf[PerfCycleTests]))
class OverridingAnnotatorTest2 extends ScalaLightCodeInsightFixtureTestAdapter {
//TODO: the issue does not reproduce when test is performed using OverridingAnnotatorTest
def testSCL3807(): Unit = {
checkTextHasNoErrors(
"""
|trait A {
| def foo(f: (=> A) => Int) = {_: A => 42}
|}
|
|object A extends A{
| def foo(f: (A) => Int) = null
|}
""".stripMargin)
}
def testScl6729(): Unit = {
checkTextHasNoErrors(
"""
|trait Foo
|
|trait Bar {
| def foo: Foo = _
|}
|
|trait Baz extends Bar {
| override object foo extends Foo
|}
""".stripMargin)
}
val START = ScalaLightCodeInsightFixtureTestAdapter.SELECTION_START
val END = ScalaLightCodeInsightFixtureTestAdapter.SELECTION_END
def testScl7536() {
checkTextHasError(
s"""
|class Abs(var name: String){ }
|
|class AbsImpl(${START}override${END} var name: String) extends Abs(name){ }
""".stripMargin, "overriding variable name in class Abs of type String")
}
}
| katejim/intellij-scala | test/org/jetbrains/plugins/scala/failed/annotator/OverridingAnnotatorTest2.scala | Scala | apache-2.0 | 1,471 |
package com.mesosphere.cosmos.handler
import com.mesosphere.cosmos.error.CosmosException
import com.mesosphere.cosmos.handler.ServiceUpdateHandler._
import com.mesosphere.cosmos.rpc
import io.circe.syntax._
import org.scalatest.FreeSpec
import org.scalatest.Matchers
class ServiceUpdateHandlerSpec extends FreeSpec with Matchers {
import ServiceUpdateHandlerSpec._
"The mergeStoredAndProvided() method" - {
"should always return provided when replace is true" in {
val replace = true
val provided = configA
mergeStoredAndProvided(None, provided, replace) shouldBe provided
mergeStoredAndProvided(configB, provided, replace) shouldBe provided
mergeStoredAndProvided(configC, provided, replace) shouldBe provided
}
"should return the merge of stored and provided when" +
" replace is false, and stored is present" in {
val replace = false
val stored = configA
mergeStoredAndProvided(stored, None, replace) shouldBe stored
mergeStoredAndProvided(stored, configB, replace) shouldBe configAB
mergeStoredAndProvided(stored, configC, replace) shouldBe configAC
}
"should throw an error when replace is false and there are no stored options" in {
val replace = false
val stored = None
val errorType = "OptionsNotStored"
val error1 = rpc.v1.model.ErrorResponse(
intercept[CosmosException](
mergeStoredAndProvided(stored, None, replace)
).error
)
error1.`type` shouldBe errorType
val error2 = rpc.v1.model.ErrorResponse(
intercept[CosmosException](
mergeStoredAndProvided(stored, configA, replace)
).error
)
error2.`type` shouldBe errorType
}
}
}
object ServiceUpdateHandlerSpec {
// scalastyle:off multiple.string.literals
val configA = Map(
"a" -> "aa-val",
"b" -> "ab-val",
"c" -> "ac-val"
).asJson.asObject
val configB = Map(
"d" -> "bd-val",
"e" -> "be-val"
).asJson.asObject
val configC = Map(
"c" -> "cc-val",
"d" -> "cd-val",
"e" -> "ce-val"
).asJson.asObject
val configAB =
Map(
"a" -> "aa-val",
"b" -> "ab-val",
"c" -> "ac-val",
"d" -> "bd-val",
"e" -> "be-val"
).asJson.asObject
val configAC =
Map(
"a" -> "aa-val",
"b" -> "ab-val",
"c" -> "cc-val",
"d" -> "cd-val",
"e" -> "ce-val"
).asJson.asObject
// scalastyle:on multiple.string.literals
}
| dcos/cosmos | cosmos-test-common/src/test/scala/com/mesosphere/cosmos/handler/ServiceUpdateHandlerSpec.scala | Scala | apache-2.0 | 2,490 |
/**
* Angles
* Copyright (C) 2014 Sebastian Schelter
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package io.ssc.angles.pipeline
import java.io.StringReader
import io.ssc.angles.pipeline.data.Storage
import io.ssc.angles.pipeline.http.MetadataExtractor
import org.apache.commons.lang.StringEscapeUtils
import org.htmlcleaner.HtmlCleaner
import org.joda.time.DateTime
import org.slf4j.LoggerFactory
import scala.collection.mutable
class ExtractMetadata extends Step {
val log = LoggerFactory.getLogger(classOf[ExtractMetadata])
val cleaner = new HtmlCleaner
val cleanerProperties = cleaner.getProperties
cleanerProperties.setTranslateSpecialEntities(true)
cleanerProperties.setAdvancedXmlEscape(true)
cleanerProperties.setTransResCharsToNCR(true)
cleanerProperties.setTransSpecialEntitiesToNCR(true)
cleanerProperties.setAllowHtmlInsideAttributes(false)
cleanerProperties.setKeepWhitespaceAndCommentsInHead(false)
cleanerProperties.setOmitCdataOutsideScriptAndStyle(true)
cleanerProperties.setOmitComments(true)
cleanerProperties.setIgnoreQuestAndExclam(true)
cleanerProperties.setRecognizeUnicodeChars(true)
cleanerProperties.setOmitComments(true)
override def execute(since: DateTime): Unit = {
log.info("Extracting metadata ...")
val websites = Storage.crawledWebsites(since)
//TODO respect noindex nosnippet none noarchive noimageindex unavailable_after: [RFC-850 date/time] in meta robots
websites
.filter { website =>
log.debug("Looking at metadata for {}", website.realUri)
Storage.metadataFor(website.id).isEmpty
}
.foreach { website =>
log.info("Extracting metadata from {}", website.realUri)
val attributes = mutable.Map[String, List[String]]()
//TODO Charset?
val reader = new StringReader(website.html)
val root = cleaner.clean(reader)
attributes += "uri" -> List(website.realUri)
val extractor = new MetadataExtractor(root)
attributes += "title" -> extractor.title
attributes += "meta-author" -> extractor.metaName("author")
attributes += "meta-date" -> extractor.metaName("date")
attributes += "meta-content-language" -> extractor.metaName("Content-Language")
attributes += "meta-fulltitle" -> extractor.metaName("fulltitle")
attributes += "meta-description" -> extractor.metaName("description")
attributes += "meta-publisher" -> extractor.metaName("publisher")
attributes += "meta-keywords" -> extractor.metaKeywords
attributes += "meta-og:url" -> extractor.metaProperty("og:url")
attributes += "meta-og:type" -> extractor.metaProperty("og:type")
attributes += "meta-og:title" -> extractor.metaProperty("og:title")
attributes += "meta-og:description" -> extractor.metaProperty("og:description")
attributes += "meta-og:sitename" -> extractor.metaProperty("og:site_name")
attributes += "meta-og:image" -> extractor.metaProperty("og:image")
attributes += "meta-og:image:width" -> extractor.metaProperty("og:image:width")
attributes += "meta-og:image:height" -> extractor.metaProperty("og:image:height")
attributes += "meta-og:locale" -> extractor.metaProperty("og:locale")
attributes += "meta-website:published-time" -> extractor.metaProperty("website:published_time")
attributes += "meta-website:modified-time" -> extractor.metaProperty("website:modified_time")
attributes += "meta-website:author" -> extractor.metaProperty("website:author")
attributes += "meta-website:publisher" -> extractor.metaProperty("website:publisher")
attributes += "meta-website:section" -> extractor.metaProperty("website:section")
attributes += "meta-website:tag" -> extractor.metaProperty("website:tag")
attributes += "meta-dc:title" -> extractor.metaName("DC.title")
attributes += "meta-dc:description" -> extractor.metaName("DC.description")
attributes += "meta-dc:creator" -> extractor.metaName("DC.creator")
attributes += "meta-dc:date" -> extractor.metaName("DC.Date")
attributes += "meta-dc:subject" -> extractor.metaName("DC.Subject")
attributes += "meta-twitter:title" -> extractor.metaName("twitter:title")
attributes += "meta-twitter:description" -> extractor.metaName("twitter:description")
attributes += "meta-twitter:url" -> extractor.metaName("twitter:url")
attributes += "meta-twitter:image" -> extractor.metaName("twitter:image")
attributes += "meta-twitter:image:width" -> extractor.metaName("twitter:image:width")
attributes += "meta-twitter:image:height" -> extractor.metaName("twitter:image:height")
attributes += "meta-twitter:creator" -> extractor.metaName("twitter:creator")
attributes += "meta-twitter:creator:id" -> extractor.metaName("twitter:creator:id")
attributes += "meta-vr:published" -> extractor.metaProperty("vr:published")
attributes += "meta-vr:author" -> extractor.metaProperty("vr:author")
attributes += "meta-vr:type" -> extractor.metaProperty("vr:type")
attributes += "meta-vr:category" -> extractor.metaProperty("vr:category")
attributes += "meta-itemprob:name" -> extractor.metaItemProb("name")
attributes += "meta-itemprob:description" -> extractor.metaItemProb("description")
//TODO analyze <link rel="profile" href="http://gmpg.org/xfn/11" />?
/*
<meta charset="UTF-8">
<html lang="en">
<link rel="canonical" href="http://www.sueddeutsche.de/medien/fuehrungsschwaeche-in-magazinverlagen-schnappatmung-1.2108507">
<link rel='shortlink' href='http://blog.wawzyniak.de/?p=8420' />
<base href="https://www.freitag.de/autoren/felix-werdermann/neue-gruene-asylpolitik" />
<link rel="alternate" title="sueddeutsche.de Medien RSS Feed" href="http://rss.sueddeutsche.de/rss/Medien" type="application/rss+xml">
<link rel="alternate" title="sueddeutsche.de RSS Feed" href="http://rssfeed.sueddeutsche.de/c/795/f/449002/index.rss" type="application/rss+xml">
<link rel="alternate" title="Süddeutsche.de Der Spiegel RSS Feed" href="http://rss.sueddeutsche.de/rss/Thema/Der+Spiegel" type="application/rss+xml">
<link rel="image_src" href="http://twitpic.com/show/thumb/eaztf0.jpg" />
*/
val existingAttributes = attributes.filter { case (key, values) => !values.isEmpty }
.map { case (key, values) => key -> values.map { clean(_) } }
val sortedKeys = existingAttributes.keys.toSeq.sorted
for (key <- sortedKeys) {
log.info("{}", Array(key.toUpperCase, existingAttributes(key).mkString(" - ")))
log.info("Saving metadata for website {}", website.id)
for (attribute <- existingAttributes(key)) {
Storage.saveMetadata(website.id, key, attribute)
}
}
log.info("")
}
}
def clean(str: String) = {
StringEscapeUtils.unescapeHtml(str.trim)
}
}
| nwolber/angles | src/main/scala/io/ssc/angles/pipeline/ExtractMetadata.scala | Scala | gpl-3.0 | 7,698 |
/**
* Copyright (c) 2016 Bernard Leach
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.github.leachbj.newrelic.akka.http.scaladsl
import akka.http.scaladsl.model.HttpResponse
import com.newrelic.api.agent.{HeaderType, InboundHeaders}
case class InboundHttpHeaders(originalResponse: HttpResponse) extends InboundHeaders {
def getHeader(name: String): String = originalResponse.headers.find(_.is(name.toLowerCase)).map(_.value).orNull
def getHeaderType: HeaderType = HeaderType.HTTP
}
| leachbj/akka-http-newrelic | src/main/scala/com/github/leachbj/newrelic/akka/http/scaladsl/InboundHttpHeaders.scala | Scala | mit | 1,534 |
/*
* Copyright 2016 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* The main goal here is to prevent overlapping links
* that we see in our own code (i.e. NodeKind.scala)
* Currently at "Abs", we see 1 definition: good
* and 5 references, not so good
* i.e. For this definition:
* case object Abs extends NodeKind("abs")
* we create references to these 5 methods:
* "java.lang.IndexOutOfBoundsException.<init>"
* "scala.Any.toString"
* "scala.runtime.ScalaRunTime.typedProductIterator"
* "scala.runtime.ScalaRunTime"
* "com.google.devtools.codeindex.indexers.scala.NodeKind.Abs"
*
* At Kind, we see 2 anchors, one ref for Kind and one decl for Kind.<init>
* We want to ensure there is no <init> method
*
* I think it is okay to have callgraph entries, but we don't want to have links
* for people to click on.
*/
//- @Kind defines/binding KindClass
sealed abstract class Kind(val kind: String)
/**
* Okay, this verifier DSL is so magical, it's hard to
* read to figure out what it is trying to say.
* see //third_party/kythe/kythe/cxx/indexer/cxx/testdata/rec/rec_implicit_anchors.cc
*
* We are verifying that the symbol abs
* defines a new class.
* But more importantly, we are ensuring that all the references
* to methods inside the generated functions don't show up as anchors.
*
* At the string "Kind" in "extends Kinds", we ensure there is an anchor
* that refers to an earlier binding.
* We also ensure that at "kind", there are no anchors for new bindings.
*
* */
object Kind {
//- @Kind ref KindClass
//- [email protected]/kind anchor
//- SingleRefAnchor.loc/start @^K
//- SingleRefAnchor.loc/end @$Kind
case object Abs extends Kind("abs")
}
| kythe/kythe-contrib | kythe/scala/com/google/devtools/kythe/analyzers/scala/testdata/verified/object_extension.scala | Scala | apache-2.0 | 2,263 |
// Project: Default (Template) Project
// Module:
// Description:
// Copyright (c) 2016. Distributed under the MIT License (see included LICENSE file).
package routing
import angulate2.std._
import angulate2.ext.tags.simple._
import scalatags.Text.all._
@Component(
template = tpl(
h2("Page not found")
)
)
class PageNotFoundComponent {
}
| jokade/angulate2-examples | routing/src/main/scala/routing/PageNotFoundComponent.scala | Scala | mit | 360 |
package controllers
import Common.PrototypeHtml
import helpers.disposal_of_vehicle.CookieFactoryForUnitSpecs
import helpers.{UnitSpec, TestWithApplication}
import org.mockito.Mockito.when
import pages.disposal_of_vehicle.{BeforeYouStartPage, SetupTradeDetailsPage, VehicleLookupPage}
import play.api.test.FakeRequest
import play.api.test.Helpers.{LOCATION, OK, contentAsString, defaultAwaitTimeout}
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.ClientSideSessionFactory
import utils.helpers.Config
class VehicleLookupFailureUnitSpec extends UnitSpec {
"present" should {
"display the page" in new TestWithApplication {
whenReady(present) { r =>
r.header.status should equal(OK)
}
}
"redirect to setuptraderdetails on if traderDetailsModel is not in cache" in new TestWithApplication {
val request = FakeRequest()
.withCookies(CookieFactoryForUnitSpecs.bruteForcePreventionViewModel())
.withCookies(CookieFactoryForUnitSpecs.vehicleLookupFormModel())
.withCookies(CookieFactoryForUnitSpecs.vehicleLookupResponse())
val result = vehicleLookupFailure.present(request)
whenReady(result) { r =>
r.header.headers.get(LOCATION) should equal(Some(SetupTradeDetailsPage.address))
}
}
"redirect to setuptraderdetails on if bruteForcePreventionViewModel is not in cache" in new TestWithApplication {
val request = FakeRequest()
.withCookies(CookieFactoryForUnitSpecs.traderDetailsModel())
.withCookies(CookieFactoryForUnitSpecs.vehicleLookupFormModel())
.withCookies(CookieFactoryForUnitSpecs.vehicleLookupResponse())
val result = vehicleLookupFailure.present(request)
whenReady(result) { r =>
r.header.headers.get(LOCATION) should equal(Some(SetupTradeDetailsPage.address))
}
}
"redirect to setuptraderdetails on if VehicleLookupFormModelCache is not in cache" in new TestWithApplication {
val request = FakeRequest()
.withCookies(CookieFactoryForUnitSpecs.traderDetailsModel())
.withCookies(CookieFactoryForUnitSpecs.bruteForcePreventionViewModel())
.withCookies(CookieFactoryForUnitSpecs.vehicleLookupResponse())
val result = vehicleLookupFailure.present(request)
whenReady(result) { r =>
r.header.headers.get(LOCATION) should equal(Some(SetupTradeDetailsPage.address))
}
}
"display prototype message when config set to true" in new TestWithApplication {
contentAsString(present) should include(PrototypeHtml)
}
"not display prototype message when config set to false" in new TestWithApplication {
val request = FakeRequest()
implicit val clientSideSessionFactory = injector.getInstance(classOf[ClientSideSessionFactory])
implicit val config: Config = mock[Config]
when(config.isPrototypeBannerVisible).thenReturn(false) // Stub this config value.
val vehicleLookupFailurePrototypeNotVisible = new VehicleLookupFailure()
val result = vehicleLookupFailurePrototypeNotVisible.present(request)
contentAsString(result) should not include PrototypeHtml
}
}
"submit" should {
"redirect to vehiclelookup on submit" in new TestWithApplication {
val request = FakeRequest()
.withCookies(CookieFactoryForUnitSpecs.traderDetailsModel())
.withCookies(CookieFactoryForUnitSpecs.vehicleLookupFormModel())
val result = vehicleLookupFailure.submit(request)
whenReady(result) { r =>
r.header.headers.get(LOCATION) should equal(Some(VehicleLookupPage.address))
}
}
"redirect to setuptraderdetails on submit when cache is empty" in new TestWithApplication {
val request = FakeRequest()
val result = vehicleLookupFailure.submit(request)
whenReady(result) { r =>
r.header.headers.get(LOCATION) should equal(Some(BeforeYouStartPage.address))
}
}
}
private lazy val vehicleLookupFailure = {
injector.getInstance(classOf[VehicleLookupFailure])
}
private lazy val present = {
val request = FakeRequest()
.withCookies(CookieFactoryForUnitSpecs.traderDetailsModel())
.withCookies(CookieFactoryForUnitSpecs.bruteForcePreventionViewModel())
.withCookies(CookieFactoryForUnitSpecs.vehicleLookupFormModel())
.withCookies(CookieFactoryForUnitSpecs.vehicleLookupResponse())
vehicleLookupFailure.present(request)
}
}
| dvla/vehicles-online | test/controllers/VehicleLookupFailureUnitSpec.scala | Scala | mit | 4,440 |
package org.psesd.srx.shared.core
import org.json4s.JValue
import org.psesd.srx.shared.core.config.Environment
import org.psesd.srx.shared.core.exceptions.{ArgumentInvalidException, SrxRequestActionNotAllowedException}
import org.psesd.srx.shared.core.sif.SifRequestAction.SifRequestAction
import org.psesd.srx.shared.core.sif._
import scala.xml.Node
object TestValues {
lazy val sifAuthenticationMethod = SifAuthenticationMethod.SifHmacSha256
lazy val sessionToken = SifProviderSessionToken("ad53dbf6-e0a0-469f-8428-c17738eba43e")
lazy val sharedSecret = SifProviderSharedSecret("pHkAuxdGGMWS")
lazy val sifUrl: SifProviderUrl = SifProviderUrl("http://localhost:%s".format(Environment.getPropertyOrElse("SERVER_PORT", "80")))
lazy val sifProvider = new SifProvider(sifUrl, sessionToken, sharedSecret, sifAuthenticationMethod)
lazy val timestamp: SifTimestamp = SifTimestamp("2015-02-24T20:51:59.878Z")
lazy val authorization = new SifAuthorization(sifProvider, timestamp)
val srxServiceBuildComponents = List[SrxServiceComponent](
new SrxServiceComponent("jdk", "1.8"),
new SrxServiceComponent("scala", "2.11.8"),
new SrxServiceComponent("sbt", "0.13.12")
)
val srxService = new SrxService(new SrxServiceComponent("srx-shared-core-test", "1.0.1"), srxServiceBuildComponents)
val testEntitiesResource = "testEntities"
class TestEntity(val id: String) extends SrxResource
object TestEntity {
def apply(requestBody: SrxRequestBody, parameters: Option[List[SifRequestParameter]]): TestEntity = {
val rootElementName = requestBody.getXml.get.label
if(rootElementName != "test") {
throw new ArgumentInvalidException("root element '%s'".format(rootElementName))
}
new TestEntity("123")
}
}
class TestEntityResult(val requestAction: SifRequestAction, val id: String) extends SrxResourceResult {
statusCode = SifRequestAction.getSuccessStatusCode(requestAction)
def toJson: Option[JValue] = None
def toXml: Option[Node] = Some(<test id={id}/>)
}
object TestEntityService extends SrxResourceService {
def delete(parameters: List[SifRequestParameter]): SrxResourceResult = {
SrxResourceErrorResult(SifHttpStatusCode.MethodNotAllowed, new SrxRequestActionNotAllowedException(SifRequestAction.Delete, testEntitiesResource))
}
def create(srxResource: SrxResource, parameters: List[SifRequestParameter]): SrxResourceResult = {
val testEntity = srxResource.asInstanceOf[TestEntity]
new TestEntityResult(SifRequestAction.Create, testEntity.id)
}
def query(parameters: List[SifRequestParameter]): SrxResourceResult = {
for(p <- parameters) {
println("PARAM %s = %s".format(p.key, p.value))
}
val id = getIdFromRequestParameters(parameters)
new TestEntityResult(SifRequestAction.Query, id.getOrElse(""))
}
def update(srxResource: SrxResource, parameters: List[SifRequestParameter]): SrxResourceResult = {
SrxResourceErrorResult(SifHttpStatusCode.MethodNotAllowed, new SrxRequestActionNotAllowedException(SifRequestAction.Update, testEntitiesResource))
}
}
}
| PSESD/srx-shared-core | src/test/scala/org/psesd/srx/shared/core/TestValues.scala | Scala | mit | 3,146 |
import sbt._
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
/**
* Application settings. Configure the build for your application here.
* You normally don't have to touch the actual build definition after this.
*/
object Settings {
/** The name of your application */
val name = "YOUR PROJECT NAME"
/** The version of your application */
val version = "1.0.2"
/** Options for the scala compiler */
val scalacOptions = Seq(
"-Xlint",
"-unchecked",
"-deprecation",
"-feature"
)
/** Set some basic options when running the project with Revolver */
val jvmRuntimeOptions = Seq(
"-Xmx1G"
)
/** Declare global dependency versions here to avoid mismatches in multi part dependencies */
object versions {
val scala = "2.11.7"
val scalaDom = "0.8.1"
val scalajsReact = "0.9.1"
val scalaCSS = "0.3.0"
val scalaRx = "0.2.8"
val log4js = "1.4.10"
val autowire = "0.2.5"
val booPickle = "1.1.0"
val uTest = "0.3.1"
val react = "0.12.2"
val jQuery = "1.11.1"
val bootstrap = "3.3.2"
val chartjs = "1.0.1"
val playScripts = "0.3.0"
}
/**
* These dependencies are shared between JS and JVM projects
* the special %%% function selects the correct version for each project
*/
val sharedDependencies = Def.setting(Seq(
"com.lihaoyi" %%% "autowire" % versions.autowire,
"com.lihaoyi" %%% "utest" % versions.uTest,
"com.github.nscala-time" %% "nscala-time" % "2.4.0",
"com.lihaoyi" %%% "upickle" % "0.3.6"
))
/** Dependencies only used by the JVM project */
val jvmDependencies = Def.setting(Seq(
"com.vmunier" %% "play-scalajs-scripts" % versions.playScripts,
"org.webjars" % "font-awesome" % "4.3.0-1" % Provided,
"org.webjars" % "bootstrap" % versions.bootstrap % Provided,
"com.typesafe.play" %% "play-slick" % "1.1.0",
"com.typesafe.play" %% "play-slick-evolutions" % "1.1.0",
"org.postgresql" % "postgresql" % "9.3-1102-jdbc41"
))
/** Dependencies only used by the JS project (note the use of %%% instead of %%) */
val scalajsDependencies = Def.setting(Seq(
"com.github.japgolly.scalajs-react" %%% "core" % versions.scalajsReact,
"com.github.japgolly.scalajs-react" %%% "extra" % versions.scalajsReact,
"com.github.japgolly.scalacss" %%% "ext-react" % versions.scalaCSS,
"org.scala-js" %%% "scalajs-dom" % versions.scalaDom,
"com.lihaoyi" %%% "scalarx" % versions.scalaRx
))
/** Dependencies for external JS libs that are bundled into a single .js file according to dependency order */
val jsDependencies = Def.setting(Seq(
"org.webjars" % "react" % versions.react / "react-with-addons.js" minified "react-with-addons.min.js" commonJSName "React",
"org.webjars" % "jquery" % versions.jQuery / "jquery.js" minified "jquery.min.js",
"org.webjars" % "bootstrap" % versions.bootstrap / "bootstrap.js" minified "bootstrap.min.js" dependsOn "jquery.js",
"org.webjars" % "log4javascript" % versions.log4js / "js/log4javascript_uncompressed.js" minified "js/log4javascript.js"
))
}
| mitchdzugan/PSPSjsAR-stack-template | project/Settings.scala | Scala | apache-2.0 | 3,093 |
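These `Def.setting` values are intended to be consumed from the build definition; a hedged sketch of how a `build.sbt` module (not included in this excerpt) would typically reference them:

```scala
// build.sbt (illustrative; the project name and directory are assumptions)
lazy val server = (project in file("server"))
  .settings(
    name := Settings.name,
    version := Settings.version,
    scalacOptions ++= Settings.scalacOptions,
    libraryDependencies ++= Settings.sharedDependencies.value ++ Settings.jvmDependencies.value
  )
```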
package uk.gov.gds.router.controller
import org.scalatra.ScalatraFilter
import javax.servlet.http.Cookie
import scala.collection.JavaConversions._
class MainTestHarnessController extends ScalatraFilter {
before() {
response.setContentType("text/html")
}
get("/test/test-harness") {
output(dumpParams)
}
get("/someprefix/unregistered") {
output("unregistered")
}
get("/someprefix") {
output("prefix route")
}
get("/test/test-harness/") {
output(dumpParams)
}
post("/test/test-harness") {
output(dumpParams)
}
get("/a-prefix-route/foo") {
output("foo!")
}
get("/a-prefix-route/foo/bar") {
output("bar!")
}
get("/foo") {
output("fooOnly")
}
get("/foo/*") {
output("fooOnly")
}
get("/test/timeout") {
try {
Thread.sleep(20000)
}
catch {
case e: InterruptedException => // no-op
}
}
get("/football") {
output("football")
}
get("/test/redirect") {
redirect("http://www.alphagov.co.uk")
}
post("/test/redirect") {
redirect("http://www.alphagov.co.uk")
}
get("/test/this-route-returns-an-error") {
halt(500)
}
get("/test/410") {
halt(410)
}
get("/test/incoming-headers") {
output(dumpHeaders)
}
get("/test/incoming-cookies") {
output(dumpCookies)
}
get("/test/set-header") {
response.addHeader("X-Test", "test")
}
get("/test/set-rack-cache-header") {
response.addHeader("X-Rack-Cache", "test")
}
get("/test/outgoing-cookies") {
response.addCookie(new Cookie("test-cookie", "this is a cookie"))
}
get("/test/not-modified") {
halt(304)
}
post("/test/not-modified") {
halt(304)
}
get("/test/runtime-exception") {
throw new RuntimeException("exception")
}
get("/test/exception") {
throw new Exception("exception")
}
get("/punchhole/prefix/route") {
output("prefix route")
}
get("/punchhole/full/route") {
output("full route")
}
private def output(block: => String) =
<html>
<head>
<title>Test harness</title>
</head>
<body>
{block}
</body>
</html>
private def dumpHeaders = request.getHeaderNames().toSeq.map {
case headerName: String =>
headerName + "=" + request.getHeader(headerName)
}.mkString("\n")
private def dumpCookies = request.multiCookies.map {
case (name, values) => values.map(name + "=" + _)
}.flatten.mkString("\n")
private def dumpParams = multiParams.map {
case (paramName, values) => values.map(paramName + "=" + _)
}.flatten.mkString("\n")
}
| gds-attic/scala-router | router/router-test-harness-main-host/src/main/scala/uk/gov/gds/router/controller/MainTestHarnessController.scala | Scala | mit | 2,603 |
package scala.meta.tests.metacp
import java.nio.charset.StandardCharsets
import java.nio.file.FileVisitResult
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.attribute.BasicFileAttributes
import munit.FunSuite
import munit.Ignore
import scala.meta.tests.Slow
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.meta.internal.io._
import scala.meta.internal.javacp._
import scala.meta.internal.javacp.asm._
import scala.meta.internal.metacp._
import scala.meta.io.AbsolutePath
import scala.meta.testkit._
import scala.tools.asm.tree.ClassNode
import scala.tools.asm.tree.FieldNode
import scala.tools.asm.tree.MethodNode
import scala.util.control.NonFatal
class SignatureSuite extends FunSuite {
// Validates that pretty(parse(signature)) == signature
def assertSignatureRoundtrip(
signature: String,
visitor: TypedSignatureVisitor[Printable]
): Unit = {
val obtained = JavaTypeSignature.parse[Printable](signature, visitor).pretty
assertNoDiff(obtained, signature)
}
// Validates that all signatures of the classfiles in the given
// library pass assertSignatureRoundtrip
def checkSignatureRoundtrip(library: Library): Unit = {
test(library.name.tag(Slow)) {
val failingSignatures = ArrayBuffer.empty[String]
library.classpath().visit { root =>
new java.nio.file.SimpleFileVisitor[Path] {
override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = {
if (PathIO.extension(file) == "class") {
val node = AbsolutePath(file).toClassNode
val tests = checkAllSignatures(node)
tests.foreach { case (signature, unsafe) =>
try {
unsafe()
} catch {
case NonFatal(e) =>
println(signature)
failingSignatures += signature
}
}
}
FileVisitResult.CONTINUE
}
}
}
if (failingSignatures.nonEmpty) {
Files.write(
java.nio.file.Paths.get("signatures.txt"),
failingSignatures.mkString("\n").getBytes(StandardCharsets.UTF_8)
)
fail("failures! See signatures.txt")
}
}
}
def checkFields(node: ClassNode): List[(String, () => Unit)] =
node.fields.asScala.map { field: FieldNode =>
val signature = if (field.signature == null) field.desc else field.signature
(signature, { () => assertSignatureRoundtrip(signature, new FieldSignatureVisitor()) })
}.toList
def checkMethods(node: ClassNode): List[(String, () => Unit)] =
node.methods.asScala.map { method: MethodNode =>
val signature = if (method.signature == null) method.desc else method.signature
(signature, { () => assertSignatureRoundtrip(signature, new MethodSignatureVisitor()) })
}.toList
def checkClass(node: ClassNode): List[(String, () => Unit)] =
if (node.signature == null) Nil
else {
List(
(
node.signature,
{ () =>
assertSignatureRoundtrip(node.signature, new ClassSignatureVisitor)
}
)
)
}
def checkAllSignatures(node: ClassNode): List[(String, () => Unit)] = {
checkFields(node) ::: checkMethods(node) ::: checkClass(node)
}
Libraries.suite.foreach(checkSignatureRoundtrip)
}
| scalameta/scalameta | tests/jvm/src/test/scala-2.12/scala/meta/tests/metacp/SignatureSuite.scala | Scala | bsd-3-clause | 3,429 |
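For a concrete sense of what `assertSignatureRoundtrip` verifies, a generic field signature such as `List[String]` should parse and pretty-print back to itself. A hedged single-case sketch using the same helpers (the signature string follows the JVM specification; it is not taken from the suite):

```scala
// Could live inside SignatureSuite as a focused test case.
test("field signature roundtrip for java.util.List<String>") {
  assertSignatureRoundtrip(
    "Ljava/util/List<Ljava/lang/String;>;",
    new FieldSignatureVisitor()
  )
}
```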
package dzufferey.utils
import scala.collection.mutable.HashMap
class Namer {
private val map = new java.util.concurrent.ConcurrentHashMap[String, java.util.concurrent.atomic.AtomicInteger]()
private def counter = new java.util.concurrent.atomic.AtomicInteger()
def getPrefixAndVersion(prefix: String) = {
val idx = prefix.lastIndexOf("$")
if (idx == -1) {
(prefix, 0)
} else {
try {
(prefix.substring(0, idx), prefix.substring(idx+1).toInt)
} catch {
case e: java.lang.NumberFormatException =>
(prefix, 0)
}
}
}
private def extractPrefix(prefix: String, preserve: Boolean) = {
if (preserve) {
(prefix, 0)
} else {
getPrefixAndVersion(prefix)
}
}
private def getCounter(prefix: String) = {
val c1 = counter
val c2 = map.putIfAbsent(prefix, c1)
if (c2 == null) c1 else c2
}
def warmup(prefix: String, preserve: Boolean = false): Unit = {
val (realPrefix, current) = extractPrefix(prefix, preserve)
val c3 = getCounter(realPrefix)
var c = c3.get
while (c < current) {
c = c3.incrementAndGet
}
}
def apply(prefix: String, preserve: Boolean = false): String = {
val (realPrefix, current) = extractPrefix(prefix, preserve)
val c3 = getCounter(realPrefix)
var v = c3.incrementAndGet
assert(current <= Int.MaxValue)
while (v <= current) {
v = c3.incrementAndGet
}
realPrefix.trim + "$" + v
}
}
object Namer extends Namer {
}
| dzufferey/misc-scala-utils | src/main/scala/dzufferey/utils/Namer.scala | Scala | apache-2.0 | 1,517 |
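A short sketch of the naming behaviour (not part of the repository): each prefix gets its own counter, a `$n` suffix in the input bumps the counter past `n`, and `preserve = true` keeps the suffix as part of the prefix.

```scala
import dzufferey.utils.Namer

object NamerSketch extends App {
  println(Namer("tmp"))                     // tmp$1
  println(Namer("tmp"))                     // tmp$2
  // A name that already carries a version jumps past it...
  println(Namer("tmp$10"))                  // tmp$11
  // ...unless the "$n" part is preserved as part of the prefix.
  println(Namer("tmp$10", preserve = true)) // tmp$10$1
}
```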
package scife
package enumeration
import org.scalatest._
class EnumTest extends FunSuite with Matchers {
test("factory method") {
{
val enum = Enum((Set(1, 2, 3) map { Set(_) }).toList)
enum shouldBe a [WrapArray[_]]
enum.hasDefiniteSize should be (true)
enum.size should be (3)
}
{
// set is treated as a function !
val enum = Enum[Set[Int]]( Set(1, 2, 3) )
enum shouldBe a [Singleton[_]]
enum.hasDefiniteSize should be (true)
enum.size should be (1)
}
{
val enum = Enum(1, 2, 3)
enum shouldBe a [WrapArray[_]]
enum.hasDefiniteSize should be (true)
enum.size should be (3)
}
}
test("factory method, function") {
{
val a = Predef.identity[Int] _
val b = Predef.identity[Int] _
assert( a != b )
}
{
val enum = Enum[Int]( Predef.identity[Int] _ )
enum shouldBe a [WrapFunction[_]]
enum.hasDefiniteSize should be (false)
}
{
// set is treated as a function !
val enum = Enum( Set(1, 2, 3) )
enum shouldBe a [WrapFunction[_]]
enum(3) should be (true)
enum(4) should be (false)
enum.hasDefiniteSize should be (false)
}
}
}
| kaptoxic/SciFe | src/test/scala/scife/enumeration/EnumTest.scala | Scala | gpl-2.0 | 1,239 |
package mesosphere.marathon
package upgrade
import mesosphere.UnitTest
import mesosphere.marathon.state._
import mesosphere.marathon.test.GroupCreation
class GroupVersioningUtilTest extends UnitTest with GroupCreation {
val emptyGroup = createRootGroup(version = Timestamp(1))
val app =
AppDefinition(AbsolutePathId("/nested/app"), role = "*", cmd = Some("sleep 123"), versionInfo = VersionInfo.OnlyVersion(Timestamp.zero))
val nestedApp = createRootGroup(
groups = Set(
createGroup(
id = AbsolutePathId("/nested"),
apps = Map(app.id -> app),
version = Timestamp(2)
)
),
version = Timestamp(2)
)
val scaledApp = AppDefinition(
AbsolutePathId("/nested/app"),
role = "*",
cmd = Some("sleep 123"),
instances = 2,
versionInfo = VersionInfo.OnlyVersion(Timestamp.zero)
)
val nestedAppScaled = createRootGroup(
groups = Set(
createGroup(
id = AbsolutePathId("/nested"),
apps = Map(scaledApp.id -> scaledApp),
version = Timestamp(2)
)
),
version = Timestamp(2)
)
val updatedApp = AppDefinition(AbsolutePathId("/nested/app"), role = "*", cmd = Some("sleep 234"))
val nestedAppUpdated = createRootGroup(
groups = Set(
createGroup(
id = AbsolutePathId("/nested"),
apps = Map(updatedApp.id -> updatedApp),
version = Timestamp(2)
)
),
version = Timestamp(2)
)
"GroupVersioningUtil" should {
"No changes for empty group should result in the resulting root group version set to the given one" in {
When("Calculating version infos for an empty group")
val updated = GroupVersioningUtil.updateVersionInfoForChangedApps(Timestamp(10), emptyGroup, emptyGroup)
Then("the version is updated anyway")
updated should be(emptyGroup.updateVersion(Timestamp(10)))
}
"No changes for nested app should result in the resulting root group version set to the given one" in {
When("Calculating version infos with no changes")
val updated = GroupVersioningUtil.updateVersionInfoForChangedApps(Timestamp(10), nestedApp, nestedApp)
Then("the version is updated anyway")
updated should be(nestedApp.updateVersion(Timestamp(10)))
}
"A new app should get proper versionInfo" in {
When("Calculating version infos with an added app")
val updated = GroupVersioningUtil.updateVersionInfoForChangedApps(Timestamp(10), emptyGroup, nestedApp)
Then("The timestamp of the app and groups are updated appropriately")
def update(maybeApp: Option[AppDefinition]): AppDefinition =
maybeApp.map(_.copy(versionInfo = VersionInfo.forNewConfig(Timestamp(10)))).get
updated should be(
nestedApp.updateApp(
AbsolutePathId("/nested/app"),
update,
Timestamp(10)
)
)
}
"A scaled app should get proper versionInfo" in {
When("Calculating version infos with a scaled app")
val updated = GroupVersioningUtil.updateVersionInfoForChangedApps(Timestamp(10), nestedApp, nestedAppScaled)
Then("The timestamp of the app and groups are updated appropriately")
def update(maybeApp: Option[AppDefinition]): AppDefinition =
maybeApp.map(_.copy(versionInfo = VersionInfo.forNewConfig(Timestamp(0)).withScaleChange(Timestamp(10)))).get
updated should equal(
nestedAppScaled.updateApp(
AbsolutePathId("/nested/app"),
update,
Timestamp(10)
)
)
}
"A updated app should get proper versionInfo" in {
When("Calculating version infos with an updated app")
val updated = GroupVersioningUtil.updateVersionInfoForChangedApps(Timestamp(10), nestedApp, nestedAppUpdated)
Then("The timestamp of the app and groups are updated appropriately")
def update(maybeApp: Option[AppDefinition]): AppDefinition =
maybeApp.map(_.copy(versionInfo = VersionInfo.forNewConfig(Timestamp(10)))).get
updated.toString should be(
nestedAppUpdated
.updateApp(
AbsolutePathId("/nested/app"),
update,
Timestamp(10)
)
.toString
)
}
}
}
| mesosphere/marathon | src/test/scala/mesosphere/marathon/upgrade/GroupVersioningUtilTest.scala | Scala | apache-2.0 | 4,218 |
package dialectic.example.micro
import dialectic.micro._
import dialectic.micro.Micro._
import scalaz.Show
import scalaz.std.anyVal.intInstance
import scalaz.std.string._
import scalaz.syntax.foldable._
object MicroExample0 {
def stringify[A : Show](r: Streem[A], n: Int): String =
r.take(n).takeAll.map(Show[A].shows).intercalate("\n")
def main(args: Array[String]): Unit = {
// (A = 7) /\ ((B = 5) \/ (B = 6))
val a = callFresh[Int](a => a =#= 7.const)
val b = callFresh[Int](b => (b =#= 5.const) \/ (b =#= 6.const))
val query = a /\ b
val result = query.runEmpty
println(stringify(result, 10) ++ "\n")
// (C = 5) \/ (C = 5) \/ ...
def fives(c: Term[Int]): Goal[Int] = (c =#= 5.const) \/ fives(c)
val result2 = callFresh(fives).runEmpty
println(stringify(result2, 10) ++ "\n")
// (X = 5) \/ (X = 6) \/ (X = 5) \/ (X = 6) \/ ...
def sixes(c: Term[Int]): Goal[Int] = (c =#= 6.const) \/ sixes(c)
val fivesAndSixes = callFresh[Int](x => fives(x) \/ sixes(x))
val result3 = fivesAndSixes.runEmpty
println(stringify(result3, 10) ++ "\n")
}
}
| adelbertc/dialectic | example/src/main/scala/dialectic/example/micro/MicroExample0.scala | Scala | bsd-3-clause | 1,114 |
package hr.element.beepo.client
package email
import scala.xml.Elem
import scalax.file.Path
object Email {
def apply(from: From, to: To, subject: Subject): Email =
Email(from, Seq(to), Nil, Nil, Nil, subject, None, None, Nil)
import org.apache.commons.codec.binary.Base64
private def b64encode(bytes: Array[Byte]): String =
new String(Base64.encodeBase64Chunked(bytes),"UTF-8").trim
}
case class Email(
from: From
, to: Seq[To]
, replyTo: Seq[ReplyTo]
, cc: Seq[CC]
, bcc: Seq[BCC]
, subject: Subject
, textBody: Option[TextBody]
, htmlBody: Option[HtmlBody]
, attachments: Seq[Attachment]) extends xml.EmailXMLConverter {
def add(cc: CC, otherCCs: CC*): Email =
copy(cc = this.cc.:+(cc) ++ otherCCs)
def add(bcc: BCC, otherBCCs: BCC*): Email =
copy(bcc = this.bcc.:+(bcc) ++ otherBCCs)
def add(replyTo: ReplyTo, otherReplyTos: ReplyTo*): Email =
copy(replyTo = this.replyTo.:+(replyTo) ++ otherReplyTos)
def addCC(cc: String, otherCCs: String*): Email =
add(CC(cc), otherCCs.map(CC(_)): _*)
def addBCC(bcc: String, otherBCCs: String*): Email =
add(BCC(bcc), otherBCCs.map(BCC(_)): _*)
def addReplyTo(replyTo: String, otherReplyTos: String*): Email =
add(ReplyTo(replyTo), otherReplyTos.map(ReplyTo(_)): _*)
def setBody(textBody: TextBody): Email =
copy(textBody = Some(textBody))
def setBody(htmlBody: HtmlBody): Email =
copy(htmlBody = Some(htmlBody))
def setBody(xhtmlBody: XHtmlBody): Email =
copy(htmlBody = Some(HtmlBody(xhtmlBody.toString)))
def setTextBody(textBody: String): Email =
setBody(TextBody(textBody))
def setHtmlBody(htmlBody: String): Email =
setBody(HtmlBody(htmlBody))
def setXHtmlBody(xhtmlBody: Elem): Email =
setBody(XHtmlBody(xhtmlBody))
def addAttachment(attachment: Attachment) =
copy(attachments = this.attachments :+ attachment)
def addAttachment(path: Path): Email =
addAttachment(Attachment(path))
def addAttachment(filename: String, content: Array[Byte]): Email =
addAttachment(Attachment(filename, content))
def addAttachment(filename: String, mimeType: String, content: Array[Byte]): Email =
addAttachment(Attachment(filename, mimeType, content))
// -----------------------------------------------------------------------------
def persist() =
Task(this).persist()
def persist(requestID: String) =
Task(requestID, this).persist()
def send() =
Task(this).send()
def send(requestID: String) =
Task(requestID, this).send()
}
| element-doo/beepo | code/scala/client/src/main/scala/hr/element/beepo/client/email/Email.scala | Scala | bsd-3-clause | 2,549 |
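The case class above reads as a small immutable builder. A hedged sketch of composing a message, assuming `From`, `To`, and `Subject` are simple wrappers constructible from strings (they live elsewhere in the `email` package) and that a Beepo endpoint is configured before `send()` is called:

```scala
import hr.element.beepo.client.email._

object EmailSketch {
  val mail =
    Email(From("[email protected]"), To("[email protected]"), Subject("Nightly report"))
      .addCC("[email protected]")
      .setTextBody("All jobs finished without errors.")
      .addAttachment("report.txt", "text/plain", "ok".getBytes("UTF-8"))

  // Queue the message for delivery through the configured Beepo service;
  // send(requestID) is the variant to use when an idempotency key is needed.
  def deliver(): Unit = { mail.send(); () }
}
```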
package sp.domain.logic
import play.api.libs.json._
import scala.util.Try
import org.threeten.bp._
import sp.domain._
/**
* To use the attributes, you also need to include the JSON formatters:
* import sp.domain.Logic._ to get it all
*/
object AttributeLogic extends AttributeLogics
trait AttributeLogics {
// Attribute logic
implicit def stringToSPValue(x: String): SPValue = SPValue(x)
implicit def intToSPValue(x: Int): SPValue = SPValue(x)
implicit def boolToSPValue(x: Boolean): SPValue = SPValue(x)
implicit class SPValueLogic(value: SPValue) {
def to[T](implicit fjs: JSReads[T]): Try[T] = {
Try{ value.as[T] }
}
def pretty: String = Json.prettyPrint(value)
def toJson: String = Json.stringify(value)
/**
* Special equal that also handles numbers and bools that are wrapped in strings
* @param obj
* @return
*/
def ===(obj: scala.Any): Boolean = {
super.equals(obj) ||
(obj.isInstanceOf[SPValue] &&
(value.fixStringedTypes == obj.asInstanceOf[SPValue].fixStringedTypes))
}
def fixStringedTypes: SPValue = {
value match {
case JsString(str) if str.nonEmpty =>
Try{SPValue(str.toInt)}
.orElse(Try{SPValue(str.toBoolean)})
.orElse(Try{SPValue(str.toDouble)})
.getOrElse(value)
case _ => value
}
}
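    // A minimal sketch of the special equality above, assuming the implicit conversions
    // defined earlier in this trait are in scope:
    //   SPValue("42") === SPValue(42)     // true: the stringed number is normalized first
    //   SPValue("true") === SPValue(true) // true
    //   SPValue("foo") === SPValue(42)    // false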
def getAs[T](key: String = "")(implicit fjs: JSReads[T]): Option[T] = {
value match {
case x: SPAttributes => x.getAs[T](key)
case x => None
}
}
}
def timeStamp: SPValue = {
import JsonLogic._
Json.toJson(ZonedDateTime.now)
}
implicit class SPAttributesLogic(x: SPAttributes) {
def addTimeStamp(): SPAttributes = {
x + ("time" -> timeStamp)
}
def merge(xs: SPAttributes): SPAttributes = x.deepMerge(xs)
def get(key: String): Option[SPValue] = {
x \ key match {
case JsDefined(res) => Some(res)
case e: JsUndefined if key.isEmpty => Some(x)
case e: JsUndefined => None
}
}
def getAs[T](key: String = "")(implicit fjs: JSReads[T]): Option[T] = {
for {
x <- get(key)
t <- x.asOpt[T]
} yield t
}
def to[T](implicit fjs: JSReads[T]): Try[T] = Try{x.as[T]}
    def find(key: String): List[SPValue] = (x \\ key).toList
def findAs[T](key: String)(implicit fjs: JSReads[T]): List[T] = {
find(key).flatMap(_.asOpt[T])
}
def findType[T](implicit fjs: JSReads[T]): List[T] = {
def extrType(xs: List[JsValue]): List[T] = {
xs.collect {
case l: JsObject =>
l.asOpt[T] match {
case Some(found) => List(found)
case None => l.findType[T]
}
case l: JsArray =>
extrType(l.value.toList)
}.flatten
}
extrType(x.values.toList)
}
def pretty: String = Json.prettyPrint(x)
def toJson: String = Json.stringify(x)
}
}
|
kristoferB/SP
|
spdomain/src/main/scala/sp/domain/logic/AttributeLogic.scala
|
Scala
|
mit
| 2,996 |
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package swave.benchmarks
import scala.concurrent.Future
import scala.concurrent.duration._
import swave.core._
object Swave extends BenchmarkSuite("swave") {
implicit val env = StreamEnv()
def createBenchmarks = Seq(
fanIn(80.mio),
fanOut(80.mio),
fibonacci(50.mio),
simpleDrain(120.mio),
substreams(30.mio)
)
implicit def startStreamGraph[T](streamGraph: StreamGraph[Future[Unit]]): Future[Unit] =
streamGraph.run().result
def fanIn(count: Long) =
benchmark("fanIn") { showProgressAndTimeFirstElement =>
zeros
.attach(zeros)
.attach(zeros)
.attach(zeros)
.attach(zeros)
.fanInMerge()
.take(count)
.onElement(_ => showProgressAndTimeFirstElement(count))
.async()
.to(Drain.ignore)
}
def fanOut(count: Long) =
benchmark("fanOut") { showProgressAndTimeFirstElement =>
def blackhole = Drain.ignore.dropResult
zeros
.take(count)
.onElement(_ => showProgressAndTimeFirstElement(count))
.async()
.fanOutBroadcast()
.sub.to(blackhole)
.sub.to(blackhole)
.sub.to(blackhole)
.sub.to(blackhole)
.subContinue
.to(Drain.ignore)
}
def fibonacci(count: Long) =
benchmark("fibonacci") { showProgressAndTimeFirstElement =>
val coupling = Coupling[Int]
coupling.out
.fanOutBroadcast(eagerCancel = true)
.sub
.buffer(2, Buffer.RequestStrategy.Always)
.scan(0 -> 1) { case ((a, b), c) => (a + b) -> c }
.map(_._1)
.to(coupling.in)
.subContinue
.take(count)
.onElement(_ => showProgressAndTimeFirstElement(count))
.async()
.to(Drain.ignore)
}
def simpleDrain(count: Long) =
benchmark("simpleDrain") { showProgressAndTimeFirstElement =>
zeros
.take(count)
.onElement(_ => showProgressAndTimeFirstElement(count))
.async()
.to(Drain.ignore)
}
def substreams(count: Long) =
benchmark("substreams") { showProgressAndTimeFirstElement =>
zeros
.injectSequential()
.flatMap {
_
.take(1000000)
.injectSequential()
.flatMap {
_
.take(10000)
.injectSequential()
.flatMap(_.take(100))
}
}
.take(count)
.onElement(_ => showProgressAndTimeFirstElement(count))
.async()
.to(Drain.ignore)
}
private def zeros = Spout.repeat(zero)
override def cleanUp(): Unit = env.shutdown().awaitTermination(1.second)
}
|
sirthias/swave
|
benchmarks/src/main/scala/swave/benchmarks/Swave.scala
|
Scala
|
mpl-2.0
| 2,897 |
package fr.acinq.bitcoin.samples
import fr.acinq.bitcoin._
object KeysFromXpub extends App {
/**
   * This is how you would derive public keys and addresses from an xpub that someone gave you.
   * We currently support BIP49 (p2sh-of-p2wpkh) and BIP84 (p2wpkh).
   */
def deriveAddresses(xpub: String) = {
val (prefix, master) = DeterministicWallet.ExtendedPublicKey.decode(xpub)
prefix match {
case DeterministicWallet.ypub =>
for (i <- 0L to 5L) {
val pub = DeterministicWallet.derivePublicKey(master, 0L :: i :: Nil)
val address = computeBIP49Address(pub.publicKey, Block.TestnetGenesisBlock.hash)
println(s"$pub $address")
}
case DeterministicWallet.vpub =>
for (i <- 0L to 5L) {
val pub = DeterministicWallet.derivePublicKey(master, 0L :: i :: Nil)
val address = computeBIP84Address(pub.publicKey, Block.TestnetGenesisBlock.hash)
println(s"$pub $address")
}
}
}
deriveAddresses("ypub6XKCLnXy5uuK5w5mL6viWaRPKJ9EQ7bo2sL4NTJ1wp6WgQp5fCEGYV5KSfF5DLDdCgUZdHBHQmTx95wfCM5LnRHQhWocNybZDhMaiytoD8J")
deriveAddresses("vpub5V8AVGVJD4oTKnAEjjTXUg6pao1jpyooD7VwbrHdMPPcL5RvtPrdiWqtRBj5W9gbccoo8mZznYFY6QSL2CXP75eAPoRjgS6bZehQaWMoy5y")
}
|
ACINQ/bitcoin-lib
|
src/test/scala/fr/acinq/bitcoin/samples/KeysFromXpub.scala
|
Scala
|
apache-2.0
| 1,264 |
package com.twitter.finagle.thriftmux.service
import com.twitter.finagle._
import com.twitter.finagle.filter.{ClientExceptionTracingFilter => ExceptionTracingFilter}
import com.twitter.finagle.thrift.ClientDeserializeCtx
import com.twitter.finagle.tracing.Tracing
import com.twitter.io.Buf
import com.twitter.util.{Future, Return, Throw}
import scala.util.control.NonFatal
/**
* Reports error and exception annotations when a span completes.
* Deserializes responses to get more accurate error annotations reported
* at the right time.
*/
final class ClientExceptionTracingFilter extends ExceptionTracingFilter[mux.Request, mux.Response] {
override def handleResponse(tracing: Tracing, rep: Future[mux.Response]): Future[mux.Response] = {
rep.respond {
case Throw(e) => traceError(tracing, e)
case Return(rep: mux.Response) =>
val deserCtx = ClientDeserializeCtx.get
if (deserCtx ne ClientDeserializeCtx.nullDeserializeCtx) {
try {
val bytes = Buf.ByteArray.Owned.extract(rep.body)
deserCtx.deserialize(bytes) match {
case Throw(e) => traceError(tracing, e)
case Return(_) =>
}
} catch {
case NonFatal(e: Throwable) => traceError(tracing, e)
}
}
}
}
}
|
twitter/finagle
|
finagle-thriftmux/src/main/scala/com/twitter/finagle/thriftmux/service/ClientExceptionTracingFilter.scala
|
Scala
|
apache-2.0
| 1,312 |
/*
* MUSIT is a museum database to archive natural and cultural history data.
* Copyright (C) 2016 MUSIT Norway, part of www.uio.no (University of Oslo)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License,
* or any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
package repositories.dao
import models.Organisation
import no.uio.musit.models.OrgId
import no.uio.musit.test.MusitSpecWithAppPerSuite
class OrganisationDaoSpec extends MusitSpecWithAppPerSuite {
val orgDao: OrganisationDao = fromInstanceCache[OrganisationDao]
"OrganisationDao" when {
"querying the organization methods" should {
"return None when Id is very large" in {
orgDao.getById(Long.MaxValue).futureValue mustBe None
      }
      "return an organization if the Id is valid" in {
val oid = OrgId(1)
val expected = Organisation(
id = Some(oid),
fn = "Kulturhistorisk museum - Universitetet i Oslo",
tel = "22 85 19 00",
web = "www.khm.uio.no",
synonyms = Some(Seq("KHM")),
serviceTags = Some(Seq("storage_facility"))
)
val res = orgDao.getById(oid).futureValue
expected.id mustBe res.get.id
expected.fn mustBe res.get.fn
expected.tel mustBe res.get.tel
expected.web mustBe res.get.web
}
"return None if the Id is 0 (zero)" in {
orgDao.getById(OrgId(0)).futureValue mustBe None
      }
      "return an empty list if the search string is not found" in {
orgDao.getByName("Andlkjlkj").futureValue mustBe empty
      }
      "return a list if the serviceTags is storage_facility" in {
val res = orgDao.getByNameAndTags("Kulturhis", "storage_facility").futureValue
res.size must be > 0
res.size mustBe 1
      }
      "return a list if the serviceTags is empty" in {
val res = orgDao.getByName("Arkeologisk").futureValue
res.size must be > 0
res.size mustBe 1
}
}
"modifying organization" should {
"succeed when inserting organization" in {
val org = Organisation(
id = None,
fn = "Testmuseet i Bergen",
tel = "99887766",
web = "www.tmib.no",
synonyms = Some(Seq("UM")),
serviceTags = Some(Seq("storage_facility"))
)
val res = orgDao.insert(org).futureValue
res.fn mustBe "Testmuseet i Bergen"
res.id mustBe Some(OrgId(11))
}
"succeed when updating organization" in {
val org1 = Organisation(
id = None,
fn = "Museet i Foobar",
tel = "12344321",
web = "www.foob.no",
synonyms = Some(Seq("Foo")),
serviceTags = Some(Seq("storage_facility"))
)
val res1 = orgDao.insert(org1).futureValue
res1.fn mustBe "Museet i Foobar"
res1.id mustBe Some(OrgId(12))
val orgUpd = Organisation(
id = Some(OrgId(12)),
fn = "Museet i Bar",
tel = "99344321",
web = "www.bar.no",
synonyms = Some(Seq("MusBar")),
serviceTags = Some(Seq("storage_facility"))
)
val resInt = orgDao.update(orgUpd).futureValue
val res = orgDao.getById(OrgId(12)).futureValue
res.get.fn mustBe "Museet i Bar"
res.get.tel mustBe "99344321"
res.get.web mustBe "www.bar.no"
}
"not update organization with invalid id" in {
val orgUpd = Organisation(
id = Some(OrgId(999991)),
fn = "Museet i Bar99",
tel = "99344321",
web = "www.bar.no",
synonyms = Some(Seq("MusBar99", "MusBar")),
serviceTags = Some(Seq("storage_facility"))
)
val res = orgDao.update(orgUpd).futureValue
res.isSuccess mustBe true
res.get mustBe None
}
"succeed when deleting organization" in {
orgDao.delete(OrgId(12)).futureValue mustBe 1
orgDao.getById(OrgId(12)).futureValue mustBe None
}
"not be able to delete organization with invalid id" in {
orgDao.delete(OrgId(3)).futureValue mustBe 0
}
}
}
}
|
kpmeen/musit
|
service_actor/test/repositories/dao/OrganisationDaoSpec.scala
|
Scala
|
gpl-2.0
| 4,746 |
Either[A, B] => A
|
hmemcpy/milewski-ctfp-pdf
|
src/content/1.9/code/scala/snippet14.scala
|
Scala
|
gpl-3.0
| 17 |
/*
* This file is part of the sohva project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gnieh.sohva
package test
import org.scalatest._
import org.scalatest.OptionValues._
class TestBulkDocs extends SohvaTestSpec with Matchers {
val docs: List[TestDoc] =
(for (i <- 1 to 10)
yield TestDoc("doc" + i, i)).toList
"saving several documents at once" should "result in all the documents being saved in the db" in {
val result = synced(db.saveDocs(docs))
result.filter {
case OkResult(_, _, _) => false
case ErrorResult(_, _, _) => true
}.size should be(0)
val saved = synced(db.getDocsById[TestDoc](docs.map(_._id)))
saved should be(docs)
val revisions = synced(db.getDocRevisions(docs.map(_._id)))
revisions should have size (saved.size)
revisions.map(_._2) should not contain ("")
  }
  "saving several documents with lists at once" should "result in all the documents being saved in the db and the list elements being serialized correctly" in {
implicit val docWithListFormat = couchFormat[DocWithList]
def strings(id: Int) =
(for (i <- 1 to 3)
yield "element:" + id + ":" + i).toList
val docsString =
(for (i <- 1 to 5)
yield DocWithList("doc_string_list:" + i, strings(i))).toList
val result1 = synced(db.saveDocs(docsString))
result1.filter {
case OkResult(_, _, _) => false
case ErrorResult(_, _, _) => true
}.size should be(0)
val saved1 = synced(db.getDocsById[DocWithList](docsString.map(_._id)))
saved1 should be(docsString)
}
"deleting several documents at once" should "result in all documents being deleted in the db" in {
synced(db.saveDocs(docs))
val ids = docs.map(_._id)
val saved = synced(db.getDocsById[TestDoc](ids))
saved should be(docs)
val deleted = synced(db.deleteDocs(ids))
deleted.filter {
case OkResult(_, _, _) => false
case ErrorResult(_, _, _) => true
}.size should be(0)
synced(db.getDocsById[TestDoc](docs.map(_._id))) should be(Nil)
}
"deleting several documents at once" should "delete only documents for which an id was provided" in {
synced(db.saveDocs(docs))
val ids = docs.map(_._id)
val saved = synced(db.getDocsById[TestDoc](ids))
saved should be(docs)
val deleted = synced(db.deleteDocs(ids.take(5)))
deleted.filter {
case OkResult(_, _, _) => false
case ErrorResult(_, _, _) => true
}.size should be(0)
synced(db.getDocsById[TestDoc](docs.map(_._id))) should be(docs.drop(5))
}
}
case class DocWithList(_id: String, list: List[String]) extends IdRev
|
gnieh/sohva
|
src/test/scala/gnieh/sohva/test/TestBulkDocs.scala
|
Scala
|
apache-2.0
| 3,171 |
package lila.hub
import scala.concurrent.duration._
import actorApi.map._
import akka.actor._
import akka.pattern.{ ask, pipe }
import makeTimeout.short
trait ActorMap extends Actor {
private val actors = scala.collection.mutable.Map.empty[String, ActorRef]
def mkActor(id: String): Actor
def actorMapReceive: Receive = {
case Get(id) => sender ! getOrMake(id)
case Tell(id, msg) => getOrMake(id) forward msg
case TellAll(msg) => actors.values foreach (_ forward msg)
case TellIds(ids, msg) => ids foreach { id =>
actors get id foreach (_ forward msg)
}
case Ask(id, msg) => getOrMake(id) forward msg
case Terminated(actor) =>
context unwatch actor
actors foreach {
case (id, a) => if (a == actor) actors -= id
}
case Exists(id) => sender ! actors.contains(id)
}
protected def size = actors.size
private def getOrMake(id: String) = actors get id getOrElse {
context.actorOf(Props(mkActor(id)), name = id) ~ { actor =>
actors += (id -> actor)
context watch actor
}
}
}
object ActorMap {
def apply(make: String => Actor) = new ActorMap {
def mkActor(id: String) = make(id)
def receive = actorMapReceive
}
}
|
clarkerubber/lila
|
modules/hub/src/main/ActorMap.scala
|
Scala
|
agpl-3.0
| 1,239 |
import example.launcher.JettyLauncherSupport
object JettyLauncher extends JettyLauncherSupport {
override def resourceBase: String = "src/main/webapp"
}
|
debop/scalatra-spring-couchbase
|
src/main/scala/JettyLauncher.scala
|
Scala
|
apache-2.0
| 159 |
// A simple recursive solution
def ints(count: Int)(rng: RNG): (List[Int], RNG) =
if (count == 0)
(List(), rng)
else {
val (x, r1) = rng.nextInt
val (xs, r2) = ints(count - 1)(r1)
(x :: xs, r2)
}
// A tail-recursive solution
def ints2(count: Int)(rng: RNG): (List[Int], RNG) = {
def go(count: Int, r: RNG, xs: List[Int]): (List[Int], RNG) =
if (count == 0)
(xs, r)
else {
val (x, r2) = r.nextInt
go(count - 1, r2, x :: xs)
}
go(count, rng, List())
}
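// A minimal sketch of how either version is used, assuming an RNG implementation such as
// the book's SimpleRNG(seed) case class is available:
//   val (xs, rng2) = ints(5)(SimpleRNG(42L))  // xs holds 5 pseudo-random Ints
//   val (ys, rng3) = ints2(5)(rng2)           // rng2 is threaded into the next call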
|
ud3sh/coursework
|
functional-programming-in-scala-textbook/answerkey/state/04.answer.scala
|
Scala
|
unlicense
| 508 |
package scaladex.server.route
import scala.concurrent.ExecutionContext
import akka.http.scaladsl.model.StatusCodes.TemporaryRedirect
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.headers.Referer
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server.Route
import com.softwaremill.session.CsrfDirectives._
import com.softwaremill.session.CsrfOptions._
import com.softwaremill.session.SessionDirectives._
import com.softwaremill.session.SessionOptions._
import scaladex.core.service
import scaladex.server.GithubUserSession
import scaladex.server.config.OAuth2Config
class Oauth2(config: OAuth2Config, githubAuth: service.GithubAuth, session: GithubUserSession)(
implicit ec: ExecutionContext
) {
import session.implicits._
val routes: Route =
get(
concat(
path("login")(
optionalHeaderValueByType(Referer)(referer =>
redirect(
Uri("https://github.com/login/oauth/authorize").withQuery(
Query(
"client_id" -> config.clientId,
"scope" -> "read:org",
"state" -> referer.map(_.value).getOrElse("/")
)
),
TemporaryRedirect
)
)
),
path("logout")(
headerValueByType(Referer) { referer =>
requiredSession(refreshable, usingCookies) { _ =>
invalidateSession(refreshable, usingCookies) { ctx =>
ctx.complete(
HttpResponse(
status = TemporaryRedirect,
headers = headers.Location(Uri(referer.value)) :: Nil,
entity = HttpEntity.Empty
)
)
}
}
}
),
pathPrefix("callback")(
concat(
path("done")(
complete("OK")
),
pathEnd(
parameters("code", "state".?) { (code, state) =>
val userStateQuery = githubAuth.getUserStateWithOauth2(
config.clientId,
config.clientSecret,
code,
config.redirectUri
)
onSuccess(userStateQuery) { userState =>
setSession(
refreshable,
usingCookies,
session.addUser(userState)
)(
setNewCsrfToken(checkHeader) { ctx =>
ctx.complete(
HttpResponse(
status = TemporaryRedirect,
headers = headers
.Location(Uri(state.getOrElse("/"))) :: Nil,
entity = HttpEntity.Empty
)
)
}
)
}
}
)
)
)
)
)
}
|
scalacenter/scaladex
|
modules/server/src/main/scala/scaladex/server/route/Oauth2.scala
|
Scala
|
bsd-3-clause
| 3,031 |
/*
* Copyright (c) <2015-2016>, see CONTRIBUTORS
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ch.usi.inf.l3.sana.dcct.ast
import ch.usi.inf.l3.sana
import sana.primj._
import sana.tiny.types.Type
import sana.tiny.source.Position
import sana.tiny.symbols.Symbol
import sana.tiny.names.Name
import sana.primj.ast.Implicits._
import sana.primj.types.MethodType
import sana.tiny.modifiers.Flags
import sana.tiny.ast._
import sana.calcj.ast._
import sana.primj.ast._
trait TreeFactories extends sana.ooj.ast.TreeFactories {
// TODO Copied from primj factories. Is there a nicer way to do it?
def mkActionDef(ret: UseTree,
name: Name, params: List[ValDefApi],
body: Expr, pos: Option[Position] = None,
symbol: Option[Symbol] = None): MethodDefApi = {
val res = new MethodDef(ret, name, params, body)
pos.foreach(res.pos = _)
symbol.foreach( sym => {
res.symbol = sym
sym.owner.foreach(res.owner = _)
})
val tys = params.flatMap(_.tpe)
ret.tpe.foreach(t => res.tpe = MethodType(t, tys))
res
}
def mkArrayDef(name: Name, indices: List[ValDefApi],
properties: List[ValDefApi], symbol: Option[Symbol] = None) : ArrayDefApi = {
val res = new ArrayDef(name, indices, properties)
symbol.foreach ( sym => {
res.symbol = sym
sym.owner.foreach(res.owner = _)
})
res
}
def mkForEach(entityVar: ValDefApi, whereExpr: Expr, body: BlockApi,
symbol: Option[Symbol] = None): ForEach = {
val res = new ForEach(entityVar, whereExpr, body)
symbol.foreach( sym => {
res.symbol = sym
sym.owner.foreach(res.owner = _)
})
res
}
}
object TreeFactories extends TreeFactories
|
amanjpro/languages-a-la-carte
|
dcct/src/main/scala/ast/TreeFactories.scala
|
Scala
|
bsd-3-clause
| 3,179 |
package com.sksamuel.avro4s.schema
import com.sksamuel.avro4s.AvroSchema
import org.scalatest.{FunSuite, Matchers}
class NamespaceSchemaTest extends FunSuite with Matchers {
test("use package name for top level class") {
val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/top_level_class_namespace.json"))
val schema = AvroSchema[Tau]
schema.toString(true) shouldBe expected.toString(true)
}
test("use package name without .package for classes defined in the package object") {
val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/top_level_package_object_namespace.json"))
val schema = AvroSchema[Sigma]
schema.toString(true) shouldBe expected.toString(true)
}
test("use namespace of object for classes defined inside an object") {
val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/top_level_object_namespace.json"))
val schema = AvroSchema[A]
schema.toString(true) shouldBe expected.toString(true)
}
test("local classes should use the namespace of their parent object package") {
case class NamespaceTestFoo(inner: String)
val expected = new org.apache.avro.Schema.Parser().parse(getClass.getResourceAsStream("/local_class_namespace.json"))
val schema = AvroSchema[NamespaceTestFoo]
schema.toString(true) shouldBe expected.toString(true)
}
}
case class Tau(a: String, b: Boolean)
case class A(inner: A.Inner)
object A {
final case class Inner(s: String)
}
|
51zero/avro4s
|
avro4s-core/src/test/scala/com/sksamuel/avro4s/schema/NamespaceSchemaTest.scala
|
Scala
|
mit
| 1,547 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2012-2014 Alexey Aksenov [email protected]
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Global License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Global License for more details.
* You should have received a copy of the GNU Affero General Global License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Global License.
*
* In accordance with Section 7(b) of the GNU Affero General Global License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: [email protected]
*/
package org.digimead.tabuddy.desktop.core.ui
import com.google.common.collect.MapMaker
import java.util.Locale
import java.util.concurrent.{ ConcurrentMap, CopyOnWriteArraySet }
import org.digimead.digi.lib.aop.log
import org.digimead.digi.lib.api.XDependencyInjection
import org.digimead.digi.lib.log.api.XLoggable
import org.digimead.tabuddy.desktop.core.support.App
import org.digimead.tabuddy.desktop.core.ui.block.View
import org.digimead.tabuddy.desktop.core.ui.definition.IWizard
import org.eclipse.jface.fieldassist.FieldDecorationRegistry
import org.eclipse.jface.resource.{ ImageDescriptor, JFaceResources }
import org.eclipse.swt.SWT
import org.eclipse.swt.graphics.{ Font, GC, Image }
import org.eclipse.swt.widgets.Shell
import org.osgi.framework.{ BundleActivator, BundleContext }
import scala.collection.JavaConverters.asScalaSetConverter
import scala.collection.convert.Wrappers.JMapWrapperLike
import scala.collection.mutable
import scala.language.implicitConversions
/**
* Handle application resources.
*/
class Resources extends BundleActivator with XLoggable {
val small = 0.315
val medium = 0.7
val large = 1
  /** The large font. */
lazy val fontLarge = {
val fD = App.display.getSystemFont().getFontData()
fD.head.setHeight(fD.head.getHeight + 1)
new Font(App.display, fD.head)
}
/** Default font metrics. */
lazy val fontMetrics = App.execNGet {
val gc = new GC(limboShell)
gc.setFont(limboShell.getFont())
val fontMetrics = gc.getFontMetrics()
gc.dispose()
fontMetrics
}
  /** The small font. */
lazy val fontSmall = {
val fD = App.display.getSystemFont().getFontData()
fD.head.setHeight(fD.head.getHeight - 1)
new Font(App.display, fD.head)
}
/** List of all application view factories. */
protected val viewFactoriesMap = Resources.ViewFactoryMap(new MapMaker().weakKeys().makeMap[View.Factory, Boolean]())
/** Application wizards set. */
protected val wizardsSet = new CopyOnWriteArraySet[Class[_ <: IWizard]].asScala
/** Limbo shell. */
lazy val limboShell = App.execNGet {
val limbo = new Shell(App.display, SWT.NONE)
limbo.setBackgroundMode(SWT.INHERIT_DEFAULT)
limbo
}
private val lock = new Object
/** Returns the number of pixels corresponding to the height of the given number of characters. */
def convertHeightInCharsToPixels(chars: Int): Int =
org.eclipse.jface.dialogs.Dialog.convertHeightInCharsToPixels(fontMetrics, chars)
/** Get factory by singleton class name. */
def factory(className: String): Option[View.Factory] =
viewFactoriesMap.find(_._1.getClass().getName() == className).map(_._1)
/** Get map of factories. */
def factories: Map[View.Factory, Boolean] = viewFactoriesMap.toMap
/** Returns the image stored in the image registry under the given symbolic name. */
def getImage(symbolicName: String) = JFaceResources.getImageRegistry().get(symbolicName)
/** Get image at the specific path and scale to k. */
def getImage(path: String, k: Double) = {
val image = ResourceManager.getImage(getClass, path)
scale(image, k)
}
/** Register view factory in the map of the application views. */
def registerViewFactory(factory: View.Factory, enabled: Boolean) = lock.synchronized {
log.debug(s"Register view factory ${factory}.")
viewFactoriesMap += factory -> enabled
}
/** Register wizard. */
def registerWizard(clazz: Class[_ <: IWizard]) = lock.synchronized {
log.debug(s"Register wizard ${clazz.getName}.")
wizardsSet += clazz
}
def scale(image: Image, k: Double): Image = {
val width = image.getBounds().width
val height = image.getBounds().height
new Image(App.display, image.getImageData().scaledTo((width * k).toInt, (height * k).toInt))
}
/** Recreate font with specific style */
def setFontStyle(font: Font, style: Int): Font = {
val fD = font.getFontData()
fD.head.setStyle(style)
new Font(App.display, fD.head)
}
@log
def start(context: BundleContext) {
log.debug("Initialize application SWT resources.")
// Reinitialize decoration registry for compatibility with OSGi reloads.
FieldDecorationRegistry.setDefault(new FieldDecorationRegistry())
val imageRegistry = JFaceResources.getImageRegistry()
// Define the images used in the standard decorations.
imageRegistry.remove("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_CONTENT_PROPOSAL")
imageRegistry.put("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_CONTENT_PROPOSAL",
ImageDescriptor.createFromFile(classOf[FieldDecorationRegistry], "images/contassist_ovr.gif"))
imageRegistry.remove("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_ERROR")
imageRegistry.put("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_ERROR",
ImageDescriptor.createFromFile(classOf[FieldDecorationRegistry], "images/error_ovr.gif"))
imageRegistry.remove("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_WARNING")
imageRegistry.put("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_WARNING",
ImageDescriptor.createFromFile(classOf[FieldDecorationRegistry], "images/warn_ovr.gif"))
imageRegistry.remove("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_REQUIRED")
imageRegistry.put("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_REQUIRED",
ImageDescriptor.createFromFile(classOf[FieldDecorationRegistry], "images/required_field_cue.gif"))
imageRegistry.remove("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_ERROR_QUICKFIX")
imageRegistry.put("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_ERROR_QUICKFIX",
ImageDescriptor.createFromFile(classOf[FieldDecorationRegistry], "images/errorqf_ovr.gif"))
imageRegistry.remove("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_INFO")
imageRegistry.put("org.eclipse.jface.fieldassist.IMG_DEC_FIELD_INFO",
ImageDescriptor.createFromFile(classOf[FieldDecorationRegistry], "images/info_ovr.gif"))
FieldDecorationRegistry.getDefault().registerFieldDecoration("DEC_CONTENT_PROPOSAL",
JFaceResources.getString("FieldDecorationRegistry.contentAssistMessage"),
"org.eclipse.jface.fieldassist.IMG_DEC_FIELD_CONTENT_PROPOSAL", imageRegistry)
FieldDecorationRegistry.getDefault().registerFieldDecoration("DEC_ERROR",
JFaceResources.getString("FieldDecorationRegistry.errorMessage"),
"org.eclipse.jface.fieldassist.IMG_DEC_FIELD_ERROR", imageRegistry)
FieldDecorationRegistry.getDefault().registerFieldDecoration("DEC_ERRORQUICKFIX",
JFaceResources.getString("FieldDecorationRegistry.errorQuickFixMessage"),
"org.eclipse.jface.fieldassist.IMG_DEC_FIELD_ERROR_QUICKFIX", imageRegistry)
FieldDecorationRegistry.getDefault().registerFieldDecoration("DEC_WARNING",
null, "org.eclipse.jface.fieldassist.IMG_DEC_FIELD_WARNING", imageRegistry)
FieldDecorationRegistry.getDefault().registerFieldDecoration("DEC_INFORMATION",
null, "org.eclipse.jface.fieldassist.IMG_DEC_FIELD_INFO", imageRegistry)
FieldDecorationRegistry.getDefault().registerFieldDecoration("DEC_REQUIRED",
JFaceResources.getString("FieldDecorationRegistry.requiredFieldMessage"),
"org.eclipse.jface.fieldassist.IMG_DEC_FIELD_REQUIRED", imageRegistry)
assert(!Resources.Image.error.isDisposed(), "FieldDecoration resources is already disposed.")
fontLarge
fontSmall
Locale.setDefault(Resources.initialLocale)
}
@log
def stop(context: BundleContext) = {
log.debug("Dispose application SWT resources.")
ResourceManager.dispose()
}
/** Validate resource leaks on shutdown. */
def validateOnShutdown() {
if (viewFactoriesMap.nonEmpty) log.fatal("View Factories leaks: " + viewFactoriesMap)
if (wizardsSet.nonEmpty) log.fatal("Wizards leaks: " + wizardsSet)
}
/** Get set of wizards. */
def wizards() = lock.synchronized { wizardsSet.toSet }
/** Remove view factory from the map of the application known views. */
def unregisterViewFactory(factory: View.Factory) = lock.synchronized {
log.debug("Remove " + factory)
viewFactoriesMap -= factory
}
/** Unregister wizard. */
def unregisterWizard(clazz: Class[_ <: IWizard]) = lock.synchronized {
log.debug(s"Unregister wizard ${clazz.getName}.")
wizardsSet -= clazz
}
object Image {
    // We are unable to dispose of these resources:
    // they are initialized only once, in the static block of org.eclipse.jface.fieldassist.FieldDecorationRegistry.
    // If we disposed of them at shutdown, then after a bundle restart we would have a pack of disposed garbage.
    // Someone may want to restart the org.eclipse.jface bundle and all its dependencies ;-) lol.
    // So org.eclipse.jface must control their life cycle itself in a better world.
/*
* From org.eclipse.jface.fieldassist.FieldDecorationRegistry:
*
* Registers a field decoration using the specified id. The lifecyle of the
* supplied image should be managed by the client. That is, it will never be
* disposed by this registry and the decoration should be removed from the
* registry if the image is ever disposed elsewhere.
*
* fucking Eclipse code monkey...
*/
lazy val error = FieldDecorationRegistry.getDefault().getFieldDecoration(FieldDecorationRegistry.DEC_ERROR).getImage()
lazy val required = FieldDecorationRegistry.getDefault().getFieldDecoration(FieldDecorationRegistry.DEC_REQUIRED).getImage()
}
}
/**
* Application UI resources.
*/
object Resources extends XLoggable {
implicit def resources2implementation(r: Resources.type): Resources = r.inner
/** Get Resources implementation. */
def inner = DI.implementation
/** Application initial locale that is used for Locale.setDefault. */
def initialLocale = DI.locale
sealed trait IconTheme {
val name: String
}
object IconTheme {
case object Light extends Resources.IconTheme { val name = "light" }
case object Dark extends Resources.IconTheme { val name = "dark" }
}
/** Application view map. This class is responsible for action.View command update. */
case class ViewFactoryMap[A <: View.Factory, B](underlying: ConcurrentMap[A, B])
extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, ViewFactoryMap[A, B]] {
override def empty = ViewFactoryMap(new MapMaker().makeMap[A, B]())
/** Adds a single element to the map. */
override def +=(kv: (A, B)): this.type = { put(kv._1, kv._2); this }
/** Removes a key from this map. */
override def -=(key: A): this.type = { remove(key); this }
/** Adds a new key/value pair to this map and optionally returns previously bound value. */
override def put(key: A, value: B): Option[B] = {
if (keys.exists(_.name == key.name))
        throw new IllegalArgumentException(s"View with name '${key.name.name}' already exists.")
super.put(key, value)
}
/** Adds a new key/value pair to this map. */
override def update(key: A, value: B): Unit = put(key, value)
}
/**
* Dependency injection routines
*/
private object DI extends XDependencyInjection.PersistentInjectable {
/** Resources implementation. */
lazy val implementation = injectOptional[Resources] getOrElse new Resources()
/** Application locale. */
lazy val locale = injectOptional[Locale] getOrElse Locale.getDefault()
/** Icon theme. */
lazy val theme = injectOptional[Resources.IconTheme] getOrElse IconTheme.Light
}
}
|
digimead/digi-TABuddy-desktop
|
part-core-ui/src/main/scala/org/digimead/tabuddy/desktop/core/ui/Resources.scala
|
Scala
|
agpl-3.0
| 13,609 |
/*
Copyright 2014 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.summingbird.memory
import java.util.concurrent.{ BlockingQueue, ConcurrentHashMap, LinkedBlockingQueue }
import com.twitter.algebird.Monoid
import com.twitter.summingbird._
import com.twitter.summingbird.option.JobId
import org.scalacheck.Arbitrary
import org.scalatest.WordSpec
import scala.concurrent.Await
import scala.concurrent.duration.Duration
/**
* Tests for Summingbird's in-memory planner.
*/
class ConcurrentMemoryLaws extends WordSpec {
// This is dangerous, obviously. The Memory platform tested here
// doesn't perform any batching, so the actual time extraction isn't
// needed.
implicit def extractor[T]: TimeExtractor[T] = TimeExtractor(_ => 0L)
import scala.concurrent.ExecutionContext.Implicits.global
def sample[T: Arbitrary]: T = Arbitrary.arbitrary[T].sample.get
def empty[T](b: BlockingQueue[T]): List[T] = {
def go(items: List[T]): List[T] = b.poll() match {
case null => items.reverse
case x => go(x :: items)
}
go(Nil)
}
def unorderedEq[T](left: List[T], right: List[T]): Boolean = {
val leftMap = left.groupBy(identity).mapValues(_.size)
val rightMap = right.groupBy(identity).mapValues(_.size)
val eqv = leftMap == rightMap
    if (!eqv) { println(s"from Queue: $leftMap\nfrom scala: $rightMap") }
eqv
}
def testGraph[T: Manifest: Arbitrary, K: Arbitrary, V: Monoid: Arbitrary: Equiv] =
new TestGraphs[ConcurrentMemory, T, K, V](new ConcurrentMemory)(
() => new ConcurrentHashMap[K, V]())(() => new LinkedBlockingQueue[T]())(
Producer.source[ConcurrentMemory, T](_))(s => { k => Option(s.get(k)) })({ (f, items) =>
unorderedEq(empty(f), items)
})({ (p: ConcurrentMemory, plan: ConcurrentMemoryPlan) => Await.result(plan.run, Duration.Inf) })
/**
* Tests the in-memory planner against a job with a single flatMap
* operation.
*/
def singleStepLaw[T: Arbitrary: Manifest, K: Arbitrary, V: Monoid: Arbitrary: Equiv] =
testGraph[T, K, V].singleStepChecker(sample[List[T]], sample[T => List[(K, V)]])
/**
* Tests the in-memory planner against a job with a single flatMap
* operation.
*/
def diamondLaw[T: Manifest: Arbitrary, K: Arbitrary, V: Monoid: Arbitrary: Equiv] =
testGraph[T, K, V].diamondChecker(sample[List[T]], sample[T => List[(K, V)]], sample[T => List[(K, V)]])
/**
* Tests the in-memory planner by generating arbitrary flatMap and
* service functions.
*/
def leftJoinLaw[T: Manifest: Arbitrary, K: Arbitrary, U: Arbitrary, JoinedU: Arbitrary, V: Monoid: Arbitrary: Equiv] = {
val serviceFn = Arbitrary.arbitrary[K => Option[JoinedU]].sample.get
testGraph[T, K, V].leftJoinChecker[U, JoinedU](serviceFn, identity, sample[List[T]], sample[T => List[(K, U)]], sample[((K, (U, Option[JoinedU]))) => List[(K, V)]])
}
def mapKeysChecker[T: Manifest: Arbitrary, K1: Arbitrary, K2: Arbitrary, V: Monoid: Arbitrary: Equiv](): Boolean = {
val platform = new ConcurrentMemory
val currentStore = new ConcurrentHashMap[K2, V]()
val original = sample[List[T]]
val fnA = sample[T => List[(K1, V)]]
val fnB = sample[K1 => List[K2]]
// Use the supplied platform to execute the source into the
// supplied store.
val plan = platform.plan {
TestGraphs.singleStepMapKeysJob[ConcurrentMemory, T, K1, K2, V](original, currentStore)(fnA, fnB)
}
Await.result(plan.run, Duration.Inf)
    val lookupFn = { k: K2 => Option(currentStore.get(k)) }
TestGraphs.singleStepMapKeysInScala(original)(fnA, fnB).forall {
case (k, v) =>
val lv = lookupFn(k).getOrElse(Monoid.zero)
Equiv[V].equiv(v, lv)
}
}
def lookupCollectChecker[T: Arbitrary: Equiv: Manifest, U: Arbitrary: Equiv]: Boolean = {
val mem = new ConcurrentMemory
val input = sample[List[T]]
val srv = sample[T => Option[U]]
val buffer = new LinkedBlockingQueue[(T, U)]()
val prod = TestGraphs.lookupJob[ConcurrentMemory, T, U](input, srv, buffer)
Await.result(mem.plan(prod).run, Duration.Inf)
// check it out:
val buffData = empty(buffer)
val correctData = TestGraphs.lookupJobInScala(input, srv)
unorderedEq(buffData, correctData)
}
/**
* Tests the in-memory planner against a job with a single flatMap
* operation and some test counters
*/
def counterChecker[T: Manifest: Arbitrary, K: Arbitrary, V: Monoid: Arbitrary: Equiv]: Boolean = {
implicit val jobID: JobId = new JobId("concurrent.memory.job.testJobId")
val mem = new ConcurrentMemory
val input = sample[List[T]]
val fn = sample[(T) => List[(K, V)]]
val sourceMaker = ConcurrentMemory.toSource[T](_)
val original = sample[List[T]]
val source = sourceMaker(original)
val store: ConcurrentMemory#Store[K, V] = new ConcurrentHashMap[K, V]()
val prod = TestGraphs.jobWithStats[ConcurrentMemory, T, K, V](jobID, source, store)(t => fn(t))
Await.result(mem.plan(prod).run, Duration.Inf)
//mem.run(mem.plan(prod))
val origCounter = mem.counter(Group("counter.test"), Name("orig_counter")).get
val fmCounter = mem.counter(Group("counter.test"), Name("fm_counter")).get
val fltrCounter = mem.counter(Group("counter.test"), Name("fltr_counter")).get
(origCounter == original.size) &&
(fmCounter == (original.flatMap(fn).size * 2)) &&
(fltrCounter == (original.flatMap(fn).size))
}
"The ConcurrentMemory Platform" should {
//Set up the job:
"singleStep w/ Int, Int, Set[Int]" in { assert(singleStepLaw[Int, Int, Set[Int]] == true) }
"singleStep w/ Int, String, List[Int]" in { assert(singleStepLaw[Int, String, List[Int]] == true) }
"singleStep w/ String, Short, Map[Set[Int], Long]" in { assert(singleStepLaw[String, Short, Map[Set[Int], Long]] == true) }
// Note the stored values only make sense if you have a commutative monoid
// since, due to concurrency, we might put things in a different order with this platform
"diamond w/ Int, Int, Set[Int]" in { assert(diamondLaw[Int, Int, Set[Int]] == true) }
"diamond w/ String, Short, Map[Set[Int], Long]" in {
/*
* It is important to use an Equiv on Maps that treats empty like 0s, since our scala
       * implementation uses MapAlgebra.sumByKey, which, for better or worse, removes zeros of
* monoids, since that is what the MapMonoid does. :/
*/
import com.twitter.algebird.MapAlgebra.sparseEquiv
assert(diamondLaw[String, Short, Map[Set[Int], Long]] == true)
}
"leftJoin w/ Int, Int, String, Long, Set[Int]" in { assert(leftJoinLaw[Int, Int, String, Long, Set[Int]] == true) }
"flatMapKeys w/ Int, Int, Int, Set[Int]" in { assert(mapKeysChecker[Int, Int, Int, Set[Int]] == true) }
"lookupCollect w/ Int, Int" in { assert(lookupCollectChecker[Int, Int] == true) }
"counters w/ Int, Int, Int" in { assert(counterChecker[Int, Int, Int] == true) }
}
}
|
nabarunnag/Summingbird_dev
|
summingbird-core-test/src/test/scala/com/twitter/summingbird/memory/ConcurrentMemoryLaws.scala
|
Scala
|
apache-2.0
| 7,476 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import sbt.Keys._
import uk.gov.hmrc.PublishingSettings._
import uk.gov.hmrc.SbtAutoBuildPlugin
import uk.gov.hmrc.versioning.SbtGitVersioning
object HmrcBuild extends Build {
import uk.gov.hmrc._
import DefaultBuildSettings._
import uk.gov.hmrc.{SbtBuildInfo, ShellPrompt}
import uk.gov.hmrc.PublishingSettings._
val appName = "http-verbs"
lazy val microservice = Project(appName, file("."))
.enablePlugins(SbtAutoBuildPlugin, SbtGitVersioning)
.settings(
targetJvm := "jvm-1.7",
libraryDependencies ++= AppDependencies(),
crossScalaVersions := Seq("2.11.6"),
resolvers := Seq(
Resolver.bintrayRepo("hmrc", "releases"),
"typesafe-releases" at "http://repo.typesafe.com/typesafe/releases/"
)
)
}
private object AppDependencies {
import play.PlayImport._
import play.core.PlayVersion
val compile = Seq(
"com.typesafe.play" %% "play" % PlayVersion.current,
ws,
"net.ceedubs" %% "ficus" % "1.1.1",
"uk.gov.hmrc" %% "time" % "1.1.0",
"uk.gov.hmrc" %% "http-exceptions" % "0.3.0"
)
trait TestDependencies {
lazy val scope: String = "test"
lazy val test: Seq[ModuleID] = ???
}
object Test {
def apply() = new TestDependencies {
override lazy val test = Seq(
"com.typesafe.play" %% "play-test" % PlayVersion.current % scope,
"commons-codec" % "commons-codec" % "1.7" % scope,
"org.scalatest" %% "scalatest" % "2.2.4" % scope,
"org.scalacheck" %% "scalacheck" % "1.12.2" % scope,
"org.pegdown" % "pegdown" % "1.4.2" % scope,
"com.github.tomakehurst" % "wiremock" % "1.52" % scope
)
}.test
}
def apply() = compile ++ Test()
}
|
liquidarmour/http-verbs
|
project/HmrcBuild.scala
|
Scala
|
apache-2.0
| 2,329 |
/*
* Copyright 2016 Michal Harish, [email protected]
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.amient.affinity.core.cluster
import java.util
import akka.actor.{ActorPath, ActorSystem}
import com.typesafe.config.Config
import io.amient.affinity.core.cluster.Coordinator.CoordinatorConf
import io.amient.affinity.core.cluster.CoordinatorZk.CoordinatorZkConf
import io.amient.affinity.core.config.CfgStruct
import io.amient.affinity.util.{ZkClients, ZkConf}
import org.I0Itec.zkclient.IZkChildListener
import org.apache.zookeeper.CreateMode
import scala.collection.JavaConverters._
object CoordinatorZk {
object CoordinatorZkConf extends CoordinatorZkConf {
override def apply(config: Config) = new CoordinatorZkConf()(config)
}
class CoordinatorZkConf extends CfgStruct[CoordinatorZkConf](classOf[CoordinatorConf]) {
val ZooKeeper = struct("zookeeper", new ZkConf, true)
val ZkRoot = string("zookeeper.root", "/affinity")
.doc("znode under which coordination data between affinity nodes will be registered")
}
}
class CoordinatorZk(system: ActorSystem, group: String, _conf: CoordinatorConf) extends Coordinator(system, group) {
val conf = CoordinatorZkConf(_conf)
val zkConf = conf.ZooKeeper()
val zkRoot = conf.ZkRoot()
val groupRoot = s"$zkRoot/${system.name}/$group/online"
val peersRoot = s"$zkRoot/${system.name}/$group/peers"
private val zk = ZkClients.get(zkConf)
if (!zk.exists(groupRoot)) zk.createPersistent(groupRoot, true)
updateChildren(zk.subscribeChildChanges(groupRoot, new IZkChildListener() {
override def handleChildChange(parentPath: String, children: util.List[String]): Unit = {
updateChildren(children)
}
}))
override def register(actorPath: ActorPath): String = {
zk.create(s"$groupRoot/", actorPath.toString(), CreateMode.EPHEMERAL_SEQUENTIAL)
}
override def unregister(handle: String) = zk.delete(handle)
override def close(): Unit = if (!closed.get) {
super.close()
    ZkClients.close(zk)
}
private def listAsIndexedSeq(list: util.List[String]) = list.asScala.toIndexedSeq
private def updateChildren(children: util.List[String]): Unit = {
if (children != null) {
val newHandles = listAsIndexedSeq(children).map(id => s"$groupRoot/$id")
val newState = newHandles.map(handle => (handle, zk.readData[String](handle))).toMap
updateGroup(newState)
}
}
override def registerPeer(akkaAddress: String, knownZid: Option[String]): String = {
if (!zk.exists(peersRoot)) zk.createPersistent(peersRoot, true)
val nodes = zk.getChildren(peersRoot).asScala.map(i => (i, zk.readData[String](s"$peersRoot/$i")))
val zid: String = knownZid.flatMap { id =>
nodes.find(_._1 == id) match {
case Some((_, prevAkkaAddress)) if (prevAkkaAddress == akkaAddress) => Some(id)
case Some(_) => zk.writeData(s"$peersRoot/$id", akkaAddress); Some(id)
case None => None
}
} getOrElse {
nodes.find(_._2 == akkaAddress) match {
case Some((id, _)) => id
case None => zk.create(s"$peersRoot/", akkaAddress, CreateMode.PERSISTENT_SEQUENTIAL).substring(peersRoot.length+1)
}
}
def update(zids: util.List[String]) = updatePeers(zids.asScala.toList)
try zid finally update(zk.subscribeChildChanges(peersRoot, new IZkChildListener() {
override def handleChildChange(parentPath: String, zids: util.List[String]): Unit = update(zids)
}))
}
}
|
amient/affinity
|
core/src/main/scala/io/amient/affinity/core/cluster/CoordinatorZk.scala
|
Scala
|
apache-2.0
| 4,240 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Linked}
import uk.gov.hmrc.ct.computations.CP88
case class B172(value: Option[Int]) extends CtBoxIdentifier("Annual Investment Allowance") with CtOptionalInteger
object B172 extends Linked[CP88, B172] {
override def apply(source: CP88): B172 = B172(source.value)
}
|
ahudspith-equalexperts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600/v2/B172.scala
|
Scala
|
apache-2.0
| 971 |
package com.twitter.finagle.tracing
import scala.util.control.NonFatal
case class TraceId128(low: Option[SpanId], high: Option[SpanId])
object TraceId128 {
val empty: TraceId128 = TraceId128(None, None)
/**
* Extracts the high 64bits (if set and valid) and low 64bits (if valid) from a B3 TraceID's string representation.
*
* @param spanId A 64bit or 128bit Trace ID.
*/
def apply(spanId: String): TraceId128 = {
try {
val length = spanId.length
val lower64Bits = if (length <= 16) spanId else spanId.substring(length - 16)
val low =
Some(SpanId(java.lang.Long.parseUnsignedLong(lower64Bits, 16)))
val high =
if (length == 32)
Some(SpanId(java.lang.Long.parseUnsignedLong(spanId.substring(0, 16), 16)))
else None
TraceId128(low, high)
} catch {
case NonFatal(_) => empty
}
}
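  // A minimal sketch of the expected results (the hex ids are hypothetical):
  //   TraceId128("48485a3953bb61246b221d5bc9e6496c") // 128-bit id: low and high both defined
  //   TraceId128("6b221d5bc9e6496c")                 // 64-bit id: high is None
  //   TraceId128("not-a-hex-id")                     // malformed input: TraceId128.empty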
}
|
twitter/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/tracing/TraceId128.scala
|
Scala
|
apache-2.0
| 886 |
package io.buoyant.namer.consul
import com.twitter.conversions.DurationOps._
import com.twitter.finagle.http.{Request, Response, Status, Version}
import com.twitter.finagle.stats.NullStatsReceiver
import com.twitter.finagle.util.DefaultTimer
import com.twitter.finagle.{Addr, Address, Failure, IndividualRequestTimeoutException}
import com.twitter.util._
import io.buoyant.consul.v1._
import io.buoyant.namer.consul.SvcAddr.Stats
import io.buoyant.namer.{InstrumentedVar, Metadata}
import io.buoyant.test.Awaits
import java.net.InetSocketAddress
import java.util.concurrent.atomic.AtomicInteger
import org.scalatest.{FunSuite, Matchers}
class SvcAddrTest extends FunSuite with Matchers with Awaits {
implicit val timer: Timer = DefaultTimer
val hangForLongBackoff = Stream.continually(Duration.Top)
val emptyRequest = Request()
def service(host: String = "8.8.8.8", port: Int = 53): (ServiceNode, InetSocketAddress) =
(
ServiceNode(
Some("node"),
Some(host),
Some("servicename"),
Some("servicename"),
Some(Seq.empty),
Some(""),
Some(port),
Some(HealthStatus.Passing),
Some(Map("srv_meta" -> "some_srv_meta")),
Some(Map("nd_meta" -> "some_nd_meta"))
),
new InetSocketAddress(host, port)
)
def apiStub(stubFn: (String, Option[String], Option[String], Option[String], Option[ConsistencyMode], Boolean) => Future[IndexedServiceNodes]) =
new CatalogApi(null, "/v1") {
override def serviceNodes(
serviceName: String,
datacenter: Option[String],
tag: Option[String] = None,
blockingIndex: Option[String] = None,
consistency: Option[ConsistencyMode] = None,
retry: Boolean = false
): ApiCall[IndexedServiceNodes] = ApiCall(emptyRequest, _ => stubFn(serviceName, datacenter, tag, blockingIndex, consistency, retry))
}
/**
* Creates CatalogApi stub that serves responses in sequence.
* Returns CatalogApi and a Future that is satisfied after all responses are served.
*/
def scriptedApiStub(scriptedResponses: Future[IndexedServiceNodes]*): (CatalogApi, Future[Unit]) = {
val servedAllResponses = Promise[Unit]()
var remaining = scriptedResponses.toSeq
apiStub { (_, _, _, _, _, _) =>
remaining match {
case head +: tail =>
remaining = tail
head
case _ =>
servedAllResponses.setDone()
Future.never
}
} -> servedAllResponses
}
object singletonSet {
    def unapply[T](arg: Set[T]): Option[T] = arg.headOption
}
test("should keep last known Addr.Bound value on generic error") {
// given
val (serviceNode, serviceAddr) = service()
val (api, servedAll) = scriptedApiStub(
Future.value(Indexed[Seq[ServiceNode]](Seq(serviceNode), Some("1"))),
Future.exception(new Throwable("whatever is thrown we catch"))
)
// when
val svcAddrVar: InstrumentedVar[Addr] = SvcAddr(
api,
Stream.continually(1.millis),
"dc1",
SvcKey("svc", None),
None,
None,
None,
Map.empty,
Stats(NullStatsReceiver),
new PollState
)
// observe Var so it is not dormant
svcAddrVar.underlying.changes.respond(_ => ())
// then
await(servedAll)
svcAddrVar.running shouldBe true
svcAddrVar.lastStartedAt shouldBe 'defined
svcAddrVar.lastStoppedAt should not be 'defined
svcAddrVar.lastUpdatedAt shouldBe 'defined
svcAddrVar.underlying.sample() should matchPattern {
case Addr.Bound(singletonSet(Address.Inet(addr, _)), _) if addr == serviceAddr => ()
}
}
test("should keep last known Addr.Bound value on the 'No path to datacenter' error") {
// given
val (serviceNode, serviceAddr) = service()
val rsp = Response(Version.Http11, Status.InternalServerError)
rsp.contentString = "No path to datacenter"
val (api, servedAll) = scriptedApiStub(
Future.value(Indexed[Seq[ServiceNode]](Seq(serviceNode), Some("1"))),
Future.exception(UnexpectedResponse(rsp))
)
// when
val svcAddrVar: InstrumentedVar[Addr] = SvcAddr(
api,
Stream.continually(1.millis),
"dc1",
SvcKey("svc", None),
None,
None,
None,
Map.empty,
Stats(NullStatsReceiver),
new PollState
)
// observe Var so it is not dormant
svcAddrVar.underlying.changes.respond(_ => ())
// then
await(servedAll)
svcAddrVar.running shouldBe true
svcAddrVar.lastStartedAt shouldBe 'defined
svcAddrVar.lastStoppedAt should not be 'defined
svcAddrVar.lastUpdatedAt shouldBe 'defined
svcAddrVar.underlying.sample() should matchPattern {
case Addr.Bound(singletonSet(Address.Inet(addr, _)), _) if addr == serviceAddr => ()
}
}
test("should return Addr.Neg on the 'No path to datacenter' error if no previous state exist") {
// given
val (serviceNode, serviceAddr) = service()
val rsp = Response(Version.Http11, Status.InternalServerError)
rsp.contentString = "No path to datacenter"
val (api, servedAll) = scriptedApiStub(
Future.exception(UnexpectedResponse(rsp))
)
// when
val svcAddrVar: InstrumentedVar[Addr] = SvcAddr(
api,
Stream.continually(1.millis),
"dc1",
SvcKey("svc", None),
None,
None,
None,
Map.empty,
Stats(NullStatsReceiver),
new PollState
)
// observe Var so it is not dormant
svcAddrVar.underlying.changes.respond(_ => ())
// then
await(servedAll)
svcAddrVar.running shouldBe true
svcAddrVar.lastStartedAt shouldBe 'defined
svcAddrVar.lastStoppedAt should not be 'defined
svcAddrVar.lastUpdatedAt shouldBe 'defined
svcAddrVar.underlying.sample() should matchPattern {
case Addr.Neg =>
}
}
test("should retry with backoff on errors") {
// given
val numOfRequests = new AtomicInteger()
val api = apiStub { (_, _, _, _, _, _) =>
numOfRequests.incrementAndGet()
Future.exception(new Throwable("whatever is thrown we retry"))
}
val numOfRetries = 5
val retried = new Promise[Unit] // satisfied when backoffs stream reaches hang
// When the head of a stream is destructured off, the next element is reified. This means that
// when the first Duration.Top backoff is used, the next element is evaluated and the retried
// promise is satisfied.
lazy val hang: Stream[Duration] = Duration.Top #:: {retried.setDone(); Duration.Top} #:: Stream.empty[Duration]
val backoffs: Stream[Duration] = Stream.fill(numOfRetries)(10.millis) #::: hang
// when
val addr: InstrumentedVar[Addr] = SvcAddr(api, backoffs, "dc1", SvcKey("svc", None), None, None, None, Map.empty, Stats(NullStatsReceiver), new PollState)
addr.underlying.changes.respond(_ => ())
// then
await(retried)
numOfRequests.intValue() should equal(numOfRetries+1)
}
test("should extract nested root cause correctly") {
// given
val cause = Failure("cause")
val failure = Failure("one", Failure("two", Failure("three", cause)))
// when
val extracted = SvcAddr.RootCause.unapply(failure)
// then
extracted should be(Some(cause))
}
test("should extract root cause if there are no nested causes") {
// given
val cause = Failure("cause")
// when
val extracted = SvcAddr.RootCause.unapply(cause)
// then
extracted should be(Some(cause))
}
test("should be Addr.Pending before call to consul returns") {
// given
val api = apiStub { (_, _, _, _, _, _) =>
Future.never
}
// when
val addr: InstrumentedVar[Addr] = SvcAddr(api, hangForLongBackoff, "dc1", SvcKey("svc", None), None, None, None, Map.empty, Stats(NullStatsReceiver), new PollState)
addr.underlying.changes.respond(_ => ())
// then
addr.running shouldBe true
addr.lastStartedAt shouldBe 'defined
addr.lastStoppedAt should not be 'defined
addr.lastUpdatedAt should not be 'defined
addr.underlying.sample() should matchPattern { case Addr.Pending => () }
}
test("should be Addr.Neg when unexpected error occurs initially") {
// given
val api = apiStub { (_, _, _, _, _, _) =>
Future.exception(new Throwable("No path to datacenter"))
}
// when
val addr: InstrumentedVar[Addr] = SvcAddr(api, hangForLongBackoff, "dc1", SvcKey("svc", None), None, None, None, Map.empty, Stats(NullStatsReceiver), new PollState)
addr.underlying.changes.respond(_ => ())
// then
addr.running shouldBe true
addr.lastStartedAt shouldBe 'defined
addr.lastStoppedAt should not be 'defined
addr.lastUpdatedAt shouldBe 'defined
addr.underlying.sample() should matchPattern { case Addr.Neg => () }
}
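// The next test scripts three consul responses with a MockTimer and frozen time:
// an initial bound address, a request that times out, and a later update. It
// asserts that the last known Addr.Bound value survives the timeout.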
test("should use last known state on api timeout") {
// given
val requestCounter = new AtomicInteger()
val (initServiceUpdate, firstAddr) = service()
val (secondSvcUpdate, secondAddr) = service("9.9.9.9", 1024)
val ttl = 1.minute
@volatile var changes: Addr = Addr.Pending
val timer = new MockTimer
val api = apiStub { (_, _, _, _, _, _) =>
val count = requestCounter.incrementAndGet()
count match {
case 1 =>
val response = Indexed[Seq[ServiceNode]](Seq(initServiceUpdate), Some("1"))
Future.sleep(5.minutes)(timer).before(Future.value(response))
case 2 =>
val rspF = Future.sleep(11.minutes)(timer)
.before(Future.exception(new IndividualRequestTimeoutException(10.minutes)))
rspF
case 3 =>
Future.sleep(1.minute)(timer)
.before(Future.value(Indexed[Seq[ServiceNode]](Seq(secondSvcUpdate), Some("2"))))
}
}
// when
Time.withCurrentTimeFrozen { tc =>
val addr: InstrumentedVar[Addr] = SvcAddr(
api,
Stream.fill(10)(ttl),
"dc1",
SvcKey("svc", None),
None,
None,
None,
Map.empty,
Stats(NullStatsReceiver),
new PollState,
transferMetadata = true
)(timer)
addr.underlying.changes.respond {
changes = _
}
// then
tc.advance(5.minutes)
timer.tick()
changes match {
case Addr.Bound(addrs, _) => addrs.head shouldBe Address
.Inet(
firstAddr,
Addr
.Metadata(
Metadata.endpointWeight -> 1.0,
"nd_meta" -> "some_nd_meta"
)
)
case _ => fail("received unexpected Addr on initial service discovery")
}
tc.advance(12.minutes)
timer.tick()
changes match {
case Addr.Bound(addrs, _) => addrs.head shouldBe Address
.Inet(
firstAddr,
Addr
.Metadata(
Metadata.endpointWeight -> 1.0,
"nd_meta" -> "some_nd_meta"
)
)
case _ => fail("received unexpected Addr on timed out service discovery request")
}
// Advance timer to trigger Future.sleep(backoff) in SvcAddr
tc.advance(1.minutes)
timer.tick()
tc.advance(1.minutes)
timer.tick()
eventually {
changes match {
case Addr.Bound(addrs, _) => addrs.head shouldBe Address
.Inet(
secondAddr, Addr.Metadata(
Metadata.endpointWeight -> 1.0,
"nd_meta" -> "some_nd_meta"
)
)
case _ => fail("received unexpected Addr on timed out service discovery request")
}
}
}
}
}
|
BuoyantIO/linkerd
|
namer/consul/src/test/scala/io/buoyant/namer/consul/SvcAddrTest.scala
|
Scala
|
apache-2.0
| 11,658 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.io.File
import kafka.consumer.SimpleConsumer
import org.junit.Test
import junit.framework.Assert._
import kafka.message.ByteBufferMessageSet
import org.scalatest.junit.JUnit3Suite
import kafka.zk.ZooKeeperTestHarness
import kafka.producer._
import kafka.utils.IntEncoder
import kafka.utils.TestUtils._
import kafka.admin.CreateTopicCommand
import kafka.api.FetchRequestBuilder
import kafka.utils.{TestUtils, Utils}
class ServerShutdownTest extends JUnit3Suite with ZooKeeperTestHarness {
val port = TestUtils.choosePort
val props = TestUtils.createBrokerConfig(0, port)
val config = new KafkaConfig(props)
val host = "localhost"
val topic = "test"
val sent1 = List("hello", "there")
val sent2 = List("more", "messages")
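  // Produces messages, shuts the broker down cleanly, verifies the clean-shutdown
  // marker file is written, then restarts the broker and checks that the old data
  // is still readable and that newly produced messages can be fetched as well.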
@Test
def testCleanShutdown() {
var server = new KafkaServer(config)
server.startup()
val producerConfig = getProducerConfig(TestUtils.getBrokerListStrFromConfigs(Seq(config)))
producerConfig.put("key.serializer.class", classOf[IntEncoder].getName.toString)
var producer = new Producer[Int, String](new ProducerConfig(producerConfig))
// create topic
CreateTopicCommand.createTopic(zkClient, topic, 1, 1, "0")
TestUtils.waitUntilMetadataIsPropagated(Seq(server), topic, 0, 1000)
// send some messages
producer.send(sent1.map(m => new KeyedMessage[Int, String](topic, 0, m)):_*)
    // do a clean shutdown and check that the clean shutdown file is written out
server.shutdown()
for(logDir <- config.logDirs) {
val cleanShutDownFile = new File(logDir, server.logManager.CleanShutdownFile)
assertTrue(cleanShutDownFile.exists)
}
producer.close()
/* now restart the server and check that the written data is still readable and everything still works */
server = new KafkaServer(config)
server.startup()
// wait for the broker to receive the update metadata request after startup
TestUtils.waitUntilMetadataIsPropagated(Seq(server), topic, 0, 1000)
producer = new Producer[Int, String](new ProducerConfig(producerConfig))
val consumer = new SimpleConsumer(host, port, 1000000, 64*1024, "")
var fetchedMessage: ByteBufferMessageSet = null
while(fetchedMessage == null || fetchedMessage.validBytes == 0) {
val fetched = consumer.fetch(new FetchRequestBuilder().addFetch(topic, 0, 0, 10000).maxWait(0).build())
fetchedMessage = fetched.messageSet(topic, 0)
}
assertEquals(sent1, fetchedMessage.map(m => Utils.readString(m.message.payload)))
val newOffset = fetchedMessage.last.nextOffset
// send some more messages
producer.send(sent2.map(m => new KeyedMessage[Int, String](topic, 0, m)):_*)
fetchedMessage = null
while(fetchedMessage == null || fetchedMessage.validBytes == 0) {
val fetched = consumer.fetch(new FetchRequestBuilder().addFetch(topic, 0, newOffset, 10000).build())
fetchedMessage = fetched.messageSet(topic, 0)
}
assertEquals(sent2, fetchedMessage.map(m => Utils.readString(m.message.payload)))
consumer.close()
producer.close()
server.shutdown()
Utils.rm(server.config.logDirs)
}
}
|
kavink92/kafka-0.8.0-beta1-src
|
core/src/test/scala/unit/kafka/server/ServerShutdownTest.scala
|
Scala
|
apache-2.0
| 3,963 |
package org.scalatest.tools.scalasbt
import org.scalatest.FunSuite
import org.scalatest.tags.Disk
@Disk
class DiskTaggedSuite extends FunSuite {
test("test 1") {
}
test("test 2") {
}
test("test 3") {
}
}
|
svn2github/scalatest
|
src/test/scala/org/scalatest/tools/scalasbt/DiskTaggedSuite.scala
|
Scala
|
apache-2.0
| 239 |
package ulang
import ulang.expr.Expr
import ulang.expr.Var
import ulang.expr.Pat
import ulang.expr.unify
package object prove {
type Env = Map[Var, Expr]
type Ind = List[(Pat, List[Goal])]
object Env {
val empty: Env = Map()
def apply(dfs: List[(Var, Expr)]): Env = {
Map(dfs: _*)
}
def free(env: Env) = {
}
}
object Ind {
import derive.assert
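    // For every non-empty induction definition, normalize its cases into goals and
    // merge the succedent patterns of those goals into a single pattern per entry.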
def apply(inds: List[ulang.shell.Ind]): Ind = {
val pairs = inds.collect {
case ulang.shell.Ind(cases) if !cases.isEmpty =>
val constrs = cases map Goal.normalized
val pats = constrs.map(_.suc.toPat)
val pat = pats reduce unify.merge
(pat, constrs)
}
pairs
}
}
}
|
gernst/ulang-proto
|
src/main/scala/ulang/prove/package.scala
|
Scala
|
mit
| 740 |
package mountainrangepvp.engine.input
import com.badlogic.gdx.math.Vector2
import mountainrangepvp.engine.input.Bindings._
/**
* Maps from raw key and mouse events to actions.
*/
class InputMapper {
private var emptyStates: Map[String, Boolean] = Map.empty
private var mouseButtons: Map[MouseButton, String] = Map.empty
private var keys: Map[Int, String] = Map.empty
def addState(name: String) = {
emptyStates += name -> false
}
def bindMouseButton(button: MouseButton, action: String) = {
mouseButtons += (button -> action)
}
def bindKeyboard(key: Int, action: String) = {
keys += (key -> action)
}
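  // Maps a raw InputState onto the action map: every bound mouse button or key that
  // is currently down sets its action to true; all other actions remain false.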
def map(state: InputState) = {
var mapped = emptyStates
List(MouseLeft, MouseMiddle, MouseRight).foreach { b =>
if (state.buttonDown(b)) {
        mouseButtons.get(b).foreach(action => mapped += action -> true)
}
}
state.keys.flatMap(keys.get).foreach(mapped += _ -> true)
mapped
}
}
/**
* The current state of the input system.
*/
case class InputState(mouse: Vector2, mouseButtons: Int, keys: Set[Int]) {
def buttonDown(b: Bindings.MouseButton) = b match {
case MouseLeft => (mouseButtons & 1) != 0
case MouseMiddle => (mouseButtons & 2) != 0
case MouseRight => (mouseButtons & 4) != 0
}
}
object Bindings {
sealed trait MouseButton
case object MouseLeft extends MouseButton
case object MouseMiddle extends MouseButton
case object MouseRight extends MouseButton
}
|
thorinii/MountainRangePvP
|
src/main/scala/mountainrangepvp/engine/input/InputMapper.scala
|
Scala
|
mit
| 1,525 |
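// Type-level arithmetic exercise (练习05): natural numbers and their multiplication
// encoded purely in types (被乘数 = multiplicand, 乘数 = multiplier, 自然数 = natural number).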
package 练习05
trait 被乘数 {
type 被乘[T <: 乘数] <: 自然数
}
class 被乘数Positive[Tail <: 被乘数, Head] extends 被乘数 {
override type 被乘[T <: 乘数] = T#乘[Tail, Head]
}
class 被乘数Zero extends 被乘数 {
override type 被乘[T <: 乘数] = 自然数Zero
}
trait 乘数 {
type 乘[T <: 被乘数, H] <: 自然数
}
class 乘数Positive[Tail <: 乘数, Head] extends 乘数 {
override type 乘[T <: 被乘数, H] = 自然数Positive[Tail#乘[T, H], Head, H]
}
class 乘数Zero[Tail <: 乘数] extends 乘数 {
override type 乘[T <: 被乘数, H] = T#被乘[Tail]
}
trait 自然数
class 自然数Positive[Tail <: 自然数, Head1, Head2] extends 自然数
class 自然数Zero extends 自然数
|
djx314/ubw
|
a28-练习/src/main/scala/练习05/乘法.scala
|
Scala
|
bsd-3-clause
| 775 |
// Copyright 2017 Foursquare Labs Inc. All Rights Reserved.
package io.fsq.common.testing.matchers.test
import io.fsq.common.testing.AssertException
import io.fsq.common.testing.matchers.{FoursquareMatchers => FM}
import org.junit.{Assert => A, Test}
import scala.util.matching.Regex
class FoursquareMatchersTest {
def anyAssert: PartialFunction[Throwable, Boolean] = { case e: AssertionError => true }
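  // Matches an AssertionError whose lowercased message contains the given regex.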
def assertWithMessage(r: Regex): PartialFunction[Throwable, Boolean] = {
case e: AssertionError => r.findFirstMatchIn(e.getMessage.toLowerCase).isDefined
}
@Test
def testIsNoneMatcher(): Unit = {
A.assertThat((None: Option[Int]), FM.isNone[Int])
AssertException(anyAssert)(
A.assertThat((Some(2): Option[Int]), FM.isNone[Int])
)
}
@Test
def testExistsMatcher(): Unit = {
A.assertThat(Some(5), FM.exists[Int](_ > 2, "_ > 2"))
AssertException(anyAssert) {
A.assertThat(Some(1), FM.exists[Int](_ > 2, "_ > 2"))
}
AssertException(assertWithMessage("expected: some\\\\[int\\\\] matching predicate: _ > 2".r)) {
A.assertThat(None: Option[Int], FM.exists[Int](_ > 2, "_ > 2"))
}
}
@Test
def testEqualsCollectionMatcher(): Unit = {
A.assertThat(Vector(1, 2), FM.equalsCollection(Vector(1, 2)))
AssertException(assertWithMessage("lengths differed".r))(
A.assertThat(Vector(1), FM.equalsCollection(Vector(1, 2)))
)
AssertException(assertWithMessage("index 0.*expected=2.*actual=1".r))(
A.assertThat(Vector(1), FM.equalsCollection(Vector(2)))
)
}
}
|
foursquare/fsqio
|
test/jvm/io/fsq/common/testing/matchers/test/FoursquareMatchersTest.scala
|
Scala
|
apache-2.0
| 1,553 |
/**
 * Object used to test the required functionality
 */
object Prueba extends App{
import Huffman._
  /**
   * Huffman code for the French language, obtained from the web page
   * http://fr.wikipedia.org/wiki/Fr%C3%A9quence_d%27apparition_des_lettres_en_fran%C3%A7ais
   */
val codigoHuffmanFrances: Nodo =
NodoIntermedio(
NodoIntermedio(
NodoIntermedio(
NodoHoja('s', 121895),
NodoIntermedio(
NodoHoja('d', 56269),
NodoIntermedio(
NodoIntermedio(
NodoIntermedio(
NodoHoja('x', 5928),
NodoHoja('j', 8351),
List('x', 'j'), 14279
),
NodoHoja('f', 16351),
List('x', 'j', 'f'), 30630
),
NodoIntermedio(
NodoIntermedio(
NodoIntermedio(
NodoIntermedio(
NodoHoja('z', 2093),
NodoIntermedio(
NodoHoja('k', 745),
NodoHoja('w', 1747),
List('k', 'w'), 2492
),
List('z', 'k', 'w'), 4585
),
NodoHoja('y', 4725),
List('z', 'k', 'w', 'y'), 9310
), NodoHoja('h', 11298),
List('z', 'k', 'w', 'y', 'h'), 20608
),
NodoHoja('q', 20889),
List('z', 'k', 'w', 'y', 'h', 'q'), 41497
), List('x', 'j', 'f', 'z', 'k', 'w', 'y', 'h', 'q'), 72127
), List('d', 'x', 'j', 'f', 'z', 'k', 'w', 'y', 'h', 'q'), 128396
), List('s', 'd', 'x', 'j', 'f', 'z', 'k', 'w', 'y', 'h', 'q'), 250291
),
NodoIntermedio(
NodoIntermedio(
NodoHoja('o', 82762),
NodoHoja('l', 83668),
List('o', 'l'), 166430
),
NodoIntermedio(
NodoIntermedio(
NodoHoja('m', 45521),
NodoHoja('p', 46335),
List('m', 'p'), 91856
),
NodoHoja('u', 96785),
List('m', 'p', 'u'), 188641
),
List('o', 'l', 'm', 'p', 'u'), 355071
),
List('s', 'd', 'x', 'j', 'f', 'z', 'k', 'w', 'y', 'h', 'q', 'o', 'l', 'm', 'p', 'u'), 605362
),
NodoIntermedio(
NodoIntermedio(
NodoIntermedio(
NodoHoja('r', 100500),
NodoIntermedio(
NodoHoja('c', 50003),
NodoIntermedio(
NodoHoja('v', 24975),
NodoIntermedio(
NodoHoja('g', 13288),
NodoHoja('b', 13822),
List('g', 'b'), 27110
),
List('v', 'g', 'b'), 52085
),
List('c', 'v', 'g', 'b'), 102088
),
List('r', 'c', 'v', 'g', 'b'), 202588
),
NodoIntermedio(
NodoHoja('n', 108812),
NodoHoja('t', 111103),
List('n', 't'), 219915
),
List('r', 'c', 'v', 'g', 'b', 'n', 't'), 422503
),
NodoIntermedio(
NodoHoja('e', 225947),
NodoIntermedio(
NodoHoja('i', 115465),
NodoHoja('a', 117110),
List('i', 'a'), 232575
),
List('e', 'i', 'a'), 458522
),
List('r', 'c', 'v', 'g', 'b', 'n', 't', 'e', 'i', 'a'), 881025
),
List('s', 'd', 'x', 'j', 'f', 'z', 'k', 'w', 'y', 'h', 'q', 'o', 'l', 'm', 'p', 'u', 'r', 'c', 'v', 'g', 'b', 'n', 't', 'e', 'i', 'a'), 1486387
)
print(codigoHuffmanFrances)
  /**
   * Secret message to decode
   */
val mensajeSecreto: List[Int] = List(0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1)
println(mensajeSecreto)
  /**
   * The message is decoded
   */
val mensajeDecodificado: List[Char] = decodificar(codigoHuffmanFrances, mensajeSecreto)
println(mensajeDecodificado)
  // The same is attempted using the encoding table
val codificacionTabla = codificacionRapida(codigoHuffmanFrances)(mensajeDecodificado)
  // Must be equal to mensajeSecreto
println(mensajeSecreto == codificacionTabla)
}
|
romanarranz/NTP
|
P5/src/Prueba.scala
|
Scala
|
mit
| 4,414 |
/****************************************************************************
* Copyright (C) 2015 Łukasz Szpakowski. *
* *
* This software is licensed under the GNU General Public License *
* v3 or later. See the LICENSE file for the full licensing terms. *
****************************************************************************/
package pl.luckboy.issuenotifier
import java.net.URI
import java.text.SimpleDateFormat
import java.util.Date
import java.util.TimeZone
import org.json.JSONArray
import org.json.JSONObject
import com.github.rjeschke.txtmark
import org.jsoup.Jsoup
import org.jsoup.safety.Whitelist
import HttpUtils._
class GitHubDataFetcher(val apiURI: String) extends DataFetcher
{
override val defaultPerPage = 10
private def stringFromRequestIssueState(state: RequestIssueState) =
state match {
case IssueState(State.Open) => "open"
case IssueState(State.Closed) => "closed"
case All => "all"
}
private def stringFromIssueSorting(sorting: IssueSorting.Value) =
sorting match {
case IssueSorting.Created => "created"
case IssueSorting.Updated => "updated"
case IssueSorting.Comments => "comments"
}
private def stringFromDirection(dir: Direction.Value) =
dir match {
case Direction.Asc => "asc"
case Direction.Desc => "desc"
}
private val simpleDateFormat = {
val sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'")
sdf.setTimeZone(TimeZone.getTimeZone("UTC"))
sdf
}
private def stringFromSince(since: Date) = simpleDateFormat.format(since)
private def stateFromString(s: String) =
s match {
case "open" => State.Open
case "closed" => State.Closed
}
private val apiRequestHeaders = Map("Accept" -> "application/vnd.github.v3+json")
private val whitelist = Whitelist.basic()
override def fetchIssue(repos: Repository, issueInfo: IssueInfo, timeout: Option[Int]): Either[Exception, Issue] = {
val uri = apiURI + "/repos/" + encode(repos.userName) + "/" + encode(repos.name) + "/issues/" ++ encode(issueInfo.number.toString)
getJSONObject(new URI(uri), timeout, apiRequestHeaders) match {
case Left(e) => Left(e)
case Right(jsonObject) => issueFromJSONObject(jsonObject)
}
}
private def issueFromJSONObject(jsonObject: JSONObject) =
try {
      issueInfoFromJSONObject(jsonObject) match {
case Left(e) => Left(e)
case Right(issueInfo) =>
val bodyHtml = Jsoup.clean(txtmark.Processor.process(jsonObject.getString("body")), whitelist)
Right(Issue(issueInfo, bodyHtml))
}
} catch {
case e: Exception => Left(e)
}
private def userFromJSONObject(jsonObject: JSONObject) =
try {
val id = jsonObject.getLong("id").toString
val name = jsonObject.getString("login")
val avatarURI = jsonObject.getString("avatar_url")
Right(User(id, name, avatarURI))
} catch {
case e: Exception => Left(e)
}
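  // Fetches one page of issue summaries for the repository, translating the optional
  // filters into GitHub query parameters (state, sort, direction, since, page, per_page).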
override def fetchIssueInfos(repos: Repository, state: Option[RequestIssueState], sorting: Option[IssueSorting.Value], dir: Option[Direction.Value], since: Option[Date], page: Option[Long], perPage: Option[Long], timeout: Option[Int]): Either[Exception, Vector[IssueInfo]] = {
val paramMap = Map("per_page" -> perPage.getOrElse(defaultPerPage).toString) ++
state.map { s => ("state" -> stringFromRequestIssueState(s)) } ++
sorting.map { s => ("sort" -> stringFromIssueSorting(s)) } ++
dir.map { d => ("direction" -> stringFromDirection(d)) } ++
since.map { s => ("since" -> stringFromSince(s)) } ++
page.map { p => ("page" -> p.toString) }
val paramMapStr = stringFromParams(paramMap)
val uri = apiURI + "/repos/" + encode(repos.userName) + "/" + encode(repos.name) + "/issues" + (if(paramMapStr != "") "?" + paramMapStr else "")
getJSONArray(new URI(uri), timeout, apiRequestHeaders) match {
case Left(e) => Left(e)
case Right(jsonArray) =>
(0 until jsonArray.length()).foldLeft(Right(Vector()): Either[Exception, Vector[IssueInfo]]) {
case (Left(e), _) => Left(e)
case (Right(issueInfos), i) =>
issueInfoFromJSONObject(jsonArray.getJSONObject(i)) match {
case Left(e) => Left(e)
case Right(issueInfo) => Right(issueInfos :+ issueInfo)
}
}
}
}
private def issueInfoFromJSONObject(jsonObject: JSONObject) =
try {
val id = jsonObject.getLong("id").toString
val number = jsonObject.getLong("number")
val state = stateFromString(jsonObject.getString("state"))
val title = jsonObject.getString("title")
val commentCount = jsonObject.getLong("comments")
val closedAtStr = if(jsonObject.has("closed_at") && !jsonObject.isNull("closed_at"))
jsonObject.getString("closed_at")
else
""
val createdAtStr = jsonObject.getString("created_at")
val updatedAtStr = jsonObject.getString("updated_at")
val closedAt = if(closedAtStr != "") Some(simpleDateFormat.parse(closedAtStr)) else None
val createdAt = simpleDateFormat.parse(createdAtStr)
val updatedAt = simpleDateFormat.parse(updatedAtStr)
userFromJSONObject(jsonObject.getJSONObject("user")) match {
case Left(e) => Left(e)
case Right(user) => Right(IssueInfo(id, number, state, title, commentCount, user, closedAt, createdAt, updatedAt))
}
} catch {
case e: Exception => Left(e)
}
private def stringFromCommentSorting(sorting: CommentSorting.Value) =
sorting match {
case CommentSorting.Created => "created"
case CommentSorting.Updated => "updated"
}
override def fetchComments(repos: Repository, issueInfo: IssueInfo, sorting: Option[CommentSorting.Value], dir: Option[Direction.Value], since: Option[Date], page: Option[Long], perPage: Option[Long], timeout: Option[Int]): Either[Exception, Vector[Comment]] = {
val paramMap = Map("per_page" -> perPage.getOrElse(defaultPerPage).toString) ++
sorting.map { s => ("sort" -> stringFromCommentSorting(s)) } ++
dir.map { d => ("direction" -> stringFromDirection(d)) } ++
since.map { s => ("since" -> stringFromSince(s)) } ++
page.map { p => ("page" -> p.toString) }
val paramMapStr = stringFromParams(paramMap)
val uri = apiURI + "/repos/" + encode(repos.userName) + "/" + encode(repos.name) + "/issues/" + encode(issueInfo.number.toString) + "/comments" + (if(paramMapStr != "") "?" + paramMapStr else "")
getJSONArray(new URI(uri), timeout, apiRequestHeaders) match {
case Left(e) => Left(e)
case Right(jsonArray) =>
(0 until jsonArray.length()).foldLeft(Right(Vector()): Either[Exception, Vector[Comment]]) {
case (Left(e), _) => Left(e)
case (Right(comments), i) =>
commentFromJSONObject(jsonArray.getJSONObject(i)) match {
case Left(e) => Left(e)
case Right(comment) => Right(comments :+ comment)
}
}
}
}
private def commentFromJSONObject(jsonObject: JSONObject) =
try {
val id = jsonObject.getLong("id").toString
val bodyHtml = Jsoup.clean(txtmark.Processor.process(jsonObject.getString("body")), whitelist)
val createdAtStr = jsonObject.getString("created_at")
val updatedAtStr = jsonObject.getString("updated_at")
val createdAt = simpleDateFormat.parse(createdAtStr)
val updatedAt = simpleDateFormat.parse(updatedAtStr)
userFromJSONObject(jsonObject.getJSONObject("user")) match {
case Left(e) => Left(e)
case Right(user) => Right(Comment(id, bodyHtml, user, createdAt, updatedAt))
}
} catch {
case e: Exception => Left(e)
}
}
|
luckboy/IssueNotifier
|
src/main/scala/pl/luckboy/issuenotifier/GitHubDataFetcher.scala
|
Scala
|
gpl-3.0
| 8,031 |
package com.treode.server
import scala.util.parsing.input.OffsetPosition
import com.treode.store.TableId
import org.scalatest.FreeSpec
import SchemaParser._
class SchemaParserSpec extends FreeSpec {
def assertSuccess (s: String, expected: Schema): Unit =
assertResult (CompilerSuccess (expected)) (parse (s))
def assertFailure (s: String, expected: List [Message]): Unit =
assertResult (CompilerFailure (expected)) (parse (s))
"When inputs are correct" - {
"The parse should parse table names successfully" in {
var map = Map.empty [String, TableId]
map += "table_1v" -> 1
assertSuccess ("table table_1v { id : \\t 1 \\n; };", Schema (map))
}
"The parse should parse octal, hexadecimal and decimal id successfully" in {
var map = Map.empty [String, TableId]
map += "table1" -> 1
map += "table2" -> 31
map += "table3" -> 25
assertSuccess ("table table1 { id : \\t 1 \\n ;} ; table table2 {id : 0x1F;} ;\\n table table3 {id : 031;} ;\\t ", Schema (map))
}}
"When inputs are incorrect" - {
"The parser should report one error" in {
val input = "table top-1\\n{ id : 9 ; };"
assertFailure (input, List (ParseError ("Bad definition of clause, expected\\ntable <tablename> {\\n Field1;Field2;..\\n};\\nstarting", OffsetPosition (input,0))))
}
"The parser should report two errors" in {
val input = "tabe top2\\n{\\n id: 10 }; table top3 { id: 10 ; ;"
assertFailure (input, List (ParseError ("Bad definition of clause, expected\\ntable <tablename> {\\n Field1;Field2;..\\n};\\nstarting", OffsetPosition (input,0)), ParseError ("Bad definition of clause, expected\\ntable <tablename> {\\n Field1;Field2;..\\n};\\nstarting", OffsetPosition (input,24)) ))
}
"The parser should report three errors" in {
val input = "table top\\n{\\n i:9;\\n; \\ntable top2\\n{\\n id:g;\\n};\\ntable top3 {\\n id \\t 12 ;\\n}\\n;"
assertFailure (input, List (ParseError ("Bad definition of clause, expected\\ntable <tablename> {\\n Field1;Field2;..\\n};\\nstarting", OffsetPosition (input,0)), ParseError ("Expected long", OffsetPosition (input,40)), ParseError ("Id field improper, expected\\nid : <long> ;\\nbut found", OffsetPosition (input,61)) ))
}}}
|
Treode/store
|
server/test/com/treode/server/SchemaParserSpec.scala
|
Scala
|
apache-2.0
| 2,252 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.values
import com.spotify.scio.testing.PipelineSpec
class SCollectionWithSideOutputTest extends PipelineSpec {
"SCollectionWithSideOutput" should "support map()" in {
runWithContext { sc =>
val p1 = sc.parallelize(Seq("a", "b", "c"))
val p2 = SideOutput[String]()
val (main, side) = p1.withSideOutputs(p2).map { (x, s) => s.output(p2, x + "2"); x + "1" }
main should containInAnyOrder(Seq("a1", "b1", "c1"))
side(p2) should containInAnyOrder(Seq("a2", "b2", "c2"))
}
}
it should "support flatMap()" in {
runWithContext { sc =>
val p1 = sc.parallelize(Seq("a", "b", "c"))
val p2 = SideOutput[String]()
val (main, side) = p1.withSideOutputs(p2).flatMap { (x, s) =>
s.output(p2, x + "2x").output(p2, x + "2y")
Seq(x + "1x", x + "1y")
}
main should containInAnyOrder(Seq("a1x", "a1y", "b1x", "b1y", "c1x", "c1y"))
side(p2) should containInAnyOrder(Seq("a2x", "a2y", "b2x", "b2y", "c2x", "c2y"))
}
}
it should "not break when a side output is not applied (#1587)" in runWithContext { sc =>
val nSideOut = SideOutput[Int]()
val expected = List(2, 4, 6, 8, 10)
val elements = sc.parallelize(1 to 10)
val (even, _) =
elements
.withSideOutputs(nSideOut)
.flatMap { case (n, ctx) =>
if (n % 2 == 0)
Some(n)
else {
ctx.output(nSideOut, n)
None
}
}
even should containInAnyOrder(expected)
}
}
|
spotify/scio
|
scio-test/src/test/scala/com/spotify/scio/values/SCollectionWithSideOutputTest.scala
|
Scala
|
apache-2.0
| 2,137 |
package com.github.tminglei.slickpg
import java.sql.{Timestamp, Time, Date}
import java.util.Calendar
import java.text.SimpleDateFormat
import org.scalatest.FunSuite
import scala.concurrent.Await
import scala.concurrent.duration._
class PgDateSupportSuite extends FunSuite {
import MyPostgresDriver.api._
val db = Database.forURL(url = dbUrl, driver = "org.postgresql.Driver")
val dateFormat = new SimpleDateFormat("yyyy-MM-dd")
val timeFormat = new SimpleDateFormat("HH:mm:ss")
val tsFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
val tsFormat1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS")
def date(str: String) = new Date(dateFormat.parse(str).getTime)
def time(str: String) = new Time(timeFormat.parse(str).getTime)
def ts(str: String) = new Timestamp( (if (str.contains(".")) tsFormat1 else tsFormat) .parse(str).getTime )
def tstz(str: String) = PgDateSupportUtils.parseCalendar(str)
case class DatetimeBean(
id: Long,
date: Date,
time: Time,
timestamp: Timestamp,
timestamptz: Calendar,
interval: Interval
)
class DatetimeTable(tag: Tag) extends Table[DatetimeBean](tag, "DatetimeTest") {
def id = column[Long]("id", O.AutoInc, O.PrimaryKey)
def date = column[Date]("date")
def time = column[Time]("time", O.Default(PgDateSupportSuite.this.time("00:00:00")))
def timestamp = column[Timestamp]("timestamp")
def timestamptz = column[Calendar]("timestamptz")
def interval = column[Interval]("interval")
def * = (id, date, time, timestamp, timestamptz, interval) <> (DatetimeBean.tupled, DatetimeBean.unapply)
}
val Datetimes = TableQuery[DatetimeTable]
//------------------------------------------------------------------------------
val testRec1 = new DatetimeBean(101L, date("2010-11-3"), time("12:33:01"),
ts("2001-1-3 13:21:00.103"), tstz("2001-01-03 13:21:00+08:00"), Interval("1 days 1 hours"))
val testRec2 = new DatetimeBean(102L, date("2011-3-2"), time("3:14:7"),
ts("2012-5-8 11:31:06"), tstz("2012-05-08 11:31:06-05:00"), Interval("1 years 36 mons 127 days"))
val testRec3 = new DatetimeBean(103L, date("2000-5-19"), time("11:13:34"),
ts("2019-11-3 13:19:03"), tstz("2019-11-03 13:19:03+03:00"), Interval("63 hours 16 mins 2 secs"))
test("Date Lifted support") {
Await.result(db.run(
DBIO.seq(
sqlu"SET TIMEZONE TO '+8';",
Datetimes.schema create,
///
Datetimes forceInsertAll List(testRec1, testRec2, testRec3)
).andThen(
DBIO.seq(
Datetimes.to[List].result.map(
r => List(testRec1, testRec2, testRec3).zip(r).map {
case (b1, b2) => {
assert(b1.date === b2.date)
assert(b1.time === b2.time)
assert(b1.timestamp === b2.timestamp)
}
}
),
// +
Datetimes.filter(_.id === 101L.bind).map(r => r.date + r.time).result.head.map(
r => assert(ts("2010-11-3 12:33:01.000") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.time + r.date).result.head.map(
r => assert(ts("2010-11-3 12:33:01.000") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.date +++ r.interval).result.head.map(
r => assert(ts("2010-11-4 01:00:00.000") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.time +++ r.interval).result.head.map(
r => assert(time("13:33:01") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamp +++ r.interval).result.head.map(
r => assert(ts("2001-1-4 14:21:00.103") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.date ++ 7.bind).result.head.map(
r => assert(date("2010-11-10") === r)
),
// -
Datetimes.filter(_.id === 101L.bind).map(r => r.date -- 1.bind).result.head.map(
r => assert(date("2010-11-2") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamp -- r.time).result.head.map(
r => assert(ts("2001-1-3 00:47:59.103") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamp - r.date).result.head.map(
r => assert(Interval("-3590 days -10 hours -38 mins -59.897 secs") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.date.asColumnOf[Timestamp] - r.timestamp).result.head.map(
r => assert(Interval("3590 days 10 hours 38 mins 59.897 secs") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.date - date("2009-7-5")).result.head.map(
r => assert(486 === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.time - time("2:37:00").bind).result.head.map(
r => assert(Interval("9 hours 56 mins 1.00 secs") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamp --- r.interval).result.head.map(
r => assert(ts("2001-1-2 12:21:00.103") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.time --- r.interval).result.head.map(
r => assert(time("11:33:01") === r)
),
Datetimes.filter(_.id === 101L.bind).map(r => r.date --- r.interval).result.head.map(
r => assert(ts("2010-11-1 23:00:00.0") === r)
),
// age
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamp.age === r.timestamp.age(Functions.currentDate.asColumnOf[Timestamp])).result.head.map(
r => assert(true === r)
),
// part
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamp.part("year")).result.head.map(
r => assert(Math.abs(2001 - r) < 0.00001d)
),
Datetimes.filter(_.id === 102L.bind).map(r => r.interval.part("year")).result.head.map(
r => assert(Math.abs(4 - r) < 0.00001d)
),
// trunc
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamp.trunc("day")).result.head.map(
r => assert(ts("2001-1-3 00:00:00.0") === r)
),
// interval
DBIO.seq(
// +
Datetimes.filter(_.id === 101L.bind).map(r => r.interval + Interval("3 hours").bind).result.head.map(
r => assert(Interval("1 days 4 hours") === r)
),
// -x
Datetimes.filter(_.id === 101L.bind).map(r => -r.interval).result.head.map(
r => assert(Interval("-1 days -1 hours") === r)
),
// -
Datetimes.filter(_.id === 101L.bind).map(r => r.interval - Interval("2 hours").bind).result.head.map(
r => assert(Interval("1 days -1 hours") === r)
),
// *
Datetimes.filter(_.id === 101L.bind).map(r => r.interval * 3.5).result.head.map(
r => assert(Interval("3 days 15 hours 30 mins") === r)
),
// /
Datetimes.filter(_.id === 101L.bind).map(r => r.interval / 5.0).result.head.map(
r => assert(Interval("5 hours") === r)
),
// justifyDays
Datetimes.filter(_.id === 102L.bind).map(r => r.interval.justifyDays).result.head.map(
r => assert(Interval("4 years 4 mons 7 days") === r)
),
// justifyHours
Datetimes.filter(_.id === 103L.bind).map(r => r.interval.justifyHours).result.head.map(
r => assert(Interval("2 days 15 hours 16 mins 2 secs") === r)
),
// justifyInterval
Datetimes.filter(_.id === 103L.bind).map(r => r.interval.justifyInterval).result.head.map(
r => assert(Interval("2 days 15 hours 16 mins 2 secs") === r)
)
),
// timestamp with time zone
DBIO.seq(
// age
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamptz.age === r.timestamptz.age(Functions.currentDate.asColumnOf[Calendar])).result.head.map(
r => assert(true === r)
),
// part
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamptz.part("year")).result.head.map(
r => assert(Math.abs(2001 - r) < 0.00001d)
),
// trunc
Datetimes.filter(_.id === 101L.bind).map(r => r.timestamptz.trunc("day")).result.head.map(
r => assert(tstz("2001-1-3 00:00:00+8:00") === r)
)
)
)
).andFinally(
Datetimes.schema drop
).transactionally
), Duration.Inf)
}
}
|
timcharper/slick-pg
|
src/test/scala/com/github/tminglei/slickpg/PgDateSupportSuite.scala
|
Scala
|
bsd-2-clause
| 8,664 |
/**
* Copyright (C) 2009-2011 the original author or authors.
* See the notice.md file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.scalate.ssp
import org.fusesource.scalate.InvalidSyntaxException
import util.parsing.input.{ Positional, CharSequenceReader }
import org.fusesource.scalate.support.{ Text, ScalaParseSupport }
sealed abstract class PageFragment extends Positional {
def tokenName = toString
}
case class CommentFragment(comment: Text) extends PageFragment
case class DollarExpressionFragment(code: Text) extends PageFragment
case class ExpressionFragment(code: Text) extends PageFragment
case class ScriptletFragment(code: Text) extends PageFragment
case class TextFragment(text: Text) extends PageFragment
case class AttributeFragment(
kind: Text,
name: Text,
className: Text,
defaultValue: Option[Text],
autoImport: Boolean
) extends PageFragment
abstract class Directive(override val tokenName: String) extends PageFragment
case class IfFragment(code: Text) extends Directive("#if")
case class ElseIfFragment(code: Text) extends Directive("#elseif")
case class ElseFragment() extends Directive("#else")
case class MatchFragment(code: Text) extends Directive("#match")
case class CaseFragment(code: Text) extends Directive("#case")
case class OtherwiseFragment() extends Directive("#otherwise")
case class ForFragment(code: Text) extends Directive("#for")
case class SetFragment(code: Text) extends Directive("#set")
case class DoFragment(code: Text) extends Directive("#do")
case class ImportFragment(code: Text) extends Directive("#import")
case class EndFragment() extends Directive("#end")
/**
* Parser for the SSP template language
*/
class SspParser extends ScalaParseSupport {
var skipWhitespaceOn = false
override def skipWhitespace = skipWhitespaceOn
def skip_whitespace[T](p: => Parser[T]): Parser[T] = Parser[T] {
in =>
skipWhitespaceOn = true
val result = p(in)
skipWhitespaceOn = false
result
}
val anySpace = text("""[ \\t]*""".r)
val identifier = text("""[a-zA-Z0-9\\$_]+""".r)
val typeName = text(scalaType)
val someText = text(""".+""".r)
val attribute = skip_whitespace(opt(text("import")) ~ text("var" | "val") ~ identifier ~ (":" ~> typeName)) ~ ("""\\s*""".r ~> opt("""=\\s*""".r ~> upto("""\\s*%>""".r))) ^^ {
case (p_import ~ p_kind ~ p_name ~ p_type) ~ p_default => AttributeFragment(p_kind, p_name, p_type, p_default, p_import.isDefined)
}
val literalPart: Parser[Text] =
upto("<%" | """\\<%""" | """\\\\<%""" | "${" | """\\${""" | """\\\\${""" | """\\#""" | """\\\\#""" | directives) ~
opt(
"""\\<%""" ~ opt(literalPart) ^^ { case x ~ y => "<%" + y.getOrElse("") } |
"""\\${""" ~ opt(literalPart) ^^ { case x ~ y => "${" + y.getOrElse("") } |
"""\\#""" ~ opt(literalPart) ^^ { case x ~ y => "#" + y.getOrElse("") } |
"""\\\\""" ^^ { s => """\\""" }
) ^^ {
case x ~ Some(y) => x + y
case x ~ None => x
}
val tagEnding = "+%>" | """%>[ \\t]*\\r?\\n""".r | "%>"
val commentFragment = wrapped("<%--", "--%>") ^^ { CommentFragment(_) }
val altCommentFragment = wrapped("<%#", "%>") ^^ { CommentFragment(_) }
val dollarExpressionFragment = wrapped("${", "}") ^^ { DollarExpressionFragment(_) }
val expressionFragment = wrapped("<%=", "%>") ^^ { ExpressionFragment(_) }
val attributeFragement = prefixed("<%@", attribute <~ anySpace ~ tagEnding)
val scriptletFragment = wrapped("<%", tagEnding) ^^ { ScriptletFragment(_) }
val textFragment = literalPart ^^ { TextFragment(_) }
val pageFragment: Parser[PageFragment] = positioned(directives | commentFragment | altCommentFragment | dollarExpressionFragment |
attributeFragement | expressionFragment | scriptletFragment |
textFragment)
val pageFragments = rep(pageFragment)
def directives: Parser[PageFragment] = ifExpression | elseIfExpression | elseExpression |
matchExpression | caseExpression | otherwiseExpression |
forExpression | doExpression | setExpression | velocityScriplet | importExpression | endExpression
// if / elseif / else
def ifExpression = expressionDirective("if") ^^ { IfFragment(_) }
def elseIfExpression = expressionDirective("elseif" | "elif") ^^ { ElseIfFragment(_) }
def elseExpression = emptyDirective("else") ^^ { case a => ElseFragment() }
// match / case / otherwise
def matchExpression = expressionDirective("match") ^^ { MatchFragment(_) }
def caseExpression = expressionDirective("case") ^^ { CaseFragment(_) }
def otherwiseExpression = emptyDirective("otherwise") ^^ { case a => OtherwiseFragment() }
// other directives
def velocityScriplet = wrapped("#{", "}#") ^^ { ScriptletFragment(_) }
def forExpression = expressionDirective("for" ~ opt("each")) ^^ { ForFragment(_) }
def setExpression = expressionDirective("set") ^^ { SetFragment(_) }
def doExpression = expressionDirective("do") ^^ { DoFragment(_) }
def importExpression = expressionDirective("import") ^^ { ImportFragment(_) }
def endExpression = emptyDirective("end") <~ """[ \\t]*\\r?\\n?""".r ^^ { case a => EndFragment() }
// useful for implementing directives
def emptyDirective(name: String) = text(("#" + name) | ("#(" + name + ")"))
def expressionDirective(name: String) = ("#" ~ name ~ anySpace ~ "(") ~> scalaExpression <~ ")"
def expressionDirective[T](p: Parser[T]) = ("#" ~ p ~ anySpace ~ "(") ~> scalaExpression <~ """\\)[ \\t]*\\r?\\n?""".r
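  // Parses the body of a directive as a balanced-parenthesis Scala expression:
  // plain text, optionally one nested parenthesised group (parsed recursively),
  // then more text, re-assembled into a single Text value.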
def scalaExpression: Parser[Text] = {
text(
(rep(nonParenText) ~ opt("(" ~> scalaExpression <~ ")") ~ rep(nonParenText)) ^^ {
case a ~ b ~ c =>
val mid = b match {
case Some(tb) => "(" + tb + ")"
case tb => ""
}
a.mkString("") + mid + c.mkString("")
}
)
}
val nonParenText = characterLiteral | stringLiteral | """[^\\(\\)\\'\\"]+""".r
private def phraseOrFail[T](p: Parser[T], in: String): T = {
val x = phrase(p)(new CharSequenceReader(in))
x match {
case Success(result, _) => result
case NoSuccess(message, next) => throw new InvalidSyntaxException(message, next.pos);
}
}
def getPageFragments(in: String): List[PageFragment] = {
phraseOrFail(pageFragments, in)
}
def getAttribute(in: String): AttributeFragment = {
phraseOrFail(attribute, in)
}
}
|
maslovalex/scalate
|
scalate-core/src/main/scala/org/fusesource/scalate/ssp/SspParser.scala
|
Scala
|
apache-2.0
| 6,980 |
/**
Open Bank Project - API
Copyright (C) 2011, 2014, TESOBE / Music Pictures Ltd
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Email: [email protected]
TESOBE / Music Pictures Ltd
Osloerstrasse 16/17
Berlin 13359, Germany
This product includes software developed at
TESOBE (http://www.tesobe.com/)
by
Ayoub Benali: ayoub AT tesobe DOT com
*/
package com.tesobe.status.model
import java.util.Date
case class GetSupportedBanks
case class SupportedBanksReply(
banks: Set[BankInfo]
)
case class BankInfo(
country: String,
nationalIdentifier: String,
name: String
)
case class GetBanksStatues
case class BankStatus(
country: String,
id: String,
status: Boolean,
lastUpdate: Date
)
case class BanksStatuesReply(
statues: Set[BankStatus]
){
def find(country: String, id: String): Option[BankStatus]= {
statues.find(s => {s.country == country && s.id == id})
}
}
case class DetailedBankStatus(
country: String,
id: String,
name: String,
tested: Boolean,
lastTest: Option[Date]
)
case class DetailedBankStatues(
statues: Set[DetailedBankStatus]
)
|
OpenBankProject/status
|
src/main/scala/com/tesobe/status/model/Message.scala
|
Scala
|
agpl-3.0
| 1,682 |
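// Reverses a list by folding from the left and prepending each element to the accumulator.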
def f(arr:List[Int]):List[Int] = arr.foldLeft(List[Int]())((a, b) => b::a)
|
franklingu/HackerRank
|
functional-programming/introduction/reverse-a-list/reverse_a_list.scala
|
Scala
|
mit
| 75 |
package yang.notification
import akka.actor.{ActorRef, ActorRefFactory, Props}
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.annotation._
/**
* Created by y28yang on 3/31/2016.
*/
@Configuration
class NotificationCoreModule {
@Autowired
var actorFactory: ActorRefFactory = null
@Autowired
var enrichActor: ActorRef = null
@Autowired
var userChangedNotifiable: UserChangedNotifiable = null
@Autowired
var userActorCreator: UserActorCreator = _
@Autowired
var alarmNotificationConvert:AlarmNotificationConvert=_
@Bean
def userManagerActor: ActorRef = {
actorFactory.actorOf(Props(new UserManager(userActorCreator,userChangedNotifiable)), "UserManager")
}
@Bean
def sourceActor: ActorRef = {
userChangedNotifiable.startScheduleCheck()
actorFactory.actorOf(Props(new NotificationSourceActor(alarmNotificationConvert,enrichActor)), "NotificationSource")
}
}
|
wjingyao2008/firsttry
|
NextGenAct/src/main/scala/yang/notification/NotificationCoreModule.scala
|
Scala
|
apache-2.0
| 964 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.tensor
import java.util.Comparator
import breeze.linalg.{DenseMatrix => BrzDenseMatrix, DenseVector => BrzDenseVector}
import com.intel.analytics.bigdl.mkl.MKL
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.RandomGenerator._
import com.intel.analytics.bigdl.utils.{File, Table}
import org.apache.spark.mllib.linalg.{DenseMatrix, DenseVector, Matrix, Vector}
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
import scala.collection.JavaConverters._
@SerialVersionUID(5876322619614900645L)
private[tensor] class DenseTensor[@specialized T: ClassTag](
private[tensor] var _storage: ArrayStorage[T],
private[tensor] var _storageOffset: Int,
private[tensor] var _size: Array[Int],
private[tensor] var _stride: Array[Int],
var nDimension: Int)(implicit ev: TensorNumeric[T])
extends Tensor[T] {
override def isEmpty: Boolean = this.storage() == null || this.storage().length() == 0
override def isScalar: Boolean = !this.isEmpty && this.nDimension == 0
override def storage(): Storage[T] = _storage
override def storageOffset(): Int = _storageOffset + 1
override def dim(): Int = nDimension
override def nElement(): Int = {
if (this.isEmpty) {
0
} else {
var n = 1
var d = 0
while (d < this.nDimension) {
n = n * this._size(d)
d += 1
}
n
}
}
override def squeeze(): Tensor[T] = DenseTensor.squeeze(this)
override def squeeze(dim: Int): Tensor[T] = DenseTensor.squeeze(this, dim - 1)
override def squeezeNewTensor(): Tensor[T] = {
val result = new DenseTensor(this._storage, this.storageOffset(), this._size, this._stride)
result.squeeze()
}
override def size(): Array[Int] = {
if (_size == null) null else _size.slice(0, this.nDimension)
}
override def size(dim: Int): Int = {
require(dim > 0 && dim <= this.nDimension,
s"dimension ${dim} out of range of ${this.nDimension}D tensor")
_size(dim - 1)
}
override def stride(): Array[Int] = {
if (_stride == null) null else _stride.slice(0, this.nDimension)
}
override def stride(dim: Int): Int = {
require(dim > 0 && dim <= this.nDimension,
s"dimension ${dim} out of range of ${this.nDimension}D tensor")
_stride(dim - 1)
}
override def resizeAs(src: Tensor[_]): Tensor[T] = {
DenseTensor.resizeAs(this, src)
this
}
override def cast[@specialized(Long, Int, Short, Double, Float) D: ClassTag]
(castTensor: Tensor[D])
(implicit ev1: TensorNumeric[D]): Tensor[D] = {
castTensor.getType() match {
case FloatType =>
castTensor.applyFun[T](this.asInstanceOf[Tensor[T]],
x => ev.toType[Float](x).asInstanceOf[D])
case DoubleType =>
castTensor.applyFun[T](this.asInstanceOf[Tensor[T]],
x => ev.toType[Double](x).asInstanceOf[D])
case LongType =>
castTensor.applyFun[T](this.asInstanceOf[Tensor[T]],
x => ev.toType[Long](x).asInstanceOf[D])
case IntType =>
castTensor.applyFun[T](this.asInstanceOf[Tensor[T]],
x => ev.toType[Int](x).asInstanceOf[D])
case ShortType =>
castTensor.applyFun[T](this.asInstanceOf[Tensor[T]],
x => ev.toType[Short](x).asInstanceOf[D])
case _ =>
throw new RuntimeException("Unspported type")
}
castTensor
}
override def resize(sizes: Array[Int], strides: Array[Int]): Tensor[T] = {
DenseTensor.resize(this, sizes, strides)
this
}
override def resize(size1: Int): Tensor[T] = {
if (this.nDimension != 1 || this.size(1) != size1) {
DenseTensor.resize(this, Array(size1))
} else {
this
}
}
override def resize(size1: Int, size2: Int): Tensor[T] = {
if (this.nDimension != 2 || this.size(1) != size1 || this.size(2) != size2) {
DenseTensor.resize(this, Array(size1, size2))
} else {
this
}
}
override def resize(size1: Int, size2: Int, size3: Int): Tensor[T] = {
if (this.nDimension != 3 || this.size(1) != size1 || this.size(2) != size2 ||
this.size(3) != size3) {
DenseTensor.resize(this, Array(size1, size2, size3))
} else {
this
}
}
override def resize(size1: Int, size2: Int, size3: Int, size4: Int): Tensor[T] = {
if (this.nDimension != 4 || this.size(1) != size1 || this.size(2) != size2 ||
this.size(3) != size3 ||
this.size(4) != size4) {
DenseTensor.resize(this, Array(size1, size2, size3, size4))
} else {
this
}
}
override def resize(size1: Int, size2: Int, size3: Int, size4: Int, size5: Int): Tensor[T] = {
if (this.nDimension != 5 || this.size(1) != size1 || this.size(2) != size2 ||
this.size(3) != size3 || this.size(4) != size4 || this.size(5) != size5) {
DenseTensor.resize(this, Array(size1, size2, size3, size4, size5))
} else {
this
}
}
override def view(sizes: Array[Int]): Tensor[T] = {
require(this.isContiguous(), "current tensor is not contiguous")
require(sizes.product == this.nElement(), "invalid size eElement")
new DenseTensor(this._storage, this.storageOffset(), sizes.clone())
}
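  // Returns a view with an extra trailing dimension holding windows of `size`
  // elements taken along `dim` with the given `step`; the underlying storage is shared.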
override def unfold(dim: Int, size: Int, step: Int): Tensor[T] = {
require(this.nDimension > 0, "cannot unfold an empty tensor")
require(dim > 0 && dim <= this.nDimension, "out of range")
require(size <= this.size(dim), "out of range")
require(step > 0, "invalid step")
val newTensor = this
val newSize = new Array[Int](this.nDimension + 1)
val newStride = new Array[Int](this.nDimension + 1)
newSize(this.nDimension) = size
newStride(this.nDimension) = this.stride(dim)
var d = 0
while (d < this.nDimension) {
if (d + 1 == dim) {
newSize(d) = (this.size(d + 1) - size) / step + 1
newStride(d) = step * this.stride(d + 1)
} else {
newSize(d) = this.size(d + 1)
newStride(d) = this.stride(d + 1)
}
d = d + 1
}
new DenseTensor(this._storage, this._storageOffset, newSize, newStride, this.dim() + 1)
}
private[tensor] def this(d1: Int)(implicit ev: TensorNumeric[T]) =
this(new ArrayStorage[T](new Array[T](d1)), 0, Array(d1),
Array(1), 1)
private[tensor] def this(d1: Int, d2: Int)(implicit ev: TensorNumeric[T]) =
this(new ArrayStorage[T](new Array[T](d1 * d2)), 0, Array(d1, d2),
Array(d2, 1), 2)
private[tensor] def this(d1: Int, d2: Int, d3: Int)(implicit ev: TensorNumeric[T]) =
this(new ArrayStorage[T](new Array[T](d1 * d2 * d3)), 0, Array(d1, d2, d3),
Array(d3 * d2, d3, 1), 3)
private[tensor] def this(d1: Int, d2: Int, d3: Int, d4: Int)(implicit ev: TensorNumeric[T]) =
this(new ArrayStorage[T](new Array[T](d1 * d2 * d3 * d4)), 0, Array(d1, d2, d3, d4),
Array(d4 * d3 * d2, d4 * d3, d4, 1), 4)
private[tensor] def this(d1: Int, d2: Int, d3: Int, d4: Int, d5: Int)(
implicit ev: TensorNumeric[T]) =
this(new ArrayStorage[T](new Array[T](d1 * d2 * d3 * d4 * d5)), 0, Array(d1, d2, d3, d4, d5),
Array(d5 * d4 * d3 * d2, d5 * d4 * d3, d5 * d4, d5, 1), 5)
private[tensor] def this(dims: Int*)(implicit ev: TensorNumeric[T]) =
this(new ArrayStorage[T](new Array[T](dims.product)), 0, dims.toArray,
DenseTensor.size2Stride(dims.toArray), dims.length)
private[tensor] def this(storage: ArrayStorage[T])(implicit ev: TensorNumeric[T]) = {
this(null, 0, null, null, 0)
val _storageOffset = 0
val _size = Array(storage.length)
val _stride = Array(1)
DenseTensor.newWithStorage(this, storage, _storageOffset, _size, _stride, ev)
}
private[tensor] def this(storage: ArrayStorage[T], storageOffset: Int, size: Array[Int] = null,
stride: Array[Int] = null)(implicit ev: TensorNumeric[T]) = {
this(null, 0, null, null, 0)
if (storage != null) {
val _storageOffset = storageOffset - 1
val _size = if (size == null) Array(storage.length) else size
val _stride = if (size == null) null else stride
DenseTensor.newWithStorage(this, storage, _storageOffset, _size, _stride, ev)
}
}
private[tensor] def this(other: Tensor[T])(implicit ev: TensorNumeric[T]) = {
this(null, 0, null, null, 0)
require(other.isInstanceOf[DenseTensor[_]], "Only support dense tensor in this operation")
val _storage = other.storage().asInstanceOf[ArrayStorage[T]]
val _storageOffset = other.storageOffset() - 1
val _size = other.size()
val _stride = other.stride()
DenseTensor.newWithStorage(this, _storage, _storageOffset, _size, _stride, ev)
}
private[tensor] def this()(implicit ev: TensorNumeric[T]) = this(null, 0, null, null, 0)
override def fill(v: T): Tensor[T] = {
if (this.storage() == null) return this
if (this.isContiguous()) {
this.storage().fill(v, this.storageOffset(), this.nElement())
} else {
val func = new TensorFunc2[T] {
override def apply(data: Array[T], index: Int): Unit = {
data(index) = v
}
}
DenseTensorApply.apply1[T](this, func)
}
this
}
override def forceFill(v: Any): Tensor[T] = {
this.fill(v.asInstanceOf[T])
}
override def zero(): Tensor[T] = {
this.fill(ev.zero)
}
override def randn(): Tensor[T] = {
randn(0, 1)
}
override def randn(mean: Double, stdv: Double): Tensor[T] = {
if (this.isContiguous()) {
var i = 0
val total = this.nElement()
val data = this.storage().array()
val offset = this.storageOffset() - 1
while (i < total) {
data(offset + i) = ev.fromType(RNG.normal(mean, stdv))
i += 1
}
} else {
val func = new TensorFunc2[T] {
override def apply(data: Array[T], index: Int): Unit = {
data(index) = ev.fromType(RNG.normal(mean, stdv))
}
}
DenseTensorApply.apply1[T](this, func)
}
this
}
override def bernoulli(p: Double): Tensor[T] = {
if (this.isContiguous()) {
var i = 0
val total = this.nElement()
val data = this.storage().array()
val offset = this.storageOffset() - 1
while (i < total) {
data(offset + i) = if (RNG.bernoulli(p)) {
ev.fromType[Int](1)
} else {
ev.fromType[Int](0)
}
i += 1
}
} else {
val func = new TensorFunc2[T] {
override def apply(data: Array[T], index: Int): Unit = {
data(index) =
if (RNG.bernoulli(p)) {
ev.fromType[Int](1)
} else {
ev.fromType[Int](0)
}
}
}
DenseTensorApply.apply1[T](this, func)
}
this
}
override def rand(): Tensor[T] = rand(0.0, 1.0)
override def rand(lowerBound: Double, upperBound: Double): Tensor[T] = {
if (this.isContiguous()) {
var i = 0
val total = this.nElement()
val data = this.storage().array()
val offset = this.storageOffset() - 1
while (i < total) {
data(offset + i) = ev.fromType(RNG.uniform(lowerBound, upperBound))
i += 1
}
} else {
val func = new TensorFunc2[T] {
override def apply(data: Array[T], index: Int): Unit = {
data(index) = ev.fromType(RNG.uniform(lowerBound, upperBound))
}
}
DenseTensorApply.apply1[T](this, func)
}
this
}
override def set(other: Tensor[T]): Tensor[T] = {
require(other.isInstanceOf[DenseTensor[_]], "Only support dense tensor in this operation")
DenseTensor.rawSet(this, other.storage().asInstanceOf[ArrayStorage[T]],
other.storageOffset() - 1, other.nDimension(), other.size(), other.stride())
}
override def set(storage: Storage[T], storageOffset: Int = 1, sizes: Array[Int] = null,
strides: Array[Int] = null): Tensor[T] = {
if (sizes != null && strides != null) {
require(sizes.length == strides.length)
}
require(storage.isInstanceOf[ArrayStorage[_]], "Only support array storage in this operation")
DenseTensor.rawSet(this, storage.asInstanceOf[ArrayStorage[T]], storageOffset - 1,
if (sizes == null) 0 else sizes.length,
sizes, strides)
}
override def set(): Tensor[T] = {
if (this._storage != null) {
this._storage.resize(0)
}
this.nDimension = 0
this._size = Array[Int]()
this
}
override def transpose(dim1: Int, dim2: Int): Tensor[T] = {
val result = DenseTensor.newWithTensor(this)
DenseTensor.transpose(result, null, dim1 - 1, dim2 - 1)
result
}
override def t(): Tensor[T] = {
require(this.nDimension == 2, "t() is only for 2D tensor")
transpose(1, 2)
}
override def select(dim: Int, index: Int): Tensor[T] = {
val _dimension = dim - 1
val _sliceIndex = index - 1
require(this.nDimension > 0, "empty or scalar tensor cannot be selected")
val result = DenseTensor.newWithTensor(this)
DenseTensor.select(result, null, _dimension, _sliceIndex)
result
}
override def clone(): Tensor[T] = {
DenseTensor.newClone(this)
}
override def shallowClone(): Tensor[T] = {
Tensor(Storage(this.storage().array()), storageOffset(), size(), stride())
}
override def emptyInstance(): Tensor[T] = {
Tensor[T]()
}
override def copy(other: Tensor[T]): Tensor[T] = {
other match {
case t: DnnTensor[_] =>
require(this.nElement() == other.nElement(), "tensor size must match")
this.storage().copy(other.storage(), this.storageOffset() - 1, 0, other.nElement())
case t: DenseTensor[_] =>
DenseTensor.copy(this, other)
case _ => throw new UnsupportedOperationException(
"only support copy from dense tensor or dnn tensor")
}
this
}
override def narrow(dim: Int, index: Int, size: Int): Tensor[T] = {
val result = DenseTensor.newWithTensor(this)
DenseTensor.narrow(result, null, dim - 1, index - 1, size)
result
}
def applyFun[A: ClassTag](
t: Tensor[A],
func: (A) => T): Tensor[T] = {
val func2 = new TensorDiffTypeFunc4[A, T] {
override def apply(
data1: Array[A], index1: Int,
data2: Array[T], index2: Int): Unit = {
data2(index2) = func(data1(index1))
}
}
DenseTensorApply.apply1[A, T](t, this, func2)
this
}
def zipWith[A: ClassTag, B: ClassTag](
t1: Tensor[A],
t2: Tensor[B],
func: (A, B) => T): Tensor[T] = {
val func2 = new TensorDiffTypeFunc6[A, B, T] {
override def apply(
data1: Array[A], index1: Int,
data2: Array[B], index2: Int,
data3: Array[T], index3: Int): Unit = {
data3(index3) = func(data1(index1), data2(index2))
}
}
DenseTensorApply.apply2(t1, t2, this, func2)
this
}
override def apply1(func: T => T): Tensor[T] = {
val func2 = new TensorFunc2[T] {
override def apply(data: Array[T], index: Int): Unit = {
data(index) = func(data(index))
}
}
DenseTensorApply.apply1[T](this, func2)
this
}
override def map(other: Tensor[T], func: (T, T) => T): Tensor[T] = {
val func2 = new TensorFunc4[T] {
override def apply(data1: Array[T], index1: Int, data2: Array[T], index2: Int): Unit = {
data1(index1) = func(data1(index1), data2(index2))
}
}
DenseTensorApply.apply2[T](this, other, func2)
this
}
override def apply(index: Int): Tensor[T] = {
require(this.nDimension > 0, "empty or scalar tensor")
var _index = index - 1
if (_index < 0) _index = this._size(0) + _index + 1
require(_index >= 0 && _index < this._size(0),
s"out of range, ${_index}: 0 to ${this._size(0)}")
val result = DenseTensor.newWithTensor(this)
DenseTensor.select(result, null, 0, _index)
result
}
override def apply(table: Table): Tensor[T] = {
val (tensor, offset) = subset(table)
offset match {
case Some(i) =>
val result = new DenseTensor[T](1)
result.setValue(1, tensor.storage()(i))
result
case None => tensor
}
}
override def update(table: Table, value: T): Unit = {
val (tensor, offset) = subset(table)
offset match {
case Some(i) => tensor.storage()(i) = value
case None => tensor.fill(value)
}
}
override def update(table: Table, src: Tensor[T]): Unit = {
val (tensor, offset) = subset(table)
tensor.copy(src)
}
override def update(index: Int, src: Tensor[T]): Unit = {
require(this.nDimension > 0, "empty or scalar tensor")
var _index = index - 1
if (_index < 0) _index = this._size(0) + _index + 1
require(_index >= 0 && _index < this._size(0), "out of range")
val tensor = DenseTensor.newWithTensor(this)
DenseTensor.narrow(tensor, null, 0, _index, 1)
tensor.copy(src)
}
private def subset(table: Table): (Tensor[T], Option[Int]) = {
var cdim = 0
require(table.length <= this.nDimension, "too many indices provided")
val tensor = DenseTensor.newWithTensor(this)
var d = 1
while (d <= table.length) {
table[Any](d) match {
case index: Int =>
var z = index - 1
if (z < 0) z = tensor._size(cdim) + z + 1
require(z >= 0 && z < tensor._size(cdim), "index out of bound")
if (tensor.nDimension == 1) {
return (tensor, Some(tensor._storageOffset + z * tensor._stride(0)))
} else {
DenseTensor.select(tensor, null, cdim, z)
}
case range: Table =>
var start = 0
var end = tensor._size(cdim) - 1
if (range.length >= 1) {
range[Any](1) match {
case left: Int =>
start = left - 1
}
end = start
}
if (start < 0) start = tensor._size(cdim) + start + 1
require(start >= 0 && start < tensor._size(cdim), "start index out of bound")
if (range.length >= 2) {
range[Any](2) match {
case right: Int =>
end = right - 1
}
}
if (end < 0) end = tensor._size(cdim) + end + 1
require(end >= 0 && end < tensor._size(cdim), "end index out of bound")
require(end >= start, "end index must be greater or equal to start index")
DenseTensor.narrow(tensor, null, cdim, start, end - start + 1)
cdim = cdim + 1
}
d += 1
}
(tensor, None)
}
override def apply(indexes: Array[Int]): T = {
require(indexes.length == this.nDimension, "invalid size")
var offset = this._storageOffset
var d = 0
while (d < indexes.length) {
offset += getOffset(indexes(d) - 1, d + 1)
d += 1
}
this._storage(offset)
}
override def value(): T = {
require(1 == this.nElement(), s"invalid size: 1 == ${this.nElement()}")
var offset = this._storageOffset
this._storage(offset)
}
override def valueAt(d1: Int): T = {
require(1 == this.nDimension, s"invalid size: 1 == ${this.nDimension}")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
this._storage(offset)
}
override def valueAt(d1: Int, d2: Int): T = {
require(2 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
offset += getOffset(d2 - 1, 2)
this._storage(offset)
}
override def valueAt(d1: Int, d2: Int, d3: Int): T = {
require(3 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
offset += getOffset(d2 - 1, 2)
offset += getOffset(d3 - 1, 3)
this._storage(offset)
}
override def valueAt(d1: Int, d2: Int, d3: Int, d4: Int): T = {
require(4 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
offset += getOffset(d2 - 1, 2)
offset += getOffset(d3 - 1, 3)
offset += getOffset(d4 - 1, 4)
this._storage(offset)
}
override def valueAt(d1: Int, d2: Int, d3: Int, d4: Int, d5: Int): T = {
require(5 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
offset += getOffset(d2 - 1, 2)
offset += getOffset(d3 - 1, 3)
offset += getOffset(d4 - 1, 4)
offset += getOffset(d5 - 1, 5)
this._storage(offset)
}
private def getOffset(z: Int, dim: Int): Int = {
var _z = z
if (_z < 0) {
_z = this.size(dim) + _z + 1
}
require(_z >= 0 && _z < this.size(dim), "index out of bound")
_z * this.stride(dim)
}
override def update(index: Int, value: T): Unit = {
require(this.nDimension > 0, "empty tensor")
var _index = index - 1
if (_index < 0) _index = this._size(0) + _index + 1
require(_index >= 0 && _index < this._size(0), "out of range")
if (this.nDimension == 1) {
_storage(this._storageOffset + _index * this._stride(0)) = value
} else {
val tensor = DenseTensor.newWithTensor(this)
DenseTensor.narrow(tensor, null, 0, _index, 1)
tensor.fill(value)
}
}
override def update(indexes: Array[Int], value: T): Unit = {
require(indexes.length == this.nDimension, "invalid size")
var offset = this._storageOffset
var d = 0
while (d < indexes.length) {
offset += getOffset(indexes(d) - 1, d + 1)
d += 1
}
this._storage(offset) = value
}
override def setValue(d1: Int, d2: Int, d3: Int, d4: Int, value: T): this.type = {
require(4 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
offset += getOffset(d2 - 1, 2)
offset += getOffset(d3 - 1, 3)
offset += getOffset(d4 - 1, 4)
this._storage(offset) = value
this
}
override def setValue(d1: Int, d2: Int, d3: Int, d4: Int, d5: Int, value: T): this.type = {
require(5 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
offset += getOffset(d2 - 1, 2)
offset += getOffset(d3 - 1, 3)
offset += getOffset(d4 - 1, 4)
offset += getOffset(d5 - 1, 5)
this._storage(offset) = value
this
}
override def setValue(d1: Int, d2: Int, d3: Int, value: T): this.type = {
require(3 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
offset += getOffset(d2 - 1, 2)
offset += getOffset(d3 - 1, 3)
this._storage(offset) = value
this
}
override def setValue(d1: Int, d2: Int, value: T): this.type = {
require(2 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
offset += getOffset(d2 - 1, 2)
this._storage(offset) = value
this
}
override def setValue(d1: Int, value: T): this.type = {
require(1 == this.nDimension, "invalid size")
var offset = this._storageOffset
offset += getOffset(d1 - 1, 1)
this._storage(offset) = value
this
}
override def setValue(value: T): this.type = {
require(0 == this.nDimension, "invalid size, you can only call this on a scalar")
var offset = this._storageOffset
this._storage(offset) = value
this
}
override def update(func: T => Boolean, value: T): Unit = {
val func2 = new TensorFunc2[T] {
override def apply(data: Array[T], index: Int): Unit = {
data(index) = if (func(data(index))) value else data(index)
}
}
DenseTensorApply.apply1[T](this, func2)
}
override def isContiguous(): Boolean = {
DenseTensor.isContiguous(this)
}
override def contiguous(): Tensor[T] = {
DenseTensor.newContiguous(this)
}
override def isSameSizeAs(other: Tensor[_]): Boolean = {
DenseTensor.isSameSizeAs(this, other)
}
override def split(size: Int, dim: Int): Array[Tensor[T]] = {
val result = new ArrayBuffer[Tensor[T]]()
val dimLength = this.size(dim)
var start = 1
while (start <= dimLength) {
val curSize = math.min(size, dimLength - start + 1)
result.append(this.narrow(dim, start, curSize))
start += curSize
}
result.toArray
}
override def split(dim: Int): Array[Tensor[T]] = {
val result = new ArrayBuffer[Tensor[T]]()
val dimLength = this.size(dim)
var start = 1
while (start <= dimLength) {
result.append(this.select(dim, start))
start += 1
}
result.toArray
}
// scalastyle:off methodName
override def +(s: T): Tensor[T] = DenseTensorMath.add(s, this)
override def +(t: Tensor[T]): Tensor[T] = DenseTensorMath.add(this, t)
override def -(s: T): Tensor[T] = DenseTensorMath.sub(s, this)
override def -(t: Tensor[T]): Tensor[T] = DenseTensorMath.sub(this, t)
override def unary_-(): Tensor[T] = DenseTensorMath.neg(this)
override def /(s: T): Tensor[T] = DenseTensorMath.divide(s, this)
override def /(t: Tensor[T]): Tensor[T] = DenseTensorMath.divide(this, t)
override def *(s: T): Tensor[T] = DenseTensorMath.mul(s, this)
override def *(t: Tensor[T]): Tensor[T] = DenseTensorMath.mul(this, t)
// scalastyle:on methodName
override def prod(): T = DenseTensorMath.prodAll(this)
override def prod(x: Tensor[T], dim: Int): Tensor[T] = DenseTensorMath.prod(this, x, dim - 1)
override def sum(): T = DenseTensorMath.sumAll(this)
override def sum(dim: Int): Tensor[T] = DenseTensorMath.sum(null, this, dim - 1)
override def sum(x: Tensor[T], dim: Int): Tensor[T] = DenseTensorMath.sum(this, x, dim - 1)
override def mean(): T = DenseTensorMath.meanAll(this)
override def mean(dim: Int): Tensor[T] = DenseTensorMath.mean(this, dim - 1)
override def max(): T = DenseTensorMath.maxAll(this)
override def max(dim: Int): (Tensor[T], Tensor[T]) = {
require(dim > 0 && dim <= this.nDimension, "dimension out of range")
max(Tensor[T](), Tensor[T](), dim)
}
override def max(values: Tensor[T], indices: Tensor[T], dim: Int): (Tensor[T], Tensor[T]) = {
require(dim > 0 && dim <= this.nDimension, "dimension out of range")
    val sizes = this.size() // size() returns a copy, so it is safe to modify below
sizes(dim - 1) = 1
values.resize(sizes)
indices.resize(sizes)
    // TODO: the performance of contiguous tensors should be optimized
DenseTensorDimApply.dimApply3[T](this, values, indices, dim, (tdata, toffset, tstride,
tsize, vdata, voffset, vstride, vsize, idata, ioffset, istride, isize) => {
var max = tdata(toffset)
var index = 1
var i = 0
while (i < tsize) {
if (ev.toType[Double](ev.minus(tdata(toffset + i * tstride), max)) > 0) {
index = i + 1
max = tdata(toffset + i * tstride)
}
i += 1
}
vdata(voffset) = max
idata(ioffset) = ev.fromType[Float](index)
})
(values, indices)
}
override def min(): T = DenseTensorMath.minAll(this)
override def min(dim: Int): (Tensor[T], Tensor[T]) = {
require(dim > 0 && dim <= this.nDimension, "dimension out of range")
min(Tensor[T](), Tensor[T](), dim)
}
override def min(values: Tensor[T], indices: Tensor[T], dim: Int): (Tensor[T], Tensor[T]) = {
require(dim > 0 && dim <= this.nDimension, "dimension out of range")
val sizes = this.size()
sizes(dim - 1) = 1
values.resize(sizes)
indices.resize(sizes)
    // TODO: the performance of contiguous tensors should be optimized
DenseTensorDimApply.dimApply3[T](this, values, indices, dim, (tdata, toffset, tstride,
tsize, vdata, voffset, vstride, vsize, idata, ioffset, istride, isize) => {
var min = tdata(toffset)
var index = 1
var i = 0
while (i < tsize) {
if (ev.isGreater(min, tdata(toffset + i * tstride))) {
index = i + 1
min = tdata(toffset + i * tstride)
}
i += 1
}
vdata(voffset) = min
idata(ioffset) = ev.fromType[Float](index)
})
(values, indices)
}
override def sumSquare(): T = {
this.dot(this)
}
override def clamp(min: Double, max: Double): Tensor[T] = {
val maxT = ev.fromType[Double](max)
val minT = ev.fromType[Double](min)
val func = new TensorFunc2[T] {
override def apply(data1: Array[T], offset1: Int): Unit = {
if (ev.isGreater(data1(offset1), maxT)) data1(offset1) = maxT
else if (ev.isGreater(minT, data1(offset1))) data1(offset1) = minT
}
}
DenseTensorApply.apply1[T](this, func)
this
}
def scatter(dim: Int, index: Tensor[T], src: Tensor[T]): Tensor[T] = {
require(src.dim() == this.dim(), "Input tensor must have same dimensions as output tensor")
require(dim <= this.dim(), "Index dimension is out of bounds")
require(index.dim() == src.dim(), "Index tensor must have same dimensions as input tensor")
val elementsPerRow = index.size(dim)
    // TODO: the performance of contiguous tensors should be optimized
DenseTensorDimApply.dimApply3[T](this, src, index, dim, (tdata, toffset, tstride,
tsize, vdata, voffset, vstride, vsize, idata, ioffset, istride, isize) => {
var i = 0
while (i < elementsPerRow) {
val idx = ev.toType[Int](idata(ioffset + i * istride))
        require(idx >= 1 && idx <= this.size(dim), "invalid index in scatter")
tdata((idx - 1) * tstride + toffset) = vdata(i * vstride + voffset)
i += 1
}
})
this
}
def gather(dim: Int, index: Tensor[T], src: Tensor[T]): Tensor[T] = {
require(src.dim() == this.dim(), "Input tensor must have same dimensions as output tensor")
require(dim <= this.dim(), "Index dimension is out of bounds")
require(index.dim() == src.dim(), "Index tensor must have same dimensions as input tensor")
val elementsPerRow = index.size(dim)
    // TODO: the performance of contiguous tensors should be optimized
DenseTensorDimApply.dimApply3[T](this, src, index, dim, (tdata, toffset, tstride,
tsize, vdata, voffset, vstride, vsize, idata, ioffset, istride, isize) => {
var i = 0
while (i < elementsPerRow) {
val idx = ev.toType[Int](idata(ioffset + i * istride))
require(idx >= 1 && idx <= src.size(dim), "invalid index in gather")
tdata(i * tstride + toffset) = vdata((idx - 1) * vstride + voffset)
i += 1
}
})
this
}
override def add(value: T, y: Tensor[T]): Tensor[T] = DenseTensorMath.cadd(this, this, value, y)
override def add(x: Tensor[T]): Tensor[T] = {
require(x.isInstanceOf[DenseTensor[_]], "Only support dense tensor in this operation")
if (this.nElement() == x.nElement()) {
if (MKL.isMKLLoaded && this.isContiguous() && x.isContiguous()) {
ev.vAdd(this.nElement(), this.storage().array(), this.storageOffset() - 1,
x.storage().array(), x.storageOffset() - 1,
this.storage().array(), this.storageOffset() - 1)
} else {
val func = new TensorFunc4[T] {
override def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
data1(offset1) = ev.plus(data1(offset1), data2(offset2))
}
}
DenseTensorApply.apply2[T](this, x, func)
}
} else if (DenseTensor.canFastBroadcast(this, x)) {
// recursive add
var i = 0
while (i < this.size(1)) {
this.select(1, i + 1).add(x)
i += 1
}
} else {
this.add(expandTensor(x.asInstanceOf[DenseTensor[T]]))
}
this
}
private[tensor] def expandTensor(x: DenseTensor[T]): Tensor[T] = {
val targetSize = DenseTensor.expandSize(this, x)
val expandStrides = new Array[Int](targetSize.length)
val expandStridesX = new Array[Int](targetSize.length)
var i = targetSize.length - 1
val delta2 = targetSize.length - x.nDimension
    while (i >= delta2) {
      if (x.size(i + 1 - delta2) != 1) expandStridesX(i) = x.stride(i + 1 - delta2)
i -= 1
}
val expandX = new DenseTensor[T](
x.storage().asInstanceOf[ArrayStorage[T]],
x.storageOffset(),
targetSize,
expandStridesX
)
if (targetSize.product != this.nElement()) {
i = targetSize.length - 1
val delta1 = targetSize.length - this.nDimension
while (i >= delta1) {
if (this.size(i + 1 - delta1) != 1) expandStrides(i) = this.stride(i + 1 - delta1)
i -= 1
}
val tensor1 = new DenseTensor[T](
this._storage,
this.storageOffset(),
targetSize,
expandStrides
)
val newTensor = new DenseTensor[T]().resize(targetSize).add(tensor1)
this.set(newTensor)
}
expandX
}
override def add(x: Tensor[T], y: Tensor[T]): Tensor[T] = {
require(this.nElement() == x.nElement() && this.nElement() == y.nElement())
if (MKL.isMKLLoaded && this.isContiguous() && x.isContiguous() && y.isContiguous()) {
ev.vAdd(this.nElement(), y.storage().array(), y.storageOffset() - 1,
x.storage().array(), x.storageOffset() - 1,
this.storage().array(), this.storageOffset() - 1)
} else {
val func = new TensorFunc6[T] {
override def apply(data: Array[T], offset: Int, data1: Array[T],
offset1: Int, data2: Array[T], offset2: Int): Unit = {
          data(offset) = ev.plus(data1(offset1), data2(offset2))
}
}
DenseTensorApply.apply3[T](this, x, y, func)
}
this
}
// Puts the result of x + value * y in current tensor
override def add(x: Tensor[T], value: T, y: Tensor[T]): Tensor[T] =
DenseTensorMath.cadd(this, x, value, y)
override def add(value: T): Tensor[T] = {
if (this.isContiguous()) {
ev.add(this.nElement(), this.storage().array(), this.storageOffset() - 1, value, 1)
this
} else {
this.apply1(ev.plus(_, value))
}
}
override def sub(value: T, y: Tensor[T]): Tensor[T] =
DenseTensorMath.csub(this, this, ev.negative(value), y)
override def sub(x: Tensor[T]): Tensor[T] = {
    require(x.isInstanceOf[DenseTensor[_]], "Only dense tensor is supported in this operation")
if (this.nElement() == x.nElement()) {
if (MKL.isMKLLoaded && this.isContiguous() && x.isContiguous() &&
(x.getType() == DoubleType || x.getType() == FloatType)) {
ev.vSub(this.nElement(), this.storage().array(), this.storageOffset() - 1,
x.storage().array(), x.storageOffset() - 1,
this.storage().array(), this.storageOffset() - 1)
}
else {
val func = new TensorFunc4[T] {
override def apply(data1: Array[T], offset1: Int,
data2: Array[T], offset2: Int): Unit = {
data1(offset1) = ev.minus(data1(offset1), data2(offset2))
}
}
DenseTensorApply.apply2[T](this, x, func)
}
} else if (DenseTensor.canFastBroadcast(this, x)) {
      // recursive sub along the first dimension
var i = 0
while (i < this.size(1)) {
this.select(1, i + 1).sub(x)
i += 1
}
} else {
this.sub(expandTensor(x.asInstanceOf[DenseTensor[T]]))
}
this
}
override def sub(x: Tensor[T], y: Tensor[T]): Tensor[T] = {
require(this.nElement() == x.nElement() && this.nElement() == y.nElement())
if (MKL.isMKLLoaded && this.isContiguous() && x.isContiguous() && y.isContiguous()) {
ev.vSub(this.nElement(), x.storage().array(), x.storageOffset() - 1,
y.storage().array(), y.storageOffset() - 1,
this.storage().array(), this.storageOffset() - 1)
} else {
val func = new TensorFunc6[T] {
override def apply (data: Array[T], offset: Int, data1: Array[T],
offset1: Int, data2: Array[T], offset2: Int): Unit = {
data(offset) = ev.minus(data1(offset1), data2(offset2))
}
}
DenseTensorApply.apply3[T](this, x, y, func)
}
this
}
// Puts the result of x - value * y in current tensor
override def sub(x: Tensor[T], value: T, y: Tensor[T]): Tensor[T] =
DenseTensorMath.csub(this, x, value, y)
override def sub(value: T): Tensor[T] = {
if (this.isContiguous()) {
ev.sub(this.nElement(), this.storage().array(), this.storageOffset() - 1, value, 1)
this
} else {
this.apply1(ev.minus(_, value))
}
}
override def dot(y: Tensor[T]): T = {
require(this.nElement() == y.nElement())
if (MKL.isMKLLoaded && this.isContiguous() && y.isContiguous()) {
ev.dot(this.nElement(), this.storage().array(), this.storageOffset() - 1, 1,
y.storage().array(), y.storageOffset() - 1, 1)
}
else {
var sum = ev.fromType[Int](0)
this.map(y, (a, b) => {
sum = ev.plus(sum, ev.times(a, b))
a
})
sum
}
}
override def cmax(value: T): Tensor[T] = {
this.apply1(x => ev.max(x, value))
}
override def dist(y: Tensor[T], norm: Int): T = {
var sum = ev.fromType[Int](0)
this.map(y, (a, b) => {
sum = ev.plus(sum, ev.pow(ev.abs(ev.minus(b, a)), ev.fromType[Int](norm)))
a
})
ev.pow(sum, ev.divide(ev.fromType[Int](1), ev.fromType[Int](norm)))
}
override def addcmul(value: T, tensor1: Tensor[T], tensor2: Tensor[T]): Tensor[T] = {
require(tensor1.nElement() == tensor2.nElement() && this.nElement() == tensor1.nElement())
if (this.isContiguous() && tensor1.isContiguous() && tensor2.isContiguous()) {
ev.addcmul(value, this.nElement(), this.storage().array(), this.storageOffset() - 1,
tensor1.storage().array(), tensor1.storageOffset() - 1,
tensor2.storage().array(), tensor2.storageOffset() - 1)
} else {
val func = new TensorFunc6[T] {
override def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int,
data3: Array[T], offset3: Int): Unit = {
data1(offset1) = ev.plus(data1(offset1), ev.times(ev.times(data2(offset2),
data3(offset3)), value))
}
}
DenseTensorApply.apply3[T](this, tensor1, tensor2, func)
}
this
}
override def addcmul(tensor1: Tensor[T], tensor2: Tensor[T]): Tensor[T] = {
addcmul(ev.fromType(1), tensor1, tensor2)
}
override def addcdiv(value: T, tensor1: Tensor[T], tensor2: Tensor[T]): Tensor[T] = {
if (this.isContiguous() && tensor1.isContiguous() && tensor2.isContiguous()) {
ev.addcdiv(value, this.nElement(), this.storage().array(), this.storageOffset() - 1,
tensor1.storage().array(), tensor1.storageOffset() - 1,
tensor2.storage().array(), tensor2.storageOffset() - 1)
} else {
val func = new TensorFunc6[T] {
override def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int,
data3: Array[T], offset3: Int): Unit = {
data1(offset1) = ev.plus(data1(offset1), ev.times(ev.divide(data2(offset2),
data3(offset3)), value))
}
}
DenseTensorApply.apply3[T](this, tensor1, tensor2, func)
}
this
}
override def cmul(y: Tensor[T]): Tensor[T] = {
require(y.isInstanceOf[DenseTensor[_]], "Only support dense tensor in this operation")
DenseTensorMath.cmul(this, this, y.asInstanceOf[DenseTensor[T]])
}
override def cmul(x: Tensor[T], y: Tensor[T]): Tensor[T] = {
require(x.isInstanceOf[DenseTensor[_]], "Only support dense tensor in this operation")
require(y.isInstanceOf[DenseTensor[_]], "Only support dense tensor in this operation")
DenseTensorMath.cmul(this, x.asInstanceOf[DenseTensor[T]], y.asInstanceOf[DenseTensor[T]])
}
override def cdiv(y: Tensor[T]): Tensor[T] = DenseTensorMath.cdiv(this, this, y)
override def cdiv(x: Tensor[T], y: Tensor[T]): Tensor[T] = DenseTensorMath.cdiv(this, x, y)
/**
* stores the element-wise maximum of x and y in x.
* x.cmax(y) = max(x, y)
*
* @param y tensor
* @return current tensor
*/
override def cmax(y: Tensor[T]): Tensor[T] = DenseTensorMath.cmax(this, this, y)
/**
* stores the element-wise maximum of x and y in z.
* z.cmax(x, y) means z = max(x, y)
*
* @param x tensor
* @param y tensor
*/
override def cmax(x: Tensor[T], y: Tensor[T]): Tensor[T] = DenseTensorMath.cmax(this, x, y)
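  // Usage sketch for the element-wise max/min family (illustrative; shapes and values assumed):
  //   val a = Tensor[Float](3).range(1, 3)   // [1, 2, 3]
  //   val b = Tensor[Float](3).fill(2f)      // [2, 2, 2]
  //   a.cmax(b)                              // a becomes [2, 2, 3]
  //   a.cmin(b)                              // a becomes [2, 2, 2]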
override def cmin(y: Tensor[T]): Tensor[T] = DenseTensorMath.cmin(this, this, y)
override def cmin(x: Tensor[T], y: Tensor[T]): Tensor[T] = DenseTensorMath.cmin(this, x, y)
override def mul(x: Tensor[T], value: T): Tensor[T] = DenseTensorMath.mul(this, x, value)
override def mul(value: T): Tensor[T] = DenseTensorMath.mul(this, null, value)
override def div(value: T): Tensor[T] = DenseTensorMath.mul(this, null, ev.inv(value))
override def div(x: Tensor[T]): Tensor[T] = {
require(x.isInstanceOf[DenseTensor[_]], "Only dense tensor is supported in this operation")
if (this.nElement() == x.nElement()) {
if (MKL.isMKLLoaded && this.isContiguous() && x.isContiguous()) {
ev.vDiv(this.nElement(), this.storage().array(), this.storageOffset() - 1,
x.storage().array(), x.storageOffset() - 1,
this.storage().array(), this.storageOffset() - 1)
}
else {
val func = new TensorFunc4[T] {
override def apply (data1: Array[T], offset1: Int,
data2: Array[T], offset2: Int): Unit = {
data1(offset1) = ev.divide(data1(offset1), data2(offset2))
}
}
DenseTensorApply.apply2[T](this, x, func)
}
} else if (DenseTensor.canFastBroadcast(this, x)) {
      // recursive div along the first dimension
var i = 0
while(i < this.size(1)) {
this.select(1, i + 1).div(x)
i += 1
}
} else {
this.div(expandTensor(x.asInstanceOf[DenseTensor[T]]))
}
this
}
override def conv2(kernel: Tensor[T], vf: Char = 'V'): Tensor[T] =
DenseTensorConv.conv2Dmul[T](ev.fromType[Int](1), this, kernel, 1, 1, vf, 'C')
override def xcorr2(kernel: Tensor[T], vf: Char = 'V'): Tensor[T] =
DenseTensorConv.conv2Dmul[T](ev.fromType[Int](1), this, kernel, 1, 1, vf, 'X')
override def addmm(v1: T, M: Tensor[T], v2: T, mat1: Tensor[T], mat2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmm(this, v1, M, v2, mat1, mat2)
override def addmm(M: Tensor[T], mat1: Tensor[T], mat2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmm[T](this, ev.fromType[Int](1), M, ev.fromType[Int](1), mat1, mat2)
override def addmm(mat1: Tensor[T], mat2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmm[T](this, ev.fromType[Int](1), this, ev.fromType[Int](1), mat1, mat2)
override def addmm(v2: T, mat1: Tensor[T], mat2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmm[T](this, ev.fromType[Int](1), this, v2, mat1, mat2)
override def addmm(v1: T, v2: T, mat1: Tensor[T], mat2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmm(this, v1, this, v2, mat1, mat2)
override def mm(mat1: Tensor[T], mat2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmm(this, ev.zero, this, ev.fromType[Int](1), mat1, mat2)
override def addr(t1: Tensor[T], t2: Tensor[T]): Tensor[T] =
DenseTensorMath.addr[T](this, ev.fromType[Int](1), this, ev.fromType[Int](1), t1, t2)
override def addr(v2: T, t1: Tensor[T], t2: Tensor[T]): Tensor[T] =
DenseTensorMath.addr[T](this, ev.fromType[Int](1), this, v2, t1, t2)
override def addr(v1: T, t1: Tensor[T], v2: T, t2: Tensor[T]): Tensor[T] =
DenseTensorMath.addr(this, v1, this, v2, t1, t2)
  /**
   * Performs the outer product between t2 (1D tensor) and t3 (1D tensor), scales it by v2 and
   * adds the matrix t1 scaled by v1. In other words, res_ij = (v1 * t1_ij) + (v2 * t2_i * t3_j).
   *
   * @param v1 scale applied to the matrix t1
   * @param t1 a matrix with the same shape as the result
   * @param v2 scale applied to the outer product of t2 and t3
   * @param t2 the first 1D tensor
   * @param t3 the second 1D tensor
   * @return current tensor holding the result
   */
override def addr(v1: T, t1: Tensor[T], v2: T, t2: Tensor[T], t3: Tensor[T]): Tensor[T] =
DenseTensorMath.addr(this, v1, t1, v2, t2, t3)
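  // Usage sketch (illustrative; values assumed): res = v1 * t1 + v2 * (t2 outer-product t3)
  //   val res = Tensor[Float](2, 2)
  //   val m = Tensor[Float](2, 2).fill(1f)
  //   val v = Tensor[Float](2).range(1, 2)   // [1, 2]
  //   res.addr(1f, m, 2f, v, v)              // res(i, j) = 1 * 1 + 2 * v(i) * v(j)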
override def addmv(beta: T, vec1: Tensor[T], alpha: T, mat: Tensor[T],
vec2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmv(this, beta, vec1, alpha, mat, vec2)
override def addmv(beta: T, alpha: T, mat: Tensor[T], vec2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmv(this, beta, this, alpha, mat, vec2)
  /**
   * returns a pseudo-random number; requires 0 <= args.length <= 2
   * if args.length = 0, the value is drawn from [0, 1)
   * if args.length = 1, the value is drawn from [1, args(0)] (or [args(0), 1])
   * if args.length = 2, the value is drawn from [args(0), args(1)]
   *
   * @param args optional bounds of the uniform distribution
   */
override def uniform(args: T*): T = {
    require(args.length <= 2, s"invalid arguments, expected ${args.length} <= 2.")
if (args.length == 0) {
ev.rand()
} else if (args.length == 1) {
ev.plus(ev.times(ev.rand(), ev.minus(args(0), ev.fromType[Int](1))),
ev.fromType[Int](1))
} else {
      require(ev.toType[Double](ev.minus(args(0), args(1))) <= 0.0,
        s"invalid arguments, expected ${args(0)} <= ${args(1)}.")
ev.plus(ev.times(ev.rand(), ev.minus(args(1), args(0))), args(0))
}
}
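  // Usage sketch (illustrative; t stands for any Tensor[Float]):
  //   t.uniform()           // one draw from [0, 1)
  //   t.uniform(10f)        // one draw from [1, 10]
  //   t.uniform(-1f, 1f)    // one draw from [-1, 1]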
override def repeatTensor(sizes: Array[Int]): Tensor[T] = {
require(sizes.length >= this.nDimension,
"Number of dimensions of repeat dims can not be smaller than number of dimensions of tensor")
val result = new DenseTensor[T]()
val xTensor = this.clone()
var xSize = xTensor.size()
var i = 1
while (i <= sizes.length - this.dim()) {
xSize = Array(1) ++ xSize
i += 1
}
val size = new DenseTensor(new ArrayStorage[T](xSize.map(x => ev.fromType[Int](x)))).
cmul(new DenseTensor(new ArrayStorage[T](sizes.map(x => ev.fromType[Int](x))))).
storage().array().map(x => ev.toType[Int](x))
xTensor.resize(xSize)
result.resize(size)
var urTensor = Tensor(result)
i = 1
while (i <= xTensor.dim()) {
urTensor = urTensor.unfold(i, xTensor.size(i), xTensor.size(i))
i += 1
}
i = 1
while (i <= urTensor.dim() - xTensor.dim()) {
xSize = Array(1) ++ xSize
i += 1
}
xTensor.resize(xSize)
val xxTensor = xTensor.expandAs(urTensor)
urTensor.copy(xxTensor)
result
}
override def expandAs(template: Tensor[T]): Tensor[T] = {
this.expand(template.size())
}
override def expand(sizes: Array[Int]): Tensor[T] = {
require(sizes.length == this.dim(),
s"the number of dimensions provided must equal ${this.dim()}")
val tensorDim = this.dim()
val tensorStride = this.stride()
val tensorSize = this.size()
var i = 0
while (i < tensorDim) {
if (tensorSize(i) == 1) {
tensorSize(i) = sizes(i)
tensorStride(i) = 0
} else if (tensorSize(i) != sizes(i)) {
throw new UnsupportedOperationException(
"incorrect size: only supporting singleton expansion (size=1)")
}
i += 1
}
set(this.storage(), this.storageOffset(), tensorSize, tensorStride)
}
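  // Usage sketch (illustrative; only singleton dimensions can be expanded):
  //   val row = Tensor[Float](1, 3).fill(2f)   // shape 1x3
  //   row.expand(Array(4, 3))                  // now a 4x3 view of the same storage (stride 0 on dim 1)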
override def addmv(alpha: T, mat: Tensor[T], vec2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmv(this, ev.fromType[Int](1), this, alpha, mat, vec2)
override def mv(mat: Tensor[T], vec2: Tensor[T]): Tensor[T] =
DenseTensorMath.addmv(this, ev.fromType[Int](1), this, ev.fromType[Int](1), mat, vec2)
override def baddbmm(beta: T, M: Tensor[T], alpha: T, batch1: Tensor[T],
batch2: Tensor[T]): Tensor[T] = DenseTensorMath.baddbmm(this, beta, M, alpha, batch1, batch2)
override def baddbmm(beta: T, alpha: T, batch1: Tensor[T], batch2: Tensor[T]): Tensor[T] =
DenseTensorMath.baddbmm(this, beta, this, alpha, batch1, batch2)
override def baddbmm(alpha: T, batch1: Tensor[T], batch2: Tensor[T]): Tensor[T] =
DenseTensorMath.baddbmm(this, ev.fromType[Int](1), this, alpha, batch1, batch2)
override def bmm(batch1: Tensor[T], batch2: Tensor[T]): Tensor[T] =
DenseTensorMath.baddbmm(this, ev.fromType[Int](1), this, ev.fromType[Int](1), batch1, batch2)
override def abs(): Tensor[T] = this.apply1(ev.abs(_))
override def toBreezeVector(): BrzDenseVector[T] = {
require(this.nDimension == 1, "tensor is not 1D")
new BrzDenseVector(this.storage().array(), this.storageOffset() - 1, this.stride(1),
this.nElement())
}
override def getType(): TensorDataType = ev.getType()
override def toMLlibMatrix(): Matrix = {
require(this.nDimension == 2, "tensor is not 2D")
require((this.stride(1) == 1 && this.stride(2) == this.size(1))
      || (this.stride(1) == this.size(2) && this.stride(2) == 1), "tensor is not contiguous")
new DenseMatrix(this.size(1), this.size(2), this.storage().array().asInstanceOf[Array[Double]],
this.stride(2) == 1) // column major
}
override def toBreezeMatrix(): BrzDenseMatrix[T] = {
require(this.nDimension == 2, "tensor is not 2D")
val majorStride = if (this.stride(2) == 1) this.stride(1) else this.stride(2)
new BrzDenseMatrix[T](this.size(1), this.size(2), this.storage().array(),
this.storageOffset() - 1,
majorStride, this.stride(2) == 1)
}
override def toMLlibVector(): Vector = {
require(this.nDimension == 1, "tensor is not 1D")
    require(this.stride(1) == 1, "tensor is not contiguous")
new DenseVector(this.storage().array().asInstanceOf[Array[Double]])
}
override def equals(obj: Any): Boolean = {
if (obj == null) {
return false
}
if (!obj.isInstanceOf[Tensor[T]]) {
return false
}
val other = obj.asInstanceOf[Tensor[T]]
if (this.eq(other)) {
return true
}
if (this.nDimension != other.nDimension) {
return false
}
var d = 1
while (d <= this.nDimension) {
if (this.size(d) != other.size(d)) {
return false
}
d += 1
}
var result = true
this.map(other, (a, b) => {
if (result) {
result = ev.nearlyEqual(a, b, DenseTensorMath.floatEpsilon)
}
a
})
return result
}
override def hashCode(): Int = {
val seed = 37
var hash = 1
hash = hash * seed + this.nDimension
var d = 1
while (d <= this.nDimension) {
hash = hash * seed + this.size(d)
d += 1
}
this.apply1(e => {
hash = hash * seed + e.hashCode()
e
})
hash
}
override def toString(): String = {
val foldThreshold = System.getProperty("bigdl.tensor.fold", "1000").toInt
this.nDimension match {
case 0 =>
if (this.isScalar) {
s"Scalar(${this.value()})"
} else {
s"Empty Tensor"
}
case 1 =>
val sb = new StringBuilder
if (this.size().product < foldThreshold) {
this.apply1(e => {
sb.append(e).append('\\n')
e
})
} else {
var i = 0
this.apply1(e => {
i = i + 1
if (i < 3 || i > this.size(1) - 3) {
sb.append(e).append('\\n')
} else if (i == 3) sb.append(e).append("\\n...\\n")
e
})
}
s"${sb}[${this.getClass.getName} of size ${this.size(1)}]"
case 2 =>
val sb = new StringBuilder
val indexer = Array(0, 0)
if (this.size().product < foldThreshold) {
var i = 1
while (i <= this.size(1)) {
var j = 1
while (j <= this.size(2)) {
indexer(0) = i
indexer(1) = j
sb.append(this.apply(indexer)).append('\\t')
j += 1
}
sb.append('\\n')
i += 1
}
} else {
var i = 1
while (i <= this.size(1)) {
var j = 1
if (i <= 3 || i > this.size(1) - 3) {
while (j <= this.size(2)) {
indexer(0) = i
indexer(1) = j
if (j < 3 || j > this.size(2) - 3) {
sb.append(this.apply(indexer)).append('\\t')
} else if (j == 3) {
sb.append(this.apply(indexer)).append("\\t...\\t")
}
j += 1
}
sb.append('\\n')
if (i == 3) sb.append("...\\n")
}
i += 1
}
}
s"${sb}[${this.getClass.getName} of size ${this.size(1)}x${this.size(2)}]"
case _ =>
val sb = new StringBuilder
val size = this.size()
val indexer = Array.fill(this.nDimension)(1)
var done = false
val _lastDim = this.nDimension - 1
val _secLastDim = _lastDim - 1
var d = _secLastDim - 1
val total = this.nElement()
while (!done) {
var i = 0
var needPrint = true
if (this.size.product > foldThreshold) {
while (i < _secLastDim) {
if (indexer(i) <= 2 || indexer(i) > size(i) - 2) i += 1
else {
needPrint = false
i = _secLastDim
}
if (indexer(i) == size(i) - 1) sb.append("...\\n\\n")
}
}
if (needPrint) {
// print header
sb.append('(')
i = 0
while (i < _secLastDim) {
sb.append(indexer(i)).append(',')
i += 1
}
sb.append(".,.) =\\n")
// print current matrix
i = 1
if (this.size(_secLastDim + 1) * this.size(_lastDim + 1) < foldThreshold) {
while (i <= this.size(_secLastDim + 1)) {
var j = 1
while (j <= this.size(_lastDim + 1)) {
indexer(_lastDim) = j
indexer(_secLastDim) = i
sb.append(this.apply(indexer)).append('\\t')
j += 1
}
sb.append('\\n')
i += 1
}
} else {
while (i <= this.size(_secLastDim + 1)) {
var j = 1
if (i <= 3 || i > this.size(_secLastDim + 1) - 3) {
while (j <= this.size(_lastDim + 1)) {
indexer(_lastDim) = j
indexer(_secLastDim) = i
if (j < 3 || j > this.size(_lastDim + 1) - 3) {
sb.append(this.apply(indexer)).append('\\t')
}
else if (j == 3) {
sb.append(this.apply(indexer)).append("\\t...\\t")
}
j += 1
}
sb.append('\\n')
if (i == 3) sb.append("...\\n")
}
i += 1
}
}
sb.append('\\n')
}
indexer(d) = indexer(d) + 1
while (d >= 0 && indexer(d) > size(d)) {
indexer(d) = 1
d = d - 1
if (d >= 0) indexer(d) = indexer(d) + 1
}
if (d == -1) {
done = true
} else {
d = _secLastDim - 1
}
}
s"${sb}[${this.getClass.getName} of size ${size.mkString("x")}]"
}
}
override def diff(other: Tensor[T], count: Int, reverse: Boolean): Boolean = {
if (this.nDimension != other.nDimension()) {
println("Dimension number is different")
return true
}
var d = 1
while (d <= this.nDimension) {
if (this.size(d) != other.size(d)) {
println(s"Dimension $d is different, left is ${this.size(d)}, right is ${other.size(d)}")
return true
}
d += 1
}
val buffer = new Array[(T, T, Int)](count)
var result = false
var catchNum = 0
val func2 = new TensorFunc4[T] {
override def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
if (data1(offset1) != data2(offset2)) {
require(offset1 == offset2)
if (reverse || catchNum < count) {
buffer(catchNum % count) = (data1(offset1), data2(offset2), offset1)
}
catchNum += 1
result = true
}
}
}
DenseTensorApply.apply2[T](this, other, func2)
    if (result) {
var i = 0
while (i < buffer.length) {
println(
s"Find difference => this is ${buffer(i)._1} other is ${buffer(i)._2} " +
s"offset is (${buffer(i)._3}/${this.nElement()}})")
i += 1
}
}
result
}
override def reshape(sizes: Array[Int]): Tensor[T] = {
require(sizes.product == this.nElement(),
"DenseTensor: nElement of this tensor is not equal to nElement specified by sizes," +
s" specified sizes = (${sizes.mkString(",")})," +
s" nElement specified by sizes = ${sizes.reduce(_ * _)}," +
s" nElement of this tensor = ${this.nElement()}")
val result = new DenseTensor[T]()
result.resize(sizes)
result.copy(this)
result
}
override def topk(k: Int, dim: Int, increase: Boolean, result: Tensor[T],
indices: Tensor[T], sortedResult: Boolean = true): (Tensor[T], Tensor[T]) = {
val selectDim = if (dim == -1) this.dim() else dim
require(selectDim > 0 && selectDim <= this.nDimension)
val sliceSize = this.size(selectDim)
require(k > 0 && k <= sliceSize)
val tmpResult = new Array[(T, Int)](sliceSize)
val topKSize = this.size()
topKSize(selectDim - 1) = k
val resultTensor = if (result == null) Tensor[T]() else result
resultTensor.resize(topKSize)
val indicesTensor = if (indices == null) Tensor[T]() else indices
indicesTensor.resize(topKSize)
@inline
def compare(a: T, b: T): Boolean = ev.isGreater(b, a) ^ !increase
DenseTensorDimApply.dimApply3[T](this, resultTensor, indicesTensor, selectDim,
(tdata, toffset, tstride, tsize, vdata, voffset, vstride, vsize, idata,
ioffset, istride, isize) => {
val set = new java.util.TreeSet[(T, Int)](new Comparator[(T, Int)] {
override def compare(o1: (T, Int), o2: (T, Int)): Int = {
val ret = if (ev.isGreaterEq(o1._1, o2._1)) {
if (o1._1 == o2._1 && o1._2 > o2._2) {
-1
} else {
1
}
} else {
-1
}
if (increase) {
-ret
} else {
ret
}
}
})
var i = 0
while (i < tsize) {
val v = tdata(toffset + i * tstride)
if (set.size() < k) {
set.add((v, i + 1))
} else if (compare(v, set.first()._1)) {
set.remove(set.first())
set.add((v, i + 1))
}
i += 1
}
val sorted = set.descendingIterator().asScala
i = 0
while (i < k) {
val current = sorted.next()
if (sortedResult) {
vdata(voffset + i * vstride) = current._1
idata(ioffset + i * istride) = ev.fromType(current._2)
} else {
vdata(voffset + (k - i - 1) * vstride) = current._1
idata(ioffset + (k - i - 1) * istride) = ev.fromType(current._2)
}
i += 1
}
})
(resultTensor, indicesTensor)
}
override def pow(x: Tensor[T], n: T): Tensor[T] = DenseTensorMath.pow[T](this, x, n)
override def pow(n: T): Tensor[T] = DenseTensorMath.pow[T](this, this, n)
override def square(): Tensor[T] = pow(ev.fromType(2.0))
override def log(x: Tensor[T]): Tensor[T] = DenseTensorMath.log[T](this, x)
override def log(): Tensor[T] = DenseTensorMath.log[T](this, this)
override def exp(x: Tensor[T]): Tensor[T] = DenseTensorMath.exp[T](this, x)
override def exp(): Tensor[T] = DenseTensorMath.exp[T](this, this)
override def sqrt(x: Tensor[T]): Tensor[T] = DenseTensorMath.sqrt[T](this, x)
override def sqrt(): Tensor[T] = DenseTensorMath.sqrt[T](this, this)
override def tanh(): Tensor[T] = DenseTensorMath.tanh[T](this, this)
override def tanh(x: Tensor[T]): Tensor[T] = DenseTensorMath.tanh[T](this, x)
override def log1p(x: Tensor[T]): Tensor[T] = DenseTensorMath.log1p[T](this, x)
override def log1p(): Tensor[T] = DenseTensorMath.log1p[T](this, this)
override def norm(y: Tensor[T], value: Int, dim: Int): Tensor[T] =
DenseTensorMath.norm(this, y, value, dim - 1)
override def abs(x: Tensor[T]): Tensor[T] = {
require(this.nElement() == x.nElement())
if (MKL.isMKLLoaded && this.isContiguous() && x.isContiguous()) {
ev.vAbs(this.nElement(), x.storage().array(), x.storageOffset() - 1,
this.storage().array(), this.storageOffset() - 1)
} else {
val func = new TensorFunc4[T] {
override def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
data1(offset1) = ev.abs(data2(offset2))
}
}
DenseTensorApply.apply2[T](this, x, func)
}
this
}
override def save(path: String, overWrite: Boolean): this.type = {
File.save(this, path, overWrite)
this
}
  /**
   * Fills the elements of this tensor selected by the mask with the given value.
   *
   * @param mask a tensor containing only 0s and 1s
   * @param value the value written wherever mask is 1
   * @return current tensor reference
   */
override def maskedFill(mask: Tensor[T], value: T): Tensor[T] = {
require(this.nElement() == mask.nElement())
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc4[T] {
def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
require(ev.toType[Int](data2(offset2)) == 1 || ev.toType[Int](data2(offset2)) == 0,
"Mask tensor can take 0 and 1 values only")
if (ev.toType[Int](data2(offset2)) == 1) {
data1(offset1) = value
}
}
}
DenseTensorApply.apply2[T](this, mask, func)
this
}
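  // Usage sketch (illustrative; the mask holds only 0s and 1s):
  //   val t = Tensor[Float](4).range(1, 4)                                // [1, 2, 3, 4]
  //   val mask = Tensor[Float](4).zero().setValue(2, 1f).setValue(4, 1f)  // [0, 1, 0, 1]
  //   t.maskedFill(mask, -1f)                                             // t becomes [1, -1, 3, -1]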
  /**
   * Copies the elements of y, in order, into the positions of this tensor where mask is 1.
   *
   * @param mask a tensor containing only 0s and 1s
   * @param y the source tensor; it must have at least as many elements as there are ones in mask
   * @return current tensor reference
   */
override def maskedCopy(mask: Tensor[T], y: Tensor[T]): Tensor[T] = {
require(this.nElement() == mask.nElement())
require(y.isContiguous())
val data3 = y.storage().array()
var offset = 0
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc4[T] {
override def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
require(ev.toType[Int](data2(offset2)) == 1 || ev.toType[Int](data2(offset2)) == 0,
"Mask tensor can take 0 and 1 values only")
if (ev.toType[Int](data2(offset2)) == 1) {
require(offset < data3.length, "Number of elements of y < number of ones in mask")
data1(offset1) = data3(offset)
offset += 1
}
}
}
DenseTensorApply.apply2[T](this, mask, func)
this
}
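  // Usage sketch (illustrative): copies y's values, in order, into the positions where mask is 1
  //   val t = Tensor[Float](4).zero()
  //   val mask = Tensor[Float](4).zero().setValue(1, 1f).setValue(3, 1f)  // [1, 0, 1, 0]
  //   t.maskedCopy(mask, Tensor[Float](2).range(5, 6))                    // t becomes [5, 0, 6, 0]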
  /**
   * Gathers all elements of this tensor aligned to a 1 in the corresponding mask into res.
   *
   * @param mask a tensor containing only 0s and 1s
   * @param res the tensor that receives the selected elements (resized as needed)
   * @return the result tensor res
   */
override def maskedSelect(mask: Tensor[T], res: Tensor[T]): Tensor[T] = {
require(this.nElement() == mask.nElement())
require(ev.isGreater(mask.sum(), ev.fromType(0)))
val length = mask.sum()
var offset = 0
res.resize(ev.toType[Double](length).toInt)
val result = res.storage().array()
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc4[T] {
override def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
require(ev.toType[Int](data2(offset2)) == 1 || ev.toType[Int](data2(offset2)) == 0,
"Mask tensor can take 0 and 1 values only")
if (ev.toType[Int](data2(offset2)) == 1) {
result(offset) = data1(offset1)
offset += 1
}
}
}
DenseTensorApply.apply2[T](this, mask, func)
res
}
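  // Usage sketch (illustrative): gathers the elements where mask is 1 into res
  //   val t = Tensor[Float](4).range(10, 40, 10)                          // [10, 20, 30, 40]
  //   val mask = Tensor[Float](4).zero().setValue(2, 1f).setValue(3, 1f)  // [0, 1, 1, 0]
  //   t.maskedSelect(mask, Tensor[Float]())                               // returns [20, 30]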
/**
* Implements > operator comparing each element in x with y
*
* @param x
* @param y
* @return current tensor reference
*/
override def gt(x: Tensor[T], y: Tensor[T]): Tensor[T] = {
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc6[T] {
def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int,
data3: Array[T], offset3: Int): Unit = {
        if (ev.isGreater(data2(offset2), data3(offset3))) {
data1(offset1) = ev.fromType(1)
} else {
data1(offset1) = ev.fromType(0)
}
}
}
DenseTensorApply.apply3[T](this, x, y, func)
this
}
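  // Usage sketch (illustrative): writes 1 where x > y and 0 elsewhere into this tensor
  //   Tensor[Float](3).gt(Tensor[Float](3).range(1, 3), Tensor[Float](3).fill(2f))   // [0, 0, 1]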
/**
* Implements < operator comparing each element in x with y
*
* @param x
* @param y
* @return current tensor reference
*/
override def lt(x: Tensor[T], y: Tensor[T]): Tensor[T] = {
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc6[T] {
def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int,
data3: Array[T], offset3: Int): Unit = {
        if (ev.toType[Double](ev.minus(data2(offset2), data3(offset3))) < 0) {
data1(offset1) = ev.fromType(1)
} else {
data1(offset1) = ev.fromType(0)
}
}
}
DenseTensorApply.apply3[T](this, x, y, func)
this
}
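  // Usage sketch (illustrative): writes 1 where x < y and 0 elsewhere into this tensor
  //   Tensor[Float](3).lt(Tensor[Float](3).range(1, 3), Tensor[Float](3).fill(2f))   // [1, 0, 0]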
/**
* Implements <= operator comparing each element in x with y
*
* @param x
* @param y
* @return current tensor reference
*/
override def le(x: Tensor[T], y: Tensor[T]): Tensor[T] = {
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc6[T] {
def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int,
data3: Array[T], offset3: Int): Unit = {
        if (ev.toType[Double](ev.minus(data2(offset2), data3(offset3))) <= 0) {
data1(offset1) = ev.fromType(1)
} else {
data1(offset1) = ev.fromType(0)
}
}
}
DenseTensorApply.apply3[T](this, x, y, func)
this
}
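  // Usage sketch (illustrative): writes 1 where x <= y and 0 elsewhere into this tensor
  //   Tensor[Float](3).le(Tensor[Float](3).range(1, 3), Tensor[Float](3).fill(2f))   // [1, 1, 0]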
  /**
   * Implements == operator comparing each element in x with value
   *
   * @param x the tensor whose elements are compared
   * @param value the value compared against each element of x
   * @return current tensor, holding 1 where equal and 0 otherwise
   */
override def eq(x: Tensor[T], value: T): Tensor[T] = {
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc4[T] {
def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
        if (data2(offset2) == value) {
data1(offset1) = ev.fromType(1)
} else {
data1(offset1) = ev.fromType(0)
}
}
}
DenseTensorApply.apply2[T](this, x, func)
this
}
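  // Usage sketch (illustrative): writes 1 where x equals the given value and 0 elsewhere
  //   Tensor[Float](3).eq(Tensor[Float](3).range(1, 3), 2f)   // [0, 1, 0]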
  /**
   * returns the value-norm of this tensor: (sum_i |x_i|^value)^(1 / value)
   *
   * @param value the order of the norm, must be greater than 0
   * @return the norm value
   */
override def norm(value: Int): T = {
require(value > 0, "norm value should be greater than 0")
var res: T = ev.fromType(0)
val func = new TensorFunc2[T] {
override def apply(data1: Array[T], offset1: Int): Unit = {
res = ev.plus(res, ev.pow(ev.abs(data1(offset1)), ev.fromType(value)))
}
}
DenseTensorApply.apply1[T](this, func)
ev.pow(res, ev.fromType(1.0 / value))
}
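  // Usage sketch (illustrative): the 2-norm of [3, 4] is sqrt(3^2 + 4^2) = 5
  //   Tensor[Float](2).setValue(1, 3f).setValue(2, 4f).norm(2)   // 5.0f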
  /**
   * replaces each element of this tensor with its sign (+1, -1 or 0) in place.
   *
   * @return current tensor reference
   */
override def sign(): Tensor[T] = {
val func = new TensorFunc2[T] {
override def apply(data1: Array[T], offset1: Int): Unit = {
if (ev.isGreater(data1(offset1), ev.zero)) {
data1(offset1) = ev.one
} else if (ev.isGreater(ev.zero, data1(offset1))) {
data1(offset1) = ev.fromType(-1)
} else {
data1(offset1) = ev.zero
}
}
}
DenseTensorApply.apply1[T](this, func)
this
}
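  // Usage sketch (illustrative; values assumed):
  //   Tensor[Float](3).setValue(1, -2f).setValue(3, 5f).sign()   // [-1, 0, 1]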
  /**
   * resizes this tensor to floor((xmax - xmin) / step) + 1 elements and fills it with values
   * from xmin to xmax, spaced by step (default 1).
   * @param xmin the first value
   * @param xmax the upper bound
   * @param step the spacing between consecutive values
   * @return this tensor
   */
override def range(xmin: Double, xmax: Double, step: Int = 1): Tensor[T] = {
    require((xmax >= xmin) && (step > 0),
      "upper bound and lower bound incoherent with step sign")
    val size = math.floor((xmax - xmin) / step + 1).toInt
if (this.nElement() != size) this.resize(size)
var i = 0
    // TODO: the performance of contiguous tensors should be optimized
val func = new TensorFunc2[T] {
override def apply(data1: Array[T], offset1: Int): Unit = {
data1(offset1) = ev.fromType(xmin + i * step)
i += 1
}
}
DenseTensorApply.apply1[T](this, func)
this
}
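  // Usage sketch (illustrative):
  //   Tensor[Float]().range(0, 10, 2)   // resized to 6 elements: [0, 2, 4, 6, 8, 10]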
override def addSingletonDimension(t: Tensor[T], dim: Int = 1): Tensor[T] = {
require(dim > 0 && dim <= t.dim() + 1, s"invalid dimension: $dim. " +
s"Tensor is of ${t.dim()} dimensions.")
val size = new Array[Int](t.dim() + 1)
val stride = new Array[Int](t.dim() + 1)
var d = 0
while (d < dim - 1) {
size(d) = t.size(d + 1)
stride(d) = t.stride(d + 1)
d += 1
}
size(dim - 1) = 1
stride(dim - 1) = 1
d += 1
while (d < t.dim + 1) {
size(d) = t.size(d)
stride(d) = t.stride(d)
d += 1
}
this.set(t.storage(), t.storageOffset(), size, stride)
}
override def addMultiDimension( t: Tensor[T], dims: Array[Int] = Array(1)): Tensor[T] = {
// increase 1 to the following pos after a previous smaller pos have one dimension inserted.
for (i <- 0 until dims.length) {
for (j <- i + 1 until dims.length) {
if (dims(j) > dims(i)) {
dims(j) = dims(j) + 1
}
}
}
var temp = t.clone()
var size = new Array[Int](t.dim())
var stride = new Array[Int](t.dim())
for ( i <- 0 until dims.length) {
require(dims(i) > 0 && dims(i) <= temp.dim() + 1, s"invalid dimension: ${dims(i)}. " +
s"Tensor is of ${temp.dim()} dimensions.")
size = new Array[Int](temp.dim() + 1)
stride = new Array[Int](temp.dim() + 1)
var d = 0
while (d < dims(i) - 1) {
size(d) = temp.size(d + 1)
stride(d) = temp.stride(d + 1)
d += 1
}
size(dims(i) - 1) = 1
stride(dims(i) - 1) = 1
d += 1
while (d < temp.dim + 1) {
size(d) = temp.size(d)
stride(d) = temp.stride(d)
d += 1
}
temp.set(temp.storage(), temp.storageOffset(), size, stride)
}
this.set(temp.storage(), temp.storageOffset(), size, stride)
}
/**
* Implements >= operator comparing each element in x with value
*
* @param x
* @param value
* @return
*/
override def ge(x: Tensor[T], value: Double): Tensor[T] = {
// todo: the performance of contiguous tensor should be optimized
val func = new TensorFunc4[T] {
def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
if (ev.toType[Double](data2(offset2)) >= value) {
data1(offset1) = ev.fromType(1)
} else {
data1(offset1) = ev.fromType(0)
}
}
}
DenseTensorApply.apply2[T](this, x, func)
this
}
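  // Usage sketch (illustrative): writes 1 where x >= value and 0 elsewhere into this tensor
  //   Tensor[Float](3).ge(Tensor[Float](3).range(1, 3), 2.0)   // [0, 1, 1]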
  /**
   * Accumulates the elements of y into this tensor by adding them to the slices selected
   * along dimension dim, in the order given by index. The shape of y must match the
   * indexed slices, otherwise an error is thrown.
   *
   * @param dim the dimension along which slices are selected
   * @param index a 1D tensor of target indices
   * @param y the tensor whose slices are accumulated
   * @return current tensor reference
   */
override def indexAdd(dim: Int, index: Tensor[T], y: Tensor[T]): Tensor[T] = {
require(dim <= y.nDimension(), "Indexing dim is out of bounds of tensor y")
require(index.nElement() == y.size(dim),
"Number of indices should be equal to source:size(dim)")
require(index.nDimension() == 1, "Index is supposed to be a vector")
val indexC = index.contiguous()
val numEle = indexC.nElement()
var i = 1
if (this.nDimension > 1) {
while (i <= numEle) {
this.select(dim, ev.toType[Double](indexC(Array(i))).toInt).add(y.select(dim, i))
i += 1
}
} else {
while (i <= numEle) {
this.narrow(1, ev.toType[Double](indexC(Array(i))).toInt, 1).add(y.narrow(1, i, 1))
i += 1
}
}
this
}
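  // Usage sketch (illustrative): rows of y are added into the rows of this tensor picked by index
  //   val acc = Tensor[Float](3, 2).zero()
  //   val idx = Tensor[Float](2).setValue(1, 1f).setValue(2, 3f)   // target rows 1 and 3
  //   acc.indexAdd(1, idx, Tensor[Float](2, 2).fill(1f))           // rows 1 and 3 become [1, 1]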
  /**
   * Fills this tensor with the slices of y selected along dimension dim by the entries of
   * index (a 1D tensor). The result has the same number of dimensions as y.
   *
   * @param dim the dimension along which slices are selected
   * @param index a 1D tensor of source indices
   * @param y the source tensor
   * @return current tensor reference
   */
override def index(dim: Int, index: Tensor[T], y: Tensor[T]): Tensor[T] = {
require(dim <= y.nDimension(), "Indexing dim is out of bounds of tensor y")
require(index.nDimension() == 1, "Index is supposed to be a vector")
require(y.nDimension() > 0, "Source tensor is empty")
val indexC = index.contiguous()
val numEle = indexC.nElement()
val newSize = y.size()
newSize(dim - 1) = numEle
this.resize(newSize)
var i = 1
if (y.nDimension() == 1) {
while (i <= numEle) {
this.narrow(1, i, 1).add(y.narrow(1, ev.toType[Double](indexC(Array(i))).toInt, 1))
i += 1
}
} else {
while (i <= numEle) {
this.select(dim, i).copy(y.select(dim, ev.toType[Double](indexC(Array(i))).toInt))
i += 1
}
}
this
}
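  // Usage sketch (illustrative): gathers the rows of y selected by index into this tensor
  //   val src = Tensor[Float](6).range(1, 6).reshape(Array(3, 2))  // rows [1,2], [3,4], [5,6]
  //   val idx = Tensor[Float](2).setValue(1, 3f).setValue(2, 1f)
  //   Tensor[Float]().index(1, idx, src)                           // rows [5,6] then [1,2]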
override def toTensor[D](implicit env: TensorNumeric[D]): Tensor[D] = {
if (ev.getType() == env.getType()) {
this.asInstanceOf[Tensor[D]]
} else {
      throw new IllegalArgumentException(s"The type ${env.getType().getClass}" +
        s" in toTensor[${env.getType().getClass}] is not the same " +
        s"as the numeric type ${ev.getType().getClass} of the " +
        "corresponding module, please keep them the same.")
}
}
override def getTensorNumeric(): TensorNumeric[T] = ev
override def getTensorType: TensorType = DenseType
override def floor(y: Tensor[T]): Tensor[T] = {
this.map(y, (a, b) => ev.floor(b))
}
override def floor(): Tensor[T] = {
this.apply1(a => ev.floor(a))
}
override def ceil(): Tensor[T] = {
this.apply1(a => ev.ceil(a))
}
override def negative(x: Tensor[T]): Tensor[T] = {
this.map(x, (a, b) => ev.negative(b))
this
}
override def inv(): Tensor[T] = {
this.apply1(a => ev.inv(a))
}
override def reduce(dim: Int, result: Tensor[T], reducer: (T, T) => T): Tensor[T] = {
DenseTensorDimApply.dimApply2[T](result.asInstanceOf[DenseTensor[T]], this, dim - 1,
(r, rOffset, rStride, rSize, t, tOffset, tStride, tSize) => {
r(rOffset) = t(tOffset)
var i = 1
while(i < tSize) {
r(rOffset) = reducer(r(rOffset), t(tOffset + i * tStride))
i += 1
}
})
result
}
override def toArray(): Array[T] = {
    require(this.dim() == 1, "toArray only supports 1D tensors")
val n = this.nElement()
val array = new Array[T](n)
var i = 0
while(i < n) {
array(i) = this.valueAt(i + 1)
i += 1
}
array
}
override def erf(): Tensor[T] = {
this.apply1(a => ev.erf(a))
}
override def erfc(): Tensor[T] = {
this.apply1(a => ev.erfc(a))
}
override def logGamma(): Tensor[T] = {
this.apply1(a => ev.logGamma(a))
}
override def digamma(): Tensor[T] = {
this.apply1(a => ev.digamma(a))
}
override private[bigdl] def toQuantizedTensor: QuantizedTensor[T] =
throw new IllegalArgumentException("DenseTensor cannot be cast to QuantizedTensor")
}
object DenseTensor {
def apply[@specialized(Float, Double) T: ClassTag](value: T)(
implicit ev: TensorNumeric[T]): Tensor[T] = {
new DenseTensor[T](new ArrayStorage[T](Array(value)), 0, Array[Int](),
Array[Int](), 0)
}
private[tensor] def squeeze[@specialized(Float, Double) T](self: DenseTensor[T]): Tensor[T] = {
var ndim = 0
var d = 0
while (d < self.nDimension) {
if (self._size(d) != 1) {
if (d != ndim) {
self._size(ndim) = self._size(d)
self._stride(ndim) = self._stride(d)
}
ndim += 1
}
d += 1
}
if (ndim == 0 && self.nDimension > 0) {
self._size(0) = 1
self._stride(0) = 1
ndim = 1
}
self.nDimension = ndim
self
}
private[tensor] def squeeze[@specialized(Float, Double) T](self: DenseTensor[T],
_dim: Int): Tensor[T] = {
require(_dim >= 0 && _dim < self.nDimension, "dimension out of range")
if (self._size(_dim) == 1 && self.nDimension > 1) {
var d = _dim
while (d < self.nDimension - 1) {
self._size(d) = self._size(d + 1)
self._stride(d) = self._stride(d + 1)
d += 1
}
self.nDimension -= 1
}
self
}
private[tensor] def newWithStorage[@specialized(Float, Double) T: ClassTag](
tensor: DenseTensor[T], storage: ArrayStorage[T], storageOffset: Int, size: Array[Int],
stride: Array[Int], ev: TensorNumeric[T]): DenseTensor[T] = {
if (size != null && stride != null) {
require(size.length == stride.length, "inconsistent size")
}
implicit val ev2 = ev
val self = if (tensor == null) new DenseTensor[T]() else tensor
val nDimension = if (size != null) size.length else if (stride != null) stride.length else 0
DenseTensor.rawSet[T](self, storage, storageOffset, nDimension, size, stride)
}
private[tensor] def newWithTensor[@specialized(Float, Double) T: ClassTag](
other: DenseTensor[T])(implicit ev: TensorNumeric[T]): DenseTensor[T] = {
val self = new DenseTensor[T]()
DenseTensor.rawSet[T](self, other._storage, other._storageOffset,
other.nDimension, other._size, other._stride)
}
private[tensor] def rawSet[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], storage: ArrayStorage[T], storageOffset: Int,
nDimension: Int, _size: Array[Int], _stride: Array[Int]): DenseTensor[T] = {
self._storage = storage
require(storageOffset >= 0, "Tensor: invalid storage offset")
self._storageOffset = storageOffset
rawResize[T](self, nDimension, _size, _stride)
}
private[tensor] def rawResize[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], nDim: Int, _size: Array[Int], _stride: Array[Int])
: DenseTensor[T] = {
// resize as a scalar
if (nDim == 0 && _size.isEmpty) {
self._size = Array[Int]()
self._stride = Array[Int]()
self.nDimension = nDim
val totalSize = 1
if (self._storage == null ) {
self._storage = new ArrayStorage(new Array[T](totalSize + self._storageOffset))
} else if (totalSize + self._storageOffset > self._storage.length) {
self._storage.resize(totalSize + self._storageOffset)
}
return self
}
var hasCorrectSize = true
var nDim_ = 0
var d = 0
while (d < nDim) {
nDim_ = nDim_ + 1
if (self.nDimension > d && _size(d) != self._size(d)) {
hasCorrectSize = false
}
if (self.nDimension > d && _stride != null && _stride(d) >= 0 &&
_stride(d) != self._stride(d)) {
hasCorrectSize = false
}
d += 1
}
if (nDim_ != self.nDimension) hasCorrectSize = false
if (hasCorrectSize) return self
if (nDim_ > 0) {
if (nDim_ != self.nDimension) {
self._size = new Array[Int](nDim)
self._stride = new Array[Int](nDim)
self.nDimension = nDim
}
var totalSize = 1
var d = self.nDimension - 1
while (d >= 0) {
self._size(d) = _size(d)
if (_stride != null && _stride(d) >= 0) {
self._stride(d) = _stride(d)
} else {
if (d == self.nDimension - 1) {
self._stride(d) = 1
} else {
self._stride(d) = self._size(d + 1) * self._stride(d + 1)
}
}
totalSize = totalSize + (self._size(d) - 1) * self._stride(d)
d -= 1
}
if (totalSize + self._storageOffset > 0) {
if (self._storage == null ) {
self._storage = new ArrayStorage(new Array[T](totalSize + self._storageOffset))
} else if (totalSize + self._storageOffset > self._storage.length) {
self._storage.resize(totalSize + self._storageOffset)
}
}
} else {
self.nDimension = 0
}
self
}
private[tensor] def newClone[@specialized(Float, Double) T: ClassTag](self: DenseTensor[T])(
implicit ev: TensorNumeric[T]): DenseTensor[T] = {
val tensor = new DenseTensor[T]()
resizeAs(tensor, self)
copy(tensor, self)
tensor
}
private[tensor] def newContiguous[@specialized(Float, Double) T: ClassTag](self: DenseTensor[T])(
implicit ev: TensorNumeric[T]): DenseTensor[T] = {
if (!isContiguous(self)) {
newClone(self)
} else {
self
}
}
private[tensor] def newSelect[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], _dimension: Int,
_sliceIndex: Int)(implicit ev: TensorNumeric[T]): DenseTensor[T] = {
val tensor = DenseTensor.newWithTensor(self)
select(tensor, null, _dimension, _sliceIndex)
tensor
}
private[tensor] def newNarrow[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], _dimension: Int,
_firstIndex: Int, _size: Int)(implicit ev: TensorNumeric[T]): DenseTensor[T] = {
val tensor = DenseTensor.newWithTensor(self)
narrow(tensor, null, _dimension, _firstIndex, _size)
tensor
}
private[tensor] def newTranspose[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], _dimension1: Int,
_dimension2: Int)(implicit ev: TensorNumeric[T]): DenseTensor[T] = {
val tensor = DenseTensor.newWithTensor(self)
transpose(tensor, null, _dimension1, _dimension2)
tensor
}
private[tensor] def resizeAs[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], src: Tensor[_]): Unit = {
if (!isSameSizeAs(self, src)) rawResize(self, src.nDimension(), src.size(), null)
}
private[tensor] def resize[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], sizes: Array[Int], strides: Array[Int] = null) = {
require(sizes != null, "invalid size")
if (strides != null) {
require(sizes.length == strides.length, "invalid stride")
}
rawResize(self, sizes.length, sizes, strides)
}
private[tensor] def isSameSizeAs[@specialized T](
self: DenseTensor[T], src: Tensor[_]): Boolean = {
if (self.nDimension != src.nDimension()) {
return false
}
if (self.isEmpty != src.isEmpty) {
return false
}
var d = 0
while (d < self.nDimension) {
if (self.size(d + 1) != src.size(d + 1)) {
return false
}
d += 1
}
return true
}
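// A tensor is contiguous when its strides describe a dense row-major layout:
// stride 1 in the innermost non-singleton dimension and each outer stride equal
// to the product of the inner sizes (size-1 dimensions are skipped).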
private[tensor] def isContiguous[@specialized(Float, Double) T](
self: DenseTensor[T]): Boolean = {
var s = 1
var d = self.nDimension - 1
while (d >= 0) {
if (self._size(d) != 1) {
if (s != self._stride(d)) {
return false
} else {
s = s * self._size(d)
}
}
d -= 1
}
return true
}
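// Derives row-major strides from sizes: the last dimension gets stride 1 and each
// preceding stride is the product of all sizes that follow it.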
private[tensor] def size2Stride(sizes: Array[Int]): Array[Int] = {
val strides = new Array[Int](sizes.length)
var jump = 1
var i = strides.length - 1
while (i >= 0) {
strides(i) = jump
jump = jump * sizes(i)
i -= 1
}
strides
}
private[tensor] def set[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], other: DenseTensor[T]): Tensor[T] = {
if (self != other) {
DenseTensor.rawSet(self, other.storage.asInstanceOf[ArrayStorage[T]], other.storageOffset,
other.nDimension, other.size, other.stride)
} else {
self
}
}
private[tensor] def offsetFromIndice[@specialized(Float, Double) T](
self: DenseTensor[T], indexes: Array[Int]): Int = {
var offset = self._storageOffset
var d = 0
while (d < indexes.length) {
offset = offset + (indexes(d) - 1) * self._stride(d)
d += 1
}
offset
}
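// Makes `self` a view of `source` fixed at `_sliceIndex` along `_dimension`,
// dropping that dimension so the result has one dimension fewer.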
private[tensor] def select[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], source: DenseTensor[T], _dimension: Int, _sliceIndex: Int): Unit = {
var src = source
if (src == null) src = self
require(src.nDimension > 0, "cannot select on a scalar")
require(_dimension >= 0 && _dimension < src.nDimension, "out of range")
require(_sliceIndex >= 0 && _sliceIndex < src.size(_dimension + 1),
s"${_sliceIndex} out of range 0 to ${src.size(_dimension + 1) - 1}")
set(self, src)
narrow(self, null, _dimension, _sliceIndex, 1)
var d = _dimension
while (d < self.nDimension - 1) {
self._size(d) = self._size(d + 1)
self._stride(d) = self._stride(d + 1)
d += 1
}
self.nDimension = self.nDimension - 1
}
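// Restricts `_dimension` to [_firstIndex, _firstIndex + size) by shifting the
// storage offset; the narrowed tensor shares storage with the source.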
private[tensor] def narrow[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], source: DenseTensor[T], _dimension: Int, _firstIndex: Int, size: Int)
: Unit = {
var src = source
if (src == null) {
src = self
}
require(_dimension >= 0 && _dimension < src.nDimension, "dimension out of range")
require(_firstIndex >= 0 && _firstIndex < src.size(_dimension + 1),
s"firstIndex(${_firstIndex}) out of range [0, ${src.size(_dimension + 1)})")
require(size > 0 && _firstIndex + size <= src.size(_dimension + 1),
s"size out of range $size (0, ${src.size(_dimension + 1)} - ${_firstIndex}]")
set(self, src)
if (_firstIndex > 0) {
self._storageOffset = self._storageOffset + _firstIndex * self._stride(_dimension)
}
self._size(_dimension) = size
}
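// Swaps the size and stride entries of the two dimensions; no data is moved.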
private[tensor] def transpose[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], source: DenseTensor[T], _dimension1: Int, _dimension2: Int): Unit = {
var src = source
if (src == null) src = self
require(_dimension1 >= 0 && _dimension1 < src.nDimension, "out of range")
require(_dimension2 >= 0 && _dimension2 < src.nDimension, "out of range")
set(self, src)
if (_dimension1 == _dimension2) {
return
}
var z = self._stride(_dimension1)
self._stride(_dimension1) = self._stride(_dimension2)
self._stride(_dimension2) = z
z = self._size(_dimension1)
self._size(_dimension1) = self._size(_dimension2)
self._size(_dimension2) = z
}
private[tensor] def get1d[@specialized(Float, Double) T](self: DenseTensor[T], x0: Int): T = {
require(self.nDimension != 0, "tensor must have one dimension")
require(x0 >= 0 && x0 < self._size(0), "out of range")
self._storage(self._storageOffset + x0 * self._stride(0))
}
private[tensor] def get1dTensor[@specialized(Float, Double) T: ClassTag](
self: DenseTensor[T], x0: Int)(implicit ev: TensorNumeric[T]): DenseTensor[T] = {
new DenseTensor(new ArrayStorage(Array(get1d(self, x0))))
}
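// Copies `src` into `self`: a single System.arraycopy when both tensors are
// contiguous with identical strides, otherwise an element-wise fallback.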
private[tensor] def copy[@specialized T](
self: DenseTensor[T], src: Tensor[T]): Unit = {
require(self.nElement() == src.nElement(), s"self element number(${self.nElement()}) is not" +
s" equal to source element number(${src.nElement()})")
if (self.isEmpty) {
return
}
if (self.isContiguous() && src.isContiguous() && sameStride(self.stride(), src.stride())) {
System.arraycopy(src.storage().array(), src.storageOffset - 1, self.storage().array(),
self.storageOffset - 1, self.nElement())
return
}
val func2 = new TensorFunc4[T] {
override def apply(data1: Array[T], offset1: Int, data2: Array[T], offset2: Int): Unit = {
data1(offset1) = data2(offset2)
}
}
DenseTensorApply.apply2[T](self, src, func2)
}
private[tensor] def randperm[@specialized(Float, Double) T: ClassTag](size: Int)(
implicit ev: TensorNumeric[T]): Tensor[T] = {
require(size >= 1, "invalid size")
// create an ordinal array
val array = new Array[T](size)
var i = 1
while (i <= size) {
array(i - 1) = ev.fromType[Int](i)
i = i + 1
}
// Randomly exchange the elements
i = 0
while (i < size - 1) {
val rand = Math.floor(RNG.random() % (size - i)).toInt
val tmp = array(i)
array(i) = array(rand + i)
array(rand + i) = tmp
i += 1
}
Tensor(new ArrayStorage(array))
}
private[tensor] def sameStride(l: Array[Int], r: Array[Int]): Boolean = {
if (l.length != r.length) return false
var i = 0
while (i < l.length) {
if (l(i) != r(i)) {
return false
}
i += 1
}
return true
}
private[tensor] def range[@specialized(Float, Double) T: ClassTag]
(xmin: Double, xmax: Double, step: Int = 1)(
implicit ev: TensorNumeric[T]): Tensor[T] = {
val newTensor = Tensor[T]()
newTensor.range(xmin, xmax, step)
}
private[tensor] def ones[@specialized(Float, Double) T: ClassTag](sizes: Array[Int])(
implicit ev: TensorNumeric[T]): Tensor[T] = {
val length = sizes.product
Tensor(Storage(new Array[T](length)), 1, sizes).fill(ev.fromType[Int](1))
}
private[tensor] def gaussian1D[@specialized T: ClassTag](
size: Int = 3,
sigma: Double = 0.25,
amplitude: Int = 1,
normalize: Boolean = false,
mean: Double = 0.5,
tensor: Tensor[T] = null)(implicit ev: TensorNumeric[T]): Tensor[T] = {
val gauss = if (null != tensor) {
require(tensor.dim() == 1, "expecting 1D tensor")
require(tensor.nElement() > 0, "expecting non-empty tensor")
tensor
} else {
Tensor[T](size)
}
val center = mean * gauss.nElement() + 0.5
// generate kernel
var i = 1
while (i <= gauss.nElement()) {
gauss.setValue(i, ev.fromType[Double](amplitude * math.exp(-(math.pow((i - center)
/ (sigma * size), 2) / 2)))
)
i += 1
}
if (normalize) {
gauss.div(gauss.sum())
}
gauss
}
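// Fast broadcast requires `other`'s trailing dimensions to match `tensor`'s; once a
// size-1 dimension of `other` starts the broadcast, all of its remaining leading
// dimensions must also be 1.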
private[tensor] def canFastBroadcast[T](tensor: Tensor[T],
other: Tensor[T]): Boolean = {
if (tensor.nDimension < other.nDimension()) return false
val delta = tensor.nDimension - other.nDimension()
var d = other.nDimension()
// Check dimensions
var broadcasting = false
while(d > 0) {
if (broadcasting) {
if (other.size(d) != 1) return false
} else if (tensor.size(delta + d) != other.size(d)) {
if (other.size(d) != 1) return false
broadcasting = true
}
d -= 1
}
return true
}
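// Computes the broadcast shape: trailing dimensions are aligned, each pair must
// match or contain a 1, and the result takes the larger of the two.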
private[tensor] def expandSize[T: ClassTag](tensor: Tensor[T],
other: Tensor[T]): Array[Int] = {
val errorMsg = s"tensor size not match ${tensor.size.mkString("x")} " +
s"${other.size.mkString("x")}"
val longTensor = if (tensor.dim() > other.dim()) tensor else other
val shortTensor = if (tensor.dim() > other.dim()) other else tensor
val ndim = longTensor.nDimension()
val delta = longTensor.nDimension() - shortTensor.nDimension()
val size = new Array[Int](ndim)
var i = ndim - 1
while (i >= delta) {
require(longTensor.size(i + 1) == shortTensor.size(i + 1 - delta) ||
longTensor.size(i + 1) == 1 ||
shortTensor.size(i + 1 - delta) == 1, errorMsg)
size(i) = math.max(longTensor.size(i + 1), shortTensor.size(i + 1 - delta))
i -= 1
}
while (i >= 0) {
size(i) = longTensor.size(i + 1)
i -= 1
}
size
}
private[tensor] def apply[T: ClassTag](
sparseTensor: SparseTensor[T],
res: Tensor[T] = null)(implicit ev: TensorNumeric[T]): Tensor[T] = {
val dt = if (null == res) Tensor(sparseTensor.size()) else res
val srcIndex = new Array[Int](dt.dim())
val tgtIndex = new Array[Int](dt.dim())
// fill DenseTensor with sparseTensors' active values one by one
(0 until sparseTensor._nElement).foreach { i =>
// targetIndex = sourceIndex - indicesOffset
srcIndex.indices.foreach { j =>
srcIndex(j) = sparseTensor._indices(j)(i + sparseTensor._storageOffset) + 1
tgtIndex(j) = srcIndex(j) - sparseTensor._indicesOffset(j)
}
dt(tgtIndex) = sparseTensor(srcIndex)
}
dt
}
}
|
zhangxiaoli73/BigDL
|
spark/dl/src/main/scala/com/intel/analytics/bigdl/tensor/DenseTensor.scala
|
Scala
|
apache-2.0
| 92,055 |
package com.airtonjal.poc.producer
import java.text.{SimpleDateFormat, DateFormat}
import java.util.{Locale, TimeZone, Date}
import com.airtonjal.poc.parser.pchr.PCHRParser
import com.airtonjal.poc.producer.impl.PCHRProducer
import com.airtonjal.poc.utils.{SizeFormatter, CommandLineUtils}
import org.apache.commons.logging.LogFactory
import scala.collection.JavaConversions._
import scala.concurrent.Future
/**
* Producer entry point
* @author <a href="mailto:[email protected]">Airton Libório</a>
*/
object Main {
private val log = LogFactory.getLog(getClass())
val PCHR_TOPIC = "pchr"
def main(args: Array[String]): Unit = {
log.info("Starting POC pipeline")
val files = CommandLineUtils.getFiles(args(0))
if (files != null) {
val pchrParser = new PCHRParser
val producer = new PCHRProducer(PCHR_TOPIC)
var parserTime = 0L
var totalVolume = 0L
var numberOfFiles = 0L
val timeFormat: DateFormat = new SimpleDateFormat("HH:mm:ss", Locale.getDefault)
var producerTime = 0L
var numberOfMessages = 0L
var sent = 0
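// For each input file: parse it, push the resulting PCHR messages to Kafka and
// log the running parse/produce throughput.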
files foreach { file =>
val parserStartTime = System.currentTimeMillis()
val pchrs = pchrParser.parseList(file)
numberOfFiles = numberOfFiles + 1
totalVolume = totalVolume + file.length()
log.info("File " + file.getName() + " parsed\tProducing to Kafka")
parserTime = parserTime + System.currentTimeMillis() - parserStartTime
log.info("PCHR Parsing throughput: " + SizeFormatter.readableFileSize((totalVolume / (parserTime / 1000f)).toInt) +
" per second\t\t\tTotal parsed: " + SizeFormatter.readableFileSize(totalVolume) + "\t\tTotal parser time: " + timeFormat.format(new Date(parserTime - TimeZone.getDefault.getRawOffset)))
val producerStartTime = System.currentTimeMillis()
producer.produce(pchrs.toList)
numberOfMessages = numberOfMessages + pchrs.size()
producerTime = producerTime + System.currentTimeMillis() - producerStartTime
log.info("PCHR Producer throughput: " + (numberOfMessages / (producerTime / 1000f)).toInt +
" messages per second\t\t\tTotal produced: " + numberOfMessages + " messages\t\tTotal producer time: " + timeFormat.format(new Date(producerTime - TimeZone.getDefault.getRawOffset)))
}
}
}
}
|
airtonjal/Big-Data-Pipeline
|
producer/src/main/scala/com/airtonjal/poc/producer/Main.scala
|
Scala
|
mit
| 2,386 |
/*
Stratagem is a model checker for transition systems described using rewriting
rules and strategies.
Copyright (C) 2013 - SMV@Geneva University.
Program written by Edmundo Lopez Bobeda <edmundo [at] lopezbobeda.net>.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package ch.unige.cui.smv.stratagem.ts
/**
* Represents the One strategy.
* @param S the strategy it takes as parameter.
* @param n represents which subterm will be rewritten by this one.
* @author mundacho
*
*/
case class One(S: Strategy, n: Int) extends NonVariableStrategy {
require(n >= 0)
}
/**
* Companion object. Adds a convenience factory that defaults the subterm index n to 0.
*/
object One {
def apply(S: Strategy) = new One(S, 0)
}
|
didierbuchs/oldstratagem
|
src/main/scala/ch/unige/cui/smv/stratagem/ts/One.scala
|
Scala
|
gpl-2.0
| 1,319 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600a.v3
import uk.gov.hmrc.ct.box.{Calculated, CtBoxIdentifier, CtOptionalInteger}
import uk.gov.hmrc.ct.ct600.v3.calculations.LoansToParticipatorsCalculator
import uk.gov.hmrc.ct.ct600a.v3.retriever.CT600ABoxRetriever
case class A30(value: Option[Int]) extends CtBoxIdentifier(name = "A30 - Amount repaid - sum of all iterations of amount of loan repaid (after period end but equal or less than 9 months from period end)") with CtOptionalInteger
object A30 extends Calculated[A30, CT600ABoxRetriever] with LoansToParticipatorsCalculator {
override def calculate(fieldValueRetriever: CT600ABoxRetriever): A30 = {
calculateA30(fieldValueRetriever.retrieveCP2(), fieldValueRetriever.retrieveLoansToParticipators())
}
}
|
scottcutts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/ct600a/v3/A30.scala
|
Scala
|
apache-2.0
| 1,360 |
/*
* Copyright (c) 2010-2011 Belmont Technology Pty Ltd. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sodatest.api { package reflection {
import org.sodatest.coercion.CoercionReflectionUtil
/**
* [[org.sodatest.api.SodaFixture]] base class that supports the discovery of [[org.sodatest.api.SodaEvent]]s
* and [[org.sodatest.api.SodaReport]]s by reflecting on the subclass' functions.
*
* ReflectiveSodaFixture is probably the easiest way to implement the [[org.sodatest.api.SodaFixture]] trait.
* Simply extend this trait and then define in the subclass public functions that have no parameters
* and which return either a [[org.sodatest.api.SodaEvent]] or [[org.sodatest.api.SodaReport]] as required.
* ReflectiveSodaFixture will canonize the incoming Event or Report name and then discover and invoke
* a function on the subclass that has a name which, when also canonized, matches the Event or
* Report name.
*
* (Names in SodaTest are canonized by removing all non-alpha-numeric characters and
* converting all alpha characters to lower-case. e.g. canonized("Secret Report #2") -> "secretreport2")
*
* <b>Example</b>
* {{{
* class MyFixture extends ReflectiveSodaFixture {
* def secretReport2: SodaReport = new SecretReport2()
* }
* }}}
*/
trait ReflectiveSodaFixture extends SodaFixture {
import CoercionReflectionUtil._
/**
* Creates a SodaEvent by reflecting on this SodaFixture to find a function whose canonized
* name is equivalent to the canonized version of the specified name.
*/
def createEvent(name: String): Option[SodaEvent] = invokeNoParamFunctionReturning(classOf[SodaEvent], name, this)
/**
* Creates a SodaReport by reflecting on this SodaFixture to find a function whose canonized
* name is equivalent to the canonized version of the specified name.
*/
def createReport(name: String): Option[SodaReport] = invokeNoParamFunctionReturning(classOf[SodaReport], name, this)
}
}}
|
GrahamLea/SodaTest
|
sodatest-api/src/main/scala/org/sodatest/api/reflection/ReflectiveSodaFixture.scala
|
Scala
|
apache-2.0
| 2,503 |
package muster.codec.json
import muster.{Consumer, Junk}
class Ac {
type Foo = Junk
object WithAlias {
implicit val WithAliasConsumer = Consumer.consumer[WithAlias]
}
case class WithAlias(in: Foo)
case class NoAlias(in: Junk)
}
|
json4s/muster
|
codecs/json/src/test/scala/muster/codec/json/Ac.scala
|
Scala
|
mit
| 247 |
object Test extends App {
def printIArray[T](arr: IArray[T]): Unit =
println(arr.asInstanceOf[Array[T]].mkString("IArray(", ",", ")"))
// This is used to check the correct result, as well as checking that the IArray was not mutated in place
def assertDifferent[T, U](expr: IArray[T], sources: IArray[U]*): Unit = {
sources.foreach(source =>
assert(expr.asInstanceOf[AnyRef] ne source.asInstanceOf[AnyRef], "IArray was mutated in place")
)
printIArray(expr)
}
val arr1 = IArray[Int](1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
val arr2 = IArray[Int](11, 12, 13, 14, 15, 16, 17, 18, 19, 20)
assertDifferent(arr1 ++ arr2, arr1, arr2)
println(arr1.contains(7))
println(arr1.count(_ % 2 == 0))
assertDifferent(arr1.drop(8), arr1)
assertDifferent(arr1.dropRight(8), arr1)
assertDifferent(arr1.dropWhile(_ < 8), arr1)
println(arr1.exists(_ % 6 == 0))
assertDifferent(arr1.filter(_ % 2 == 0), arr1)
assertDifferent(arr1.filterNot(_ % 2 == 0), arr1)
println(arr1.find(_ % 5 == 0))
assertDifferent(arr1.flatMap(x => List(x, x)), arr1)
val twoDArr = IArray(List(1, 2), List(3, 4))
assertDifferent(twoDArr.flatten[Int], twoDArr)
println(arr1.fold(0)(_ + _))
println(arr1.foldLeft("")((acc, x) => acc + x.toString))
println(arr1.foldRight("")((x, acc) => acc + x.toString))
println(arr1.forall(_ > 5))
arr1.foreach(x => println(x))
println(arr2.head)
println(arr1.headOption)
println(arr1.indexOf(5, 7))
println(arr1.indexWhere(_ > 3, 1))
println(arr2.indices.mkString(","))
assertDifferent(arr1.init, arr1)
println(arr1.isEmpty)
println(arr1.iterator.take(3).mkString(","))
println(arr1.last)
println(arr2.lastOption)
println(arr2.lastIndexOf(17))
println(arr1.lastIndexWhere(_ < 5))
assertDifferent(arr1.map(_ + 10), arr1)
println(arr1.nonEmpty)
val (even, odd) = arr1.partition(_ % 2 == 0)
assertDifferent(even, arr1)
assertDifferent(odd, arr1)
assertDifferent(arr1.reverse, arr1)
assertDifferent(arr1.scan(0)(_ + _), arr1)
assertDifferent(arr1.scanLeft("")((acc, x) => acc + x.toString), arr1)
assertDifferent(arr1.scanRight("")((x, acc) => acc + x.toString), arr1)
println(arr2.size)
assertDifferent(arr1.slice(5,7), arr1)
assertDifferent(arr1.sortBy(- _), arr1)
assertDifferent(arr1.sortWith((x, y) => x.toString.length > y.toString.length || x < y), arr1)
assertDifferent(arr1.sorted, arr1)
val (smaller, greater) = arr1.span(_ < 5)
assertDifferent(smaller, arr1)
assertDifferent(greater, arr1)
val (first, last) = arr1.splitAt(7)
assertDifferent(first, arr1)
assertDifferent(last, arr1)
println(arr1.startsWith(IArray(1,2,3,4,5,6,42)))
assertDifferent(arr1.tail, arr1)
assertDifferent(arr1.take(3), arr1)
assertDifferent(arr1.takeRight(4), arr1)
assertDifferent(arr1.takeWhile(_ < 3), arr1)
val tupArr = IArray[(Int, String)]((1, "1"), (2, "2"), (3, "3"))
val (ints, strings) = tupArr.unzip
assertDifferent(ints, tupArr)
assertDifferent(strings, tupArr)
assertDifferent(arr1.zip(arr2), arr1, arr2)
}
|
lampepfl/dotty
|
tests/run-bootstrapped/iarray-extmtds.scala
|
Scala
|
apache-2.0
| 3,090 |
/*
* Copyright (c) 2015 Andreas Wolf
*
* See the LICENSE file in the project root for further copyright information.
*/
package info.andreaswolf.roadhopper.measurements
import com.emotioncity.soriento.ODocumentReader
import com.emotioncity.soriento.RichODocumentImpl._
import com.emotioncity.soriento.annotations.EmbeddedList
import com.graphhopper.util.shapes.GHPoint3D
import com.orientechnologies.orient.core.record.impl.ODocument
import info.andreaswolf.roadhopper.road.RoadSegment
import scala.collection.LinearSeq
object Measurement {
implicit object MeasurementReader extends ODocumentReader[Measurement] {
def read(oDocument: ODocument): Measurement = {
new Measurement(
// this typed get() call is possible because we imported implicit conversions from soriento.RichODocumentImpl
oDocument.get[String]("name").get,
oDocument.getAsList[DataPoint]("points").get,
oDocument.getAsList[RoadSegment]("road")(RoadSegmentReader).getOrElse(List[RoadSegment]())
)
}
}
implicit object RoadSegmentReader extends ODocumentReader[RoadSegment] {
override def read(oDocument: ODocument): RoadSegment = {
new RoadSegment(
oDocument.get[GHPoint3D]("start").get,
oDocument.get[GHPoint3D]("end").get,
oDocument.get[Double]("speedLimit").get
)
}
}
}
case class Measurement(name: String, @EmbeddedList points: LinearSeq[DataPoint], road: List[RoadSegment]) {
}
|
andreaswolf/roadhopper
|
src/main/scala/info/andreaswolf/roadhopper/measurements/Measurement.scala
|
Scala
|
mit
| 1,412 |
package com.cloudray.scalapress.item
import com.cloudray.scalapress.item.attr.{Attribute, AttributeFuncs}
import scala.util.Random
import com.cloudray.scalapress.search.Sort
/** @author Stephen Samuel */
object ItemSorter {
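/**
 * Sorts items according to the requested sort order. Attribute sorts require a
 * sortAttribute, Random is reproducible through the seed, and any unmatched case
 * falls back to sorting by name.
 */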
def sort(objs: Iterable[Item],
sort: Sort,
sortAttribute: Option[Attribute],
seed: Long = System.currentTimeMillis): Seq[Item] = {
(sort, sortAttribute) match {
case (Sort.Attribute, Some(attribute)) =>
objs.toSeq.sortBy(obj => AttributeFuncs.attributeValue(obj, attribute).getOrElse(""))
case (Sort.AttributeDesc, Some(attribute)) =>
objs.toSeq.sortBy(obj => AttributeFuncs.attributeValue(obj, attribute).getOrElse("")).reverse
case (Sort.Price, _) => objs.toSeq.sortBy(_.price)
case (Sort.PriceHigh, _) => objs.toSeq.sortBy(_.price).reverse
case (Sort.Newest, _) => objs.toSeq.sortBy(_.id).reverse
case (Sort.Oldest, _) => objs.toSeq.sortBy(_.id)
case (Sort.Random, _) => new Random(seed).shuffle(objs.toSeq)
case _ => objs.toSeq.sortBy(_.name)
}
}
}
|
vidyacraghav/scalapress
|
src/main/scala/com/cloudray/scalapress/item/ItemSorter.scala
|
Scala
|
apache-2.0
| 1,092 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes.relatedPartyTransactions
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
case class AC7801(value: Option[Boolean]) extends CtBoxIdentifier(name = "is incoming (vs outgoing) transaction")
with CtOptionalBoolean
with Input
with ValidatableBox[Frs102AccountsBoxRetriever]
with Validators {
override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
collectErrors(
validateAsMandatory(this)
)
}
}
|
hmrc/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/relatedPartyTransactions/AC7801.scala
|
Scala
|
apache-2.0
| 1,155 |
/*
* Shadowsocks - A shadowsocks client for Android
* Copyright (C) 2014 <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* ___====-_ _-====___
* _--^^^#####// \\\\#####^^^--_
* _-^##########// ( ) \\\\##########^-_
* -############// |\\^^/| \\\\############-
* _/############// (@::@) \\\\############\\_
* /#############(( \\\\// ))#############\\
* -###############\\\\ (oo) //###############-
* -#################\\\\ / VV \\ //#################-
* -###################\\\\/ \\//###################-
* _#/|##########/\\######( /\\ )######/\\##########|\\#_
* |/ |#/\\#/\\#/\\/ \\#/\\##\\ | | /##/\\#/ \\/\\#/\\#/\\#| \\|
* ` |/ V V ` V \\#\\| | | |/#/ V ' V V \\| '
* ` ` ` ` / | | | | \\ ' ' ' '
* ( | | | | )
* __\\ | | | | /__
* (vvv(VVV)(VVV)vvv)
*
* HERE BE DRAGONS
*
*/
package com.github.shadowsocks
import java.io.File
import java.lang.reflect.{InvocationTargetException, Method}
import java.util.Locale
import android.app._
import android.content._
import android.content.pm.{PackageInfo, PackageManager}
import android.net.{Network, ConnectivityManager}
import android.os._
import android.support.v4.app.NotificationCompat
import android.util.{SparseArray, Log}
import android.widget.Toast
import com.github.shadowsocks.aidl.Config
import com.github.shadowsocks.utils._
import com.google.android.gms.analytics.HitBuilders
import org.apache.http.conn.util.InetAddressUtils
import scala.collection._
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ops._
class ShadowsocksNatService extends Service with BaseService {
val TAG = "ShadowsocksNatService"
val CMD_IPTABLES_RETURN = " -t nat -A OUTPUT -p tcp -d 0.0.0.0 -j RETURN"
val CMD_IPTABLES_DNAT_ADD_SOCKS = " -t nat -A OUTPUT -p tcp " +
"-j DNAT --to-destination 127.0.0.1:8123"
private val mStartForegroundSignature = Array[Class[_]](classOf[Int], classOf[Notification])
private val mStopForegroundSignature = Array[Class[_]](classOf[Boolean])
private val mSetForegroundSignature = Array[Class[_]](classOf[Boolean])
private val mSetForegroundArgs = new Array[AnyRef](1)
private val mStartForegroundArgs = new Array[AnyRef](2)
private val mStopForegroundArgs = new Array[AnyRef](1)
var lockReceiver: BroadcastReceiver = null
var closeReceiver: BroadcastReceiver = null
var connReceiver: BroadcastReceiver = null
var notificationManager: NotificationManager = null
var config: Config = null
var apps: Array[ProxiedApp] = null
val myUid = Process.myUid()
private var mSetForeground: Method = null
private var mStartForeground: Method = null
private var mStopForeground: Method = null
private lazy val application = getApplication.asInstanceOf[ShadowsocksApplication]
private val dnsAddressCache = new SparseArray[String]
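// Reads Network#netId via reflection; the field is not part of the public SDK.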
def getNetId(network: Network): Int = {
network.getClass.getDeclaredField("netId").get(network).asInstanceOf[Int]
}
def restoreDnsForAllNetwork() {
val manager = getSystemService(Context.CONNECTIVITY_SERVICE).asInstanceOf[ConnectivityManager]
val networks = manager.getAllNetworks
val cmdBuf = new ArrayBuffer[String]()
networks.foreach(network => {
val netId = getNetId(network)
val oldDns = dnsAddressCache.get(netId)
if (oldDns != null) {
cmdBuf.append("ndc resolver setnetdns %d \\"\\" %s".formatLocal(Locale.ENGLISH, netId, oldDns))
dnsAddressCache.remove(netId)
}
})
if (cmdBuf.nonEmpty) Console.runRootCommand(cmdBuf.toArray)
}
def setDnsForAllNetwork(dns: String) {
val manager = getSystemService(Context.CONNECTIVITY_SERVICE).asInstanceOf[ConnectivityManager]
val networks = manager.getAllNetworks
if (networks == null) return
val cmdBuf = new ArrayBuffer[String]()
networks.foreach(network => {
val networkInfo = manager.getNetworkInfo(network)
if (networkInfo == null) return
if (networkInfo.isConnected) {
val netId = getNetId(network)
val curDnsList = manager.getLinkProperties(network).getDnsServers
if (curDnsList != null) {
import scala.collection.JavaConverters._
val curDns = curDnsList.asScala.map(ip => ip.getHostAddress).mkString(" ")
if (curDns != dns) {
dnsAddressCache.put(netId, curDns)
cmdBuf.append("ndc resolver setnetdns %d \\"\\" %s".formatLocal(Locale.ENGLISH, netId, dns))
}
}
}
})
if (cmdBuf.nonEmpty) Console.runRootCommand(cmdBuf.toArray)
}
def setupDns() {
setDnsForAllNetwork("127.0.0.1")
}
def resetDns() = {
restoreDnsForAllNetwork()
}
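// Flushes DNS caches: per-network via ndc on Lollipop and above, otherwise through
// the legacy default/wlan0 resolver commands.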
def flushDns() {
if (Utils.isLollipopOrAbove) {
val manager = getSystemService(Context.CONNECTIVITY_SERVICE).asInstanceOf[ConnectivityManager]
val networks = manager.getAllNetworks
val cmdBuf = new ArrayBuffer[String]()
networks.foreach(network => {
val networkInfo = manager.getNetworkInfo(network)
if (networkInfo.isAvailable) {
val netId = network.getClass.getDeclaredField("netId").get(network).asInstanceOf[Int]
cmdBuf.append("ndc resolver flushnet %d".formatLocal(Locale.ENGLISH, netId))
}
})
Console.runRootCommand(cmdBuf.toArray)
} else {
Console.runRootCommand(Array("ndc resolver flushdefaultif", "ndc resolver flushif wlan0"))
}
}
def destroyConnectionReceiver() {
if (connReceiver != null) {
unregisterReceiver(connReceiver)
connReceiver = null
}
resetDns()
}
def initConnectionReceiver() {
val filter = new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION)
connReceiver = new BroadcastReceiver {
override def onReceive(context: Context, intent: Intent) = {
setupDns()
}
}
registerReceiver(connReceiver, filter)
}
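// Writes the ACL file when a bypass route is selected, renders the ss-local NAT
// config and launches the ss-local daemon.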
def startShadowsocksDaemon() {
if (config.route != Route.ALL) {
val acl: Array[String] = config.route match {
case Route.BYPASS_LAN => getResources.getStringArray(R.array.private_route)
case Route.BYPASS_CHN => getResources.getStringArray(R.array.chn_route_full)
}
ConfigUtils.printToFile(new File(Path.BASE + "acl.list"))(p => {
acl.foreach(item => p.println(item))
})
}
val conf = ConfigUtils
.SHADOWSOCKS.formatLocal(Locale.ENGLISH, config.proxy, config.remotePort, config.localPort,
config.sitekey, config.encMethod, 10)
ConfigUtils.printToFile(new File(Path.BASE + "ss-local-nat.conf"))(p => {
p.println(conf)
})
val cmd = new ArrayBuffer[String]
cmd += (Path.BASE + "ss-local"
, "-b" , "127.0.0.1"
, "-t" , "600"
, "-c" , Path.BASE + "ss-local-nat.conf"
, "-f" , Path.BASE + "ss-local-nat.pid")
if (config.route != Route.ALL) {
cmd += "--acl"
cmd += (Path.BASE + "acl.list")
}
if (BuildConfig.DEBUG) Log.d(TAG, cmd.mkString(" "))
Console.runCommand(cmd.mkString(" "))
}
def startTunnel() {
if (config.isUdpDns) {
val conf = ConfigUtils
.SHADOWSOCKS.formatLocal(Locale.ENGLISH, config.proxy, config.remotePort, 8153,
config.sitekey, config.encMethod, 10)
ConfigUtils.printToFile(new File(Path.BASE + "ss-tunnel-nat.conf"))(p => {
p.println(conf)
})
val cmd = new ArrayBuffer[String]
cmd += (Path.BASE + "ss-tunnel"
, "-u"
, "-t" , "10"
, "-b" , "127.0.0.1"
, "-L" , "8.8.8.8:53"
, "-c" , Path.BASE + "ss-tunnel-nat.conf"
, "-f" , Path.BASE + "ss-tunnel-nat.pid")
cmd += ("-l" , "8153")
if (BuildConfig.DEBUG) Log.d(TAG, cmd.mkString(" "))
Console.runCommand(cmd.mkString(" "))
} else {
val conf = ConfigUtils
.SHADOWSOCKS.formatLocal(Locale.ENGLISH, config.proxy, config.remotePort, 8163,
config.sitekey, config.encMethod, 10)
ConfigUtils.printToFile(new File(Path.BASE + "ss-tunnel-nat.conf"))(p => {
p.println(conf)
})
val cmdBuf = new ArrayBuffer[String]
cmdBuf += (Path.BASE + "ss-tunnel"
, "-u"
, "-t" , "10"
, "-b" , "127.0.0.1"
, "-l" , "8163"
, "-L" , "8.8.8.8:53"
, "-c" , Path.BASE + "ss-tunnel-nat.conf"
, "-f" , Path.BASE + "ss-tunnel-nat.pid")
if (BuildConfig.DEBUG) Log.d(TAG, cmdBuf.mkString(" "))
Console.runCommand(cmdBuf.mkString(" "))
}
}
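// Renders a pdnsd config (direct mode with reject/black lists when bypassing CHN
// routes, otherwise local-only) and starts the pdnsd daemon.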
def startDnsDaemon() {
val conf = if (config.route == Route.BYPASS_CHN) {
val reject = ConfigUtils.getRejectList(getContext, application)
val blackList = ConfigUtils.getBlackList(getContext, application)
ConfigUtils.PDNSD_DIRECT.formatLocal(Locale.ENGLISH, "127.0.0.1", 8153,
Path.BASE + "pdnsd-nat.pid", reject, blackList, 8163)
} else {
ConfigUtils.PDNSD_LOCAL.formatLocal(Locale.ENGLISH, "127.0.0.1", 8153,
Path.BASE + "pdnsd-nat.pid", 8163)
}
ConfigUtils.printToFile(new File(Path.BASE + "pdnsd-nat.conf"))(p => {
p.println(conf)
})
val cmd = Path.BASE + "pdnsd -c " + Path.BASE + "pdnsd-nat.conf"
if (BuildConfig.DEBUG) Log.d(TAG, cmd)
Console.runCommand(cmd)
}
def getVersionName: String = {
var version: String = null
try {
val pi: PackageInfo = getPackageManager.getPackageInfo(getPackageName, 0)
version = pi.versionName
} catch {
case e: PackageManager.NameNotFoundException =>
version = "Package name not found"
}
version
}
def startRedsocksDaemon() {
val conf = ConfigUtils.REDSOCKS.formatLocal(Locale.ENGLISH, config.localPort)
val cmd = Path.BASE + "redsocks -p %sredsocks-nat.pid -c %sredsocks-nat.conf"
.formatLocal(Locale.ENGLISH, Path.BASE, Path.BASE)
ConfigUtils.printToFile(new File(Path.BASE + "redsocks-nat.conf"))(p => {
p.println(conf)
})
if (BuildConfig.DEBUG) Log.d(TAG, cmd)
Console.runCommand(cmd)
}
/** Starts the local daemons and installs the NAT rules for the current config. */
def handleConnection: Boolean = {
startTunnel()
if (!config.isUdpDns) startDnsDaemon()
startRedsocksDaemon()
startShadowsocksDaemon()
setupIptables()
true
}
def invokeMethod(method: Method, args: Array[AnyRef]) {
try {
method.invoke(this, args: _*)
} catch {
case e: InvocationTargetException =>
Log.w(TAG, "Unable to invoke method", e)
case e: IllegalAccessException =>
Log.w(TAG, "Unable to invoke method", e)
}
}
def notifyForegroundAlert(title: String, info: String, visible: Boolean) {
val openIntent = new Intent(this, classOf[Shadowsocks])
openIntent.setFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT)
val contentIntent = PendingIntent.getActivity(this, 0, openIntent, 0)
val closeIntent = new Intent(Action.CLOSE)
val actionIntent = PendingIntent.getBroadcast(this, 0, closeIntent, 0)
val builder = new NotificationCompat.Builder(this)
builder
.setWhen(0)
.setTicker(title)
.setContentTitle(getString(R.string.app_name))
.setContentText(info)
.setContentIntent(contentIntent)
.setSmallIcon(R.drawable.ic_stat_shadowsocks)
.addAction(android.R.drawable.ic_menu_close_clear_cancel, getString(R.string.stop),
actionIntent)
if (visible)
builder.setPriority(NotificationCompat.PRIORITY_DEFAULT)
else
builder.setPriority(NotificationCompat.PRIORITY_MIN)
startForegroundCompat(1, builder.build)
}
def onBind(intent: Intent): IBinder = {
Log.d(TAG, "onBind")
if (Action.SERVICE == intent.getAction) {
binder
} else {
null
}
}
override def onCreate() {
super.onCreate()
ConfigUtils.refresh(this)
notificationManager = this
.getSystemService(Context.NOTIFICATION_SERVICE)
.asInstanceOf[NotificationManager]
try {
mStartForeground = getClass.getMethod("startForeground", mStartForegroundSignature: _*)
mStopForeground = getClass.getMethod("stopForeground", mStopForegroundSignature: _*)
} catch {
case e: NoSuchMethodException =>
mStartForeground = null
mStopForeground = null
}
try {
mSetForeground = getClass.getMethod("setForeground", mSetForegroundSignature: _*)
} catch {
case e: NoSuchMethodException =>
throw new IllegalStateException(
"OS doesn't have Service.startForeground OR Service.setForeground!")
}
}
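// Kills any previously started ss-local/ss-tunnel/pdnsd/redsocks instances via their
// pid files, removes the stale pid/conf files and flushes the nat OUTPUT chain.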
def killProcesses() {
val cmd = new ArrayBuffer[String]()
for (task <- Array("ss-local", "ss-tunnel", "pdnsd", "redsocks")) {
cmd.append("chmod 666 %s%s-nat.pid".formatLocal(Locale.ENGLISH, Path.BASE, task))
}
Console.runRootCommand(cmd.toArray)
cmd.clear()
for (task <- Array("ss-local", "ss-tunnel", "pdnsd", "redsocks")) {
try {
val pid = scala.io.Source.fromFile(Path.BASE + task + "-nat.pid").mkString.trim.toInt
cmd.append("kill -9 %d".formatLocal(Locale.ENGLISH, pid))
Process.killProcess(pid)
} catch {
case e: Throwable => Log.e(TAG, "unable to kill " + task)
}
cmd.append("rm -f %s%s-nat.pid".formatLocal(Locale.ENGLISH, Path.BASE, task))
cmd.append("rm -f %s%s-nat.conf".formatLocal(Locale.ENGLISH, Path.BASE, task))
}
Console.runRootCommand(cmd.toArray)
Console.runRootCommand(Utils.getIptables + " -t nat -F OUTPUT")
}
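// Flushes the nat OUTPUT chain, adds RETURN rules for traffic to the proxy server,
// localhost, this app's own uid and TCP port 53, redirects UDP DNS to the local
// pdnsd (8153), and finally DNATs TCP to redsocks (8123) either globally or per
// selected application uid.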
def setupIptables() = {
val init_sb = new ArrayBuffer[String]
val http_sb = new ArrayBuffer[String]
init_sb.append("ulimit -n 4096")
init_sb.append(Utils.getIptables + " -t nat -F OUTPUT")
val cmd_bypass = Utils.getIptables + CMD_IPTABLES_RETURN
if (!InetAddressUtils.isIPv6Address(config.proxy.toUpperCase)) {
init_sb.append(cmd_bypass.replace("-p tcp -d 0.0.0.0", "-d " + config.proxy))
}
init_sb.append(cmd_bypass.replace("-p tcp -d 0.0.0.0", "-d 127.0.0.1"))
init_sb.append(cmd_bypass.replace("-p tcp -d 0.0.0.0", "-m owner --uid-owner " + myUid))
init_sb.append(cmd_bypass.replace("-d 0.0.0.0", "--dport 53"))
init_sb.append(Utils.getIptables
+ " -t nat -A OUTPUT -p udp --dport 53 -j DNAT --to-destination 127.0.0.1:" + 8153)
if (config.isGlobalProxy || config.isBypassApps) {
http_sb.append(Utils.getIptables + CMD_IPTABLES_DNAT_ADD_SOCKS)
}
if (!config.isGlobalProxy) {
if (apps == null || apps.length <= 0) {
apps = AppManager.getProxiedApps(this, config.proxiedAppString)
}
val uidSet: mutable.HashSet[Int] = new mutable.HashSet[Int]
for (app <- apps) {
if (app.proxied) {
uidSet.add(app.uid)
}
}
for (uid <- uidSet) {
if (!config.isBypassApps) {
http_sb.append((Utils.getIptables + CMD_IPTABLES_DNAT_ADD_SOCKS).replace("-t nat", "-t nat -m owner --uid-owner " + uid))
} else {
init_sb.append(cmd_bypass.replace("-d 0.0.0.0", "-m owner --uid-owner " + uid))
}
}
}
Console.runRootCommand(init_sb.toArray)
Console.runRootCommand(http_sb.toArray)
}
/**
* This is a wrapper around the new startForeground method, using the older
* APIs if it is not available.
*/
def startForegroundCompat(id: Int, notification: Notification) {
if (mStartForeground != null) {
mStartForegroundArgs(0) = int2Integer(id)
mStartForegroundArgs(1) = notification
invokeMethod(mStartForeground, mStartForegroundArgs)
return
}
mSetForegroundArgs(0) = boolean2Boolean(x = true)
invokeMethod(mSetForeground, mSetForegroundArgs)
notificationManager.notify(id, notification)
}
/**
* This is a wrapper around the new stopForeground method, using the older
* APIs if it is not available.
*/
def stopForegroundCompat(id: Int) {
if (mStopForeground != null) {
mStopForegroundArgs(0) = boolean2Boolean(x = true)
try {
mStopForeground.invoke(this, mStopForegroundArgs: _*)
} catch {
case e: InvocationTargetException =>
Log.w(TAG, "Unable to invoke stopForeground", e)
case e: IllegalAccessException =>
Log.w(TAG, "Unable to invoke stopForeground", e)
}
return
}
notificationManager.cancel(id)
mSetForegroundArgs(0) = boolean2Boolean(x = false)
invokeMethod(mSetForeground, mSetForegroundArgs)
}
override def startRunner(c: Config) {
config = c
// register close receiver
val filter = new IntentFilter()
filter.addAction(Intent.ACTION_SHUTDOWN)
filter.addAction(Action.CLOSE)
closeReceiver = new BroadcastReceiver() {
def onReceive(context: Context, intent: Intent) {
Toast.makeText(context, R.string.stopping, Toast.LENGTH_SHORT).show()
stopRunner()
}
}
registerReceiver(closeReceiver, filter)
if (Utils.isLollipopOrAbove) {
val screenFilter = new IntentFilter()
screenFilter.addAction(Intent.ACTION_SCREEN_ON)
screenFilter.addAction(Intent.ACTION_SCREEN_OFF)
screenFilter.addAction(Intent.ACTION_USER_PRESENT)
lockReceiver = new BroadcastReceiver() {
def onReceive(context: Context, intent: Intent) {
if (getState == State.CONNECTED) {
val action = intent.getAction
if (action == Intent.ACTION_SCREEN_OFF) {
notifyForegroundAlert(getString(R.string.forward_success),
getString(R.string.service_running).formatLocal(Locale.ENGLISH, config.profileName), false)
} else if (action == Intent.ACTION_SCREEN_ON) {
val keyGuard = getSystemService(Context.KEYGUARD_SERVICE).asInstanceOf[KeyguardManager]
if (!keyGuard.inKeyguardRestrictedInputMode) {
notifyForegroundAlert(getString(R.string.forward_success),
getString(R.string.service_running).formatLocal(Locale.ENGLISH, config.profileName), true)
}
} else if (action == Intent.ACTION_USER_PRESENT) {
notifyForegroundAlert(getString(R.string.forward_success),
getString(R.string.service_running).formatLocal(Locale.ENGLISH, config.profileName), true)
}
}
}
}
registerReceiver(lockReceiver, screenFilter)
}
// send event
application.tracker.send(new HitBuilders.EventBuilder()
.setCategory(TAG)
.setAction("start")
.setLabel(getVersionName)
.build())
changeState(State.CONNECTING)
spawn {
if (config.proxy == "198.199.101.152") {
val holder = application.containerHolder
try {
config = ConfigUtils.getPublicConfig(getBaseContext, holder.getContainer, config)
} catch {
case ex: Exception =>
changeState(State.STOPPED, getString(R.string.service_failed))
stopRunner()
config = null
}
}
if (config != null) {
// Clean up
killProcesses()
var resolved: Boolean = false
if (!InetAddressUtils.isIPv4Address(config.proxy) &&
!InetAddressUtils.isIPv6Address(config.proxy)) {
Utils.resolve(config.proxy, enableIPv6 = true) match {
case Some(a) =>
config.proxy = a
resolved = true
case None => resolved = false
}
} else {
resolved = true
}
if (resolved && handleConnection) {
// Set DNS
flushDns()
notifyForegroundAlert(getString(R.string.forward_success),
getString(R.string.service_running).formatLocal(Locale.ENGLISH, config.profileName), true)
changeState(State.CONNECTED)
} else {
changeState(State.STOPPED, getString(R.string.service_failed))
stopRunner()
}
}
}
}
override def stopRunner() {
// change the state
changeState(State.STOPPING)
// clean up receiver
if (closeReceiver != null) {
unregisterReceiver(closeReceiver)
closeReceiver = null
}
if (Utils.isLollipopOrAbove) {
if (lockReceiver != null) {
unregisterReceiver(lockReceiver)
lockReceiver = null
}
}
// send event
application.tracker.send(new HitBuilders.EventBuilder()
.setCategory(TAG)
.setAction("stop")
.setLabel(getVersionName)
.build())
// reset NAT
killProcesses()
// stop the service if no callback registered
if (getCallbackCount == 0) {
stopSelf()
}
stopForegroundCompat(1)
// change the state
changeState(State.STOPPED)
}
override def stopBackgroundService() {
stopSelf()
}
override def getTag = TAG
override def getServiceMode = Mode.NAT
override def getContext = getBaseContext
}
|
catinred2/shadowsocks-android
|
src/main/scala/com/github/shadowsocks/ShadowsocksNatService.scala
|
Scala
|
gpl-3.0
| 21,780 |
package se.citerus.dddsample.domain.model.handling
import se.citerus.dddsample.domain.model.location.UnLocode
case class UnknownLocationException(unlocode: UnLocode) extends Exception() {
override def getMessage = "No location with UN locode " + unlocode.idString + " exists in the system"
}
|
oluies/ddd-sample-scala
|
src/main/scala/se/citerus/dddsample/domain/model/handling/UnknownLocationException.scala
|
Scala
|
mit
| 304 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.charts.component
import io.gatling.commons.stats.ErrorStats
import io.gatling.commons.util.StringHelper.EmptyFastring
import io.gatling.commons.util.HtmlHelper.HtmlRichString
import io.gatling.commons.util.NumberHelper._
import com.dongxiguo.fastring.Fastring.Implicits._
private[charts] class ErrorsTableComponent(errors: Seq[ErrorStats]) extends Component {
def js = fast"""
$$('#container_errors').sortable('#container_errors');
"""
def html = if (errors.isEmpty)
EmptyFastring
else
fast"""<div class="statistics extensible-geant collapsed">
<div class="title">
<div class="title_collapsed" style="cursor: auto;">ERRORS</div>
</div>
<table id="container_errors" class="statistics-in extensible-geant">
<thead>
<tr>
<th id="error-col-1" class="header sortable"><span>Error</span></th>
<th id="error-col-2" class="header sortable"><span>Count</span></th>
<th id="error-col-3" class="header sortable"><span>Percentage</span></th>
</tr>
</thead>
<tbody>
${
errors.zipWithIndex.map {
case (error, index) => fast"""
<tr>
<td class="error-col-1 total">${error.message.htmlEscape}<span class="value" style="display:none">$index</span></td>
<td class="value error-col-2 total">${error.count}</td>
<td class="value error-col-3 total">${error.percentage.toPrintableString} %</td>
</tr>"""
}.mkFastring
}
</tbody>
</table>
</div>
"""
def jsFiles: Seq[String] = Seq.empty
}
|
MykolaB/gatling
|
gatling-charts/src/main/scala/io/gatling/charts/component/ErrorsTableComponent.scala
|
Scala
|
apache-2.0
| 2,211 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.check.body
import java.nio.charset.StandardCharsets._
import scala.collection.mutable
import org.mockito.Mockito._
import io.gatling.{ ValidationValues, BaseSpec }
import io.gatling.core.CoreDsl
import io.gatling.core.check.CheckResult
import io.gatling.core.config.GatlingConfiguration
import io.gatling.core.session._
import io.gatling.http.HttpDsl
import io.gatling.http.check.HttpCheckSupport
import io.gatling.http.response.{ StringResponseBody, Response }
class HttpBodyRegexCheckSpec extends BaseSpec with ValidationValues with CoreDsl with HttpDsl {
object RegexSupport extends HttpCheckSupport
val regexCheck = RegexSupport.regex _
implicit val configuration = GatlingConfiguration.loadForTest()
implicit def cache: mutable.Map[Any, Any] = mutable.Map.empty
val session = Session("mockSession", 0)
private def mockResponse(body: String) = {
val response = mock[Response]
when(response.body) thenReturn new StringResponseBody(body, UTF_8)
response
}
"regex.find.exists" should "find single result" in {
val response = mockResponse("""{"id":"1072920417"}""")
regexCheck(""""id":"(.+?)"""").find.exists.build.check(response, session).succeeded shouldBe CheckResult(Some("1072920417"), None)
}
it should "find first occurrence" in {
val response = mockResponse("""[{"id":"1072920417"},"id":"1072920418"]""")
regexCheck(""""id":"(.+?)"""").find.exists.build.check(response, session).succeeded shouldBe CheckResult(Some("1072920417"), None)
}
"regex.findAll.exists" should "find all occurrences" in {
val response = mockResponse("""[{"id":"1072920417"},"id":"1072920418"]""")
regexCheck(""""id":"(.+?)"""").findAll.exists.build.check(response, session).succeeded shouldBe CheckResult(Some(Seq("1072920417", "1072920418")), None)
}
it should "fail when finding nothing instead of returning an empty Seq" in {
val response = mockResponse("""[{"id":"1072920417"},"id":"1072920418"]""")
val regexValue = """"foo":"(.+?)""""
regexCheck(regexValue).findAll.exists.build.check(response, session).failed shouldBe s"regex($regexValue).findAll.exists, found nothing"
}
it should "fail with expected message when transforming" in {
val response = mockResponse("""[{"id":"1072920417"},"id":"1072920418"]""")
val regexValue = """"foo":"(.+?)""""
regexCheck(regexValue).findAll.transform(_.map(_ + "foo")).exists.build.check(response, session).failed shouldBe s"regex($regexValue).findAll.transform.exists, found nothing"
}
"regex.count.exists" should "find all occurrences" in {
val response = mockResponse("""[{"id":"1072920417"},"id":"1072920418"]""")
regexCheck(""""id":"(.+?)"""").count.exists.build.check(response, session).succeeded shouldBe CheckResult(Some(2), None)
}
it should "return 0 when finding nothing instead of failing" in {
val response = mockResponse("""[{"id":"1072920417"},"id":"1072920418"]""")
val regexValue = """"foo":"(.+?)""""
regexCheck(regexValue).count.exists.build.check(response, session).succeeded shouldBe CheckResult(Some(0), None)
}
}
|
GabrielPlassard/gatling
|
gatling-http/src/test/scala/io/gatling/http/check/body/HttpBodyRegexCheckSpec.scala
|
Scala
|
apache-2.0
| 3,742 |
package carldata.sf.compiler
/**
* Helper object defining Result, the status code returned by the compiler checkers.
*/
object Result {
sealed trait Result {
/** Join 2 results */
def andThen(r2: => Result): Result
}
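// Example: Ok.andThen(r) evaluates to r, while Err short-circuits: Err("e").andThen(Ok) remains Err("e").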
object Ok extends Result {
override def andThen(r2: => Result): Result = r2
}
case class Err(reason: String) extends Result {
override def andThen(r2: => Result): Result = this
}
}
|
carldata/flow-script
|
src/main/scala/carldata/sf/compiler/Result.scala
|
Scala
|
apache-2.0
| 445 |
/*
* Copyright 2021 ABSA Group Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package za.co.absa.spline.persistence.model
case class Attribute(
override val _key: ArangoDocument.Key,
override val _belongsTo: Option[ArangoDocument.Id],
dataType: Option[Any],
extra: Map[String, Any],
name: String,
) extends Vertex {
def this() = this(null, null, null, null, null)
}
|
AbsaOSS/spline
|
persistence/src/main/scala/za/co/absa/spline/persistence/model/Attribute.scala
|
Scala
|
apache-2.0
| 904 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import java.util.Objects
import scala.collection.mutable.ArrayBuffer
import scala.ref.WeakReference
import org.scalatest.Matchers
import org.scalatest.concurrent.Eventually
import org.apache.spark._
import org.apache.spark.internal.config._
import org.apache.spark.io.CompressionCodec
import org.apache.spark.memory.MemoryTestingUtils
import org.apache.spark.util.CompletionIterator
class ExternalAppendOnlyMapSuite extends SparkFunSuite
with LocalSparkContext
with Eventually
with Matchers {
import TestUtils.{assertNotSpilled, assertSpilled}
private val allCompressionCodecs = CompressionCodec.ALL_COMPRESSION_CODECS
private def createCombiner[T](i: T) = ArrayBuffer[T](i)
private def mergeValue[T](buffer: ArrayBuffer[T], i: T): ArrayBuffer[T] = buffer += i
private def mergeCombiners[T](buf1: ArrayBuffer[T], buf2: ArrayBuffer[T]): ArrayBuffer[T] =
buf1 ++= buf2
private def createExternalMap[T] = {
val context = MemoryTestingUtils.fakeTaskContext(sc.env)
new ExternalAppendOnlyMap[T, T, ArrayBuffer[T]](
createCombiner[T], mergeValue[T], mergeCombiners[T], context = context)
}
private def createSparkConf(loadDefaults: Boolean, codec: Option[String] = None): SparkConf = {
val conf = new SparkConf(loadDefaults)
// Make the Java serializer write a reset instruction (TC_RESET) after each object to test
// for a bug we had with bytes written past the last object in a batch (SPARK-2792)
conf.set("spark.serializer.objectStreamReset", "1")
conf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer")
conf.set("spark.shuffle.spill.compress", codec.isDefined.toString)
conf.set("spark.shuffle.compress", codec.isDefined.toString)
codec.foreach { c => conf.set("spark.io.compression.codec", c) }
// Ensure that we actually have multiple batches per spill file
conf.set("spark.shuffle.spill.batchSize", "10")
conf
}
test("single insert") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insert(1, 10)
val it = map.iterator
assert(it.hasNext)
val kv = it.next()
assert(kv._1 === 1 && kv._2 === ArrayBuffer[Int](10))
assert(!it.hasNext)
sc.stop()
}
test("multiple insert") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insert(1, 10)
map.insert(2, 20)
map.insert(3, 30)
val it = map.iterator
assert(it.hasNext)
assert(it.toSet === Set[(Int, ArrayBuffer[Int])](
(1, ArrayBuffer[Int](10)),
(2, ArrayBuffer[Int](20)),
(3, ArrayBuffer[Int](30))))
sc.stop()
}
test("insert with collision") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
map.insertAll(Seq(
(1, 10),
(2, 20),
(3, 30),
(1, 100),
(2, 200),
(1, 1000)))
val it = map.iterator
assert(it.hasNext)
val result = it.toSet[(Int, ArrayBuffer[Int])].map(kv => (kv._1, kv._2.toSet))
assert(result === Set[(Int, Set[Int])](
(1, Set[Int](10, 100, 1000)),
(2, Set[Int](20, 200)),
(3, Set[Int](30))))
sc.stop()
}
test("ordering") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map1 = createExternalMap[Int]
map1.insert(1, 10)
map1.insert(2, 20)
map1.insert(3, 30)
val map2 = createExternalMap[Int]
map2.insert(2, 20)
map2.insert(3, 30)
map2.insert(1, 10)
val map3 = createExternalMap[Int]
map3.insert(3, 30)
map3.insert(1, 10)
map3.insert(2, 20)
val it1 = map1.iterator
val it2 = map2.iterator
val it3 = map3.iterator
var kv1 = it1.next()
var kv2 = it2.next()
var kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
kv1 = it1.next()
kv2 = it2.next()
kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
kv1 = it1.next()
kv2 = it2.next()
kv3 = it3.next()
assert(kv1._1 === kv2._1 && kv2._1 === kv3._1)
assert(kv1._2 === kv2._2 && kv2._2 === kv3._2)
sc.stop()
}
test("null keys and values") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val map = createExternalMap[Int]
val nullInt = null.asInstanceOf[Int]
map.insert(1, 5)
map.insert(2, 6)
map.insert(3, 7)
map.insert(4, nullInt)
map.insert(nullInt, 8)
map.insert(nullInt, nullInt)
val result = map.iterator.toSet[(Int, ArrayBuffer[Int])].map(kv => (kv._1, kv._2.sorted))
assert(result === Set[(Int, Seq[Int])](
(1, Seq[Int](5)),
(2, Seq[Int](6)),
(3, Seq[Int](7)),
(4, Seq[Int](nullInt)),
(nullInt, Seq[Int](nullInt, 8))
))
sc.stop()
}
test("simple aggregator") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
// reduceByKey
val rdd = sc.parallelize(1 to 10).map(i => (i%2, 1))
val result1 = rdd.reduceByKey(_ + _).collect()
assert(result1.toSet === Set[(Int, Int)]((0, 5), (1, 5)))
// groupByKey
val result2 = rdd.groupByKey().collect().map(x => (x._1, x._2.toList)).toSet
assert(result2.toSet === Set[(Int, Seq[Int])]
((0, List[Int](1, 1, 1, 1, 1)), (1, List[Int](1, 1, 1, 1, 1))))
sc.stop()
}
test("simple cogroup") {
val conf = createSparkConf(loadDefaults = false)
sc = new SparkContext("local", "test", conf)
val rdd1 = sc.parallelize(1 to 4).map(i => (i, i))
val rdd2 = sc.parallelize(1 to 4).map(i => (i%2, i))
val result = rdd1.cogroup(rdd2).collect()
result.foreach { case (i, (seq1, seq2)) =>
i match {
case 0 => assert(seq1.toSet === Set[Int]() && seq2.toSet === Set[Int](2, 4))
case 1 => assert(seq1.toSet === Set[Int](1) && seq2.toSet === Set[Int](1, 3))
case 2 => assert(seq1.toSet === Set[Int](2) && seq2.toSet === Set[Int]())
case 3 => assert(seq1.toSet === Set[Int](3) && seq2.toSet === Set[Int]())
case 4 => assert(seq1.toSet === Set[Int](4) && seq2.toSet === Set[Int]())
}
}
sc.stop()
}
test("spilling") {
testSimpleSpilling()
}
test("spilling with compression") {
// Keep track of which compression codec we're using to report in test failure messages
var lastCompressionCodec: Option[String] = None
try {
allCompressionCodecs.foreach { c =>
lastCompressionCodec = Some(c)
testSimpleSpilling(Some(c))
}
} catch {
// Include compression codec used in test failure message
// We need to catch Throwable here because assertion failures are not covered by Exceptions
case t: Throwable =>
val compressionMessage = lastCompressionCodec
.map { c => "with compression using codec " + c }
.getOrElse("without compression")
val newException = new Exception(s"Test failed $compressionMessage:\n\n${t.getMessage}")
newException.setStackTrace(t.getStackTrace)
throw newException
}
}
test("spilling with compression and encryption") {
testSimpleSpilling(Some(CompressionCodec.DEFAULT_COMPRESSION_CODEC), encrypt = true)
}
/**
* Test spilling through simple aggregations and cogroups.
* If a compression codec is provided, use it. Otherwise, do not compress spills.
*/
private def testSimpleSpilling(codec: Option[String] = None, encrypt: Boolean = false): Unit = {
val size = 1000
val conf = createSparkConf(loadDefaults = true, codec) // Load defaults for Spark home
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 4).toString)
conf.set(IO_ENCRYPTION_ENABLED, encrypt)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
assertSpilled(sc, "reduceByKey") {
val result = sc.parallelize(0 until size)
.map { i => (i / 2, i) }.reduceByKey(math.max).collect()
assert(result.length === size / 2)
result.foreach { case (k, v) =>
val expected = k * 2 + 1
assert(v === expected, s"Value for $k was wrong: expected $expected, got $v")
}
}
assertSpilled(sc, "groupByKey") {
val result = sc.parallelize(0 until size).map { i => (i / 2, i) }.groupByKey().collect()
assert(result.length == size / 2)
result.foreach { case (i, seq) =>
val actual = seq.toSet
val expected = Set(i * 2, i * 2 + 1)
assert(actual === expected, s"Value for $i was wrong: expected $expected, got $actual")
}
}
assertSpilled(sc, "cogroup") {
val rdd1 = sc.parallelize(0 until size).map { i => (i / 2, i) }
val rdd2 = sc.parallelize(0 until size).map { i => (i / 2, i) }
val result = rdd1.cogroup(rdd2).collect()
assert(result.length === size / 2)
result.foreach { case (i, (seq1, seq2)) =>
val actual1 = seq1.toSet
val actual2 = seq2.toSet
val expected = Set(i * 2, i * 2 + 1)
assert(actual1 === expected, s"Value 1 for $i was wrong: expected $expected, got $actual1")
assert(actual2 === expected, s"Value 2 for $i was wrong: expected $expected, got $actual2")
}
}
sc.stop()
}
test("ExternalAppendOnlyMap shouldn't fail when forced to spill before calling its iterator") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[String]
val consumer = createExternalMap[String]
map.insertAll((1 to size).iterator.map(_.toString).map(i => (i, i)))
assert(map.spill(10000, consumer) == 0L)
}
test("spilling with hash collisions") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[String]
val collisionPairs = Seq(
("Aa", "BB"), // 2112
("to", "v1"), // 3707
("variants", "gelato"), // -1249574770
("Teheran", "Siblings"), // 231609873
("misused", "horsemints"), // 1069518484
("isohel", "epistolaries"), // -1179291542
("righto", "buzzards"), // -931102253
("hierarch", "crinolines"), // -1732884796
("inwork", "hypercatalexes"), // -1183663690
("wainages", "presentencing"), // 240183619
("trichothecenes", "locular"), // 339006536
("pomatoes", "eructation") // 568647356
)
collisionPairs.foreach { case (w1, w2) =>
// String.hashCode is documented to use a specific algorithm, but check just in case
assert(w1.hashCode === w2.hashCode)
}
map.insertAll((1 to size).iterator.map(_.toString).map(i => (i, i)))
collisionPairs.foreach { case (w1, w2) =>
map.insert(w1, w2)
map.insert(w2, w1)
}
assert(map.numSpills > 0, "map did not spill")
// A map of collision pairs in both directions
val collisionPairsMap = (collisionPairs ++ collisionPairs.map(_.swap)).toMap
// Avoid map.size or map.iterator.length because this destructively sorts the underlying map
var count = 0
val it = map.iterator
while (it.hasNext) {
val kv = it.next()
val expectedValue = ArrayBuffer[String](collisionPairsMap.getOrElse(kv._1, kv._1))
assert(kv._2.equals(expectedValue))
count += 1
}
assert(count === size + collisionPairs.size * 2)
sc.stop()
}
test("spilling with many hash collisions") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val context = MemoryTestingUtils.fakeTaskContext(sc.env)
val map =
new ExternalAppendOnlyMap[FixedHashObject, Int, Int](_ => 1, _ + _, _ + _, context = context)
// Insert 10 copies each of lots of objects whose hash codes are either 0 or 1. This causes
// problems if the map fails to group together the objects with the same code (SPARK-2043).
for (i <- 1 to 10) {
for (j <- 1 to size) {
map.insert(FixedHashObject(j, j % 2), 1)
}
}
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
var count = 0
while (it.hasNext) {
val kv = it.next()
assert(kv._2 === 10)
count += 1
}
assert(count === size)
sc.stop()
}
test("spilling with hash collisions using the Int.MaxValue key") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
(1 to size).foreach { i => map.insert(i, i) }
map.insert(Int.MaxValue, Int.MaxValue)
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
while (it.hasNext) {
// Should not throw NoSuchElementException
it.next()
}
sc.stop()
}
test("spilling with null keys and values") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
conf.set("spark.shuffle.spill.numElementsForceSpillThreshold", (size / 2).toString)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((1 to size).iterator.map(i => (i, i)))
map.insert(null.asInstanceOf[Int], 1)
map.insert(1, null.asInstanceOf[Int])
map.insert(null.asInstanceOf[Int], null.asInstanceOf[Int])
assert(map.numSpills > 0, "map did not spill")
val it = map.iterator
while (it.hasNext) {
// Should not throw NullPointerException
it.next()
}
sc.stop()
}
test("SPARK-22713 spill during iteration leaks internal map") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((0 until size).iterator.map(i => (i / 10, i)))
assert(map.numSpills == 0, "map was not supposed to spill")
val it = map.iterator
assert(it.isInstanceOf[CompletionIterator[_, _]])
// org.apache.spark.util.collection.AppendOnlyMap.destructiveSortedIterator returns
// an instance of an anonymous Iterator class.
val underlyingMapRef = WeakReference(map.currentMap)
{
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(!tmpIsNull)
}
val first50Keys = for ( _ <- 0 until 50) yield {
val (k, vs) = it.next
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
assert(map.numSpills == 0)
map.spill(Long.MaxValue, null)
// these asserts try to show that we're no longer holding references to the underlying map.
// it'd be nice to use something like
// https://github.com/scala/scala/blob/2.13.x/test/junit/scala/tools/testing/AssertUtil.scala
// (lines 69-89)
// assert(map.currentMap == null)
eventually {
System.gc()
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(tmpIsNull)
}
val next50Keys = for ( _ <- 0 until 50) yield {
val (k, vs) = it.next
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
assert(!it.hasNext)
val keys = (first50Keys ++ next50Keys).sorted
assert(keys == (0 until 100))
}
test("drop all references to the underlying map once the iterator is exhausted") {
val size = 1000
val conf = createSparkConf(loadDefaults = true)
sc = new SparkContext("local-cluster[1,1,1024]", "test", conf)
val map = createExternalMap[Int]
map.insertAll((0 until size).iterator.map(i => (i / 10, i)))
assert(map.numSpills == 0, "map was not supposed to spill")
val underlyingMapRef = WeakReference(map.currentMap)
{
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(!tmpIsNull)
}
val it = map.iterator
assert(it.isInstanceOf[CompletionIterator[_, _]])
val keys = it.map{
case (k, vs) =>
val sortedVs = vs.sorted
assert(sortedVs.seq == (0 until 10).map(10 * k + _))
k
}
.toList
.sorted
assert(it.isEmpty)
assert(keys == (0 until 100))
assert(map.numSpills == 0)
// these asserts try to show that we're no longer holding references to the underlying map.
// it'd be nice to use something like
// https://github.com/scala/scala/blob/2.13.x/test/junit/scala/tools/testing/AssertUtil.scala
// (lines 69-89)
assert(map.currentMap == null)
eventually {
Thread.sleep(500)
System.gc()
// direct asserts introduced some macro generated code that held a reference to the map
val tmpIsNull = null == underlyingMapRef.get.orNull
assert(tmpIsNull)
}
assert(it.toList.isEmpty)
}
test("SPARK-22713 external aggregation updates peak execution memory") {
val spillThreshold = 1000
val conf = createSparkConf(loadDefaults = false)
.set("spark.shuffle.spill.numElementsForceSpillThreshold", spillThreshold.toString)
sc = new SparkContext("local", "test", conf)
// No spilling
AccumulatorSuite.verifyPeakExecutionMemorySet(sc, "external map without spilling") {
assertNotSpilled(sc, "verify peak memory") {
sc.parallelize(1 to spillThreshold / 2, 2).map { i => (i, i) }.reduceByKey(_ + _).count()
}
}
// With spilling
AccumulatorSuite.verifyPeakExecutionMemorySet(sc, "external map with spilling") {
assertSpilled(sc, "verify peak memory") {
sc.parallelize(1 to spillThreshold * 3, 2).map { i => (i, i) }.reduceByKey(_ + _).count()
}
}
}
test("force to spill for external aggregation") {
val conf = createSparkConf(loadDefaults = false)
.set("spark.shuffle.memoryFraction", "0.01")
.set("spark.memory.useLegacyMode", "true")
.set("spark.testing.memory", "100000000")
.set("spark.shuffle.sort.bypassMergeThreshold", "0")
sc = new SparkContext("local", "test", conf)
val N = 2e5.toInt
sc.parallelize(1 to N, 2)
.map { i => (i, i) }
.groupByKey()
.reduceByKey(_ ++ _)
.count()
}
}
|
rikima/spark
|
core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
|
Scala
|
apache-2.0
| 19,912 |
/*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package io.github.mandar2812.dynaml.probability
import breeze.numerics.log
import breeze.stats.distributions.ContinuousDistr
import io.github.mandar2812.dynaml.analysis.PushforwardMap
import io.github.mandar2812.dynaml.pipes.DataPipe
import io.github.mandar2812.dynaml.pipes.Encoder
/**
*
* A measurable function is any mapping/function applied
* to samples generated by some base random variable instance.
*
* @tparam Domain1 Type over which the base random variable is defined.
* @tparam Domain2 Type over which output of function takes its values.
* @tparam R The type of the base Random Variable, must inherit from [[RandomVariable]]
* @author mandar2812 date 24/09/2016.
*
* */
trait MeasurableFunction[Domain1, Domain2, +R <: RandomVariable[Domain1]]
extends RandomVariable[Domain2] {
val baseRV: R
val func: DataPipe[Domain1, Domain2]
def _baseRandomVar: R = baseRV
}
object MeasurableFunction {
def apply[Domain1, Domain2, R <: RandomVariable[Domain1]](f: Domain1 => Domain2)(base: R)
: MeasurableFunction[Domain1, Domain2, R] = new MeasurableFunction[Domain1, Domain2, R] {
override val baseRV: R = base
override val func = DataPipe(f)
override val sample: DataPipe[Unit, Domain2] = baseRV.sample > func
}
def apply[Domain1, Domain2, R <: RandomVariable[Domain1]](base: R)(f: Domain1 => Domain2)
: MeasurableFunction[Domain1, Domain2, R] = new MeasurableFunction[Domain1, Domain2, R] {
override val baseRV: R = base
override val func = DataPipe(f)
override val sample: DataPipe[Unit, Domain2] = baseRV.sample > func
}
def apply[Domain1, Domain2, R <: RandomVariable[Domain1]](base: R, f: DataPipe[Domain1, Domain2])
: MeasurableFunction[Domain1, Domain2, R] = new MeasurableFunction[Domain1, Domain2, R] {
override val baseRV: R = base
override val func: DataPipe[Domain1, Domain2] = f
override val sample: DataPipe[Unit, Domain2] = baseRV.sample > func
}
}
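// Illustrative sketch: wrapping an existing random variable with a plain Scala function via the
// factories above. `gaussianRV` is a hypothetical RandomVariable[Double] assumed to be defined
// elsewhere; the wrapped variable samples from the base and pushes the draw through the function.
//
//   val squaredRV = MeasurableFunction(gaussianRV)((x: Double) => x * x)
//   val draw: Double = squaredRV.sample.run(())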
trait ContinuousMeasurableFunc[Domain1, Domain2, +R <: ContinuousRandomVariable[Domain1]]
extends ContinuousRandomVariable[Domain2] with MeasurableFunction[Domain1, Domain2, R]
object ContinuousMeasurableFunc {
def apply[
Domain1,
Domain2,
R <: ContinuousRandomVariable[Domain1]](
f: Domain1 => Domain2)(
base: R): ContinuousMeasurableFunc[Domain1, Domain2, R] =
new ContinuousMeasurableFunc[Domain1, Domain2, R] {
override val baseRV: R = base
override val func = DataPipe(f)
override val sample: DataPipe[Unit, Domain2] = baseRV.sample > func
}
}
/**
* A measurable function of a continuous random variable
* with a defined probability density function.
*
* @tparam Domain1 The domain of the base random variable
* @tparam Domain2 The output set of the function
* @tparam Jacobian The type representing the Jacobian of the inverse of the map [[func]]
* @param baseRV The base random variable
* @param func A function with a defined inverse and Jacobian of the inverse,
* given as a [[PushforwardMap]] instance.
*
* */
class MeasurableDistrRV[Domain1, Domain2, Jacobian, Distr1 <: ContinuousDistr[Domain1]](
override val baseRV: ContinuousRVWithDistr[Domain1, Distr1])(
override val func: PushforwardMap[Domain1, Domain2, Jacobian]) extends
ContinuousMeasurableFunc[
Domain1, Domain2,
ContinuousRVWithDistr[Domain1, Distr1]] with
ContinuousRVWithDistr[Domain2, ContinuousDistr[Domain2]] {
override val underlyingDist: ContinuousDistr[Domain2] = new ContinuousDistr[Domain2] {
override def unnormalizedLogPdf(x: Domain2): Double =
baseRV.underlyingDist.unnormalizedLogPdf(func.i(x)) + log(func._det(func.i.J(x)))
override def logNormalizer: Double = baseRV.underlyingDist.logNormalizer
override def draw(): Domain2 = func.run(baseRV.underlyingDist.draw())
}
}
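// For clarity: the density above is the standard change-of-variables (pushforward) relation.
// Writing g = func.i for the inverse map, the log-density of y = func(x) is
//
//   log p_Y(y) = log p_X(g(y)) + log |det J_g(y)|
//
// which is what `unnormalizedLogPdf` computes, assuming `func._det` evaluates the determinant
// of the Jacobian returned by `func.i.J`.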
/**
* A random variable which consists of a base random variable in [[Domain1]]
* and an invertible homeomorphism from [[Domain1]] to [[Domain2]].
*
* @tparam Domain1 Domain of the base random variable
* @tparam Domain2 Domain of the morphed random variable.
* @param base A distribution [[Distr]] over [[Domain1]] which extends breeze [[ContinuousDistr]]
* @param encoder The morphing function from [[Domain1]] to [[Domain2]]
* @author mandar2812 date 16/05/2017.
* */
class EncodedContDistrRV[
Domain1, Domain2,
Distr <: ContinuousDistr[Domain1]](
base: Distr, encoder: Encoder[Domain1, Domain2]) extends
ContinuousRVWithDistr[Domain2, ContinuousDistr[Domain2]] {
override val underlyingDist: ContinuousDistr[Domain2] = new ContinuousDistr[Domain2] {
override def unnormalizedLogPdf(x: Domain2): Double = base.unnormalizedLogPdf(encoder.i(x))
override def logNormalizer: Double = base.logNormalizer
override def draw(): Domain2 = encoder(base.draw())
}
}
object EncodedContDistrRV {
def apply[Domain1, Domain2, Distr <: ContinuousDistr[Domain1]](
base: Distr, encoder: Encoder[Domain1, Domain2]): EncodedContDistrRV[Domain1, Domain2, Distr] =
new EncodedContDistrRV(base, encoder)
}
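// Illustrative sketch: re-expressing a breeze Gaussian over Double as a random variable over a
// hypothetical wrapper type, assuming a suitable Encoder instance is available elsewhere.
//
//   case class Temperature(celsius: Double)
//   // assuming tempEncoder: Encoder[Double, Temperature] is defined elsewhere
//   val tempRV = EncodedContDistrRV(breeze.stats.distributions.Gaussian(20.0, 5.0), tempEncoder)
//   val sampled: Temperature = tempRV.sample.run(())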
|
transcendent-ai-labs/DynaML
|
dynaml-core/src/main/scala/io/github/mandar2812/dynaml/probability/MeasurableFunction.scala
|
Scala
|
apache-2.0
| 6,000 |
package models
import formats.MongoJsonFormats
import play.modules.reactivemongo.ReactiveMongoPlugin
import play.api.Play.current
import play.modules.reactivemongo.json.collection.JSONCollection
class MongoModel(collectionName: String) extends MongoJsonFormats {
def driver = ReactiveMongoPlugin.driver
def connection = ReactiveMongoPlugin.connection
def db = ReactiveMongoPlugin.db
def collection: JSONCollection = db.collection[JSONCollection](collectionName)
}
|
jdauphant/play_api_example
|
app/models/MongoModel.scala
|
Scala
|
isc
| 475 |
package br.unb.cic.poo.gol
trait Rules {
/* Abstract method that checks whether a cell should be kept alive */
def shouldKeepAlive(i: Int, j: Int): Boolean
/* Abstract method that checks whether a cell should be (re)born */
def shouldRevive(i: Int, j: Int): Boolean
}
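// Illustrative sketch: the classic Conway rule set expressed against this trait, assuming a
// hypothetical liveNeighbors(i, j) helper supplied by the implementing class.
//
//   trait ConwayRules extends Rules {
//     def liveNeighbors(i: Int, j: Int): Int
//     def shouldKeepAlive(i: Int, j: Int): Boolean = {
//       val n = liveNeighbors(i, j)
//       n == 2 || n == 3
//     }
//     def shouldRevive(i: Int, j: Int): Boolean = liveNeighbors(i, j) == 3
//   }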
|
PeterTowers/TP1-022017
|
GoLScala/GoLScala_INF/src/br/unb/cic/poo/gol/Rules.scala
|
Scala
|
mit
| 280 |
package artisanal.pickle.maker
import models._
import parser._
import org.specs2._
import mutable._
import specification._
import scala.reflect.internal.pickling.ByteCodecs
import scala.tools.scalap.scalax.rules.scalasig._
import com.novus.salat.annotations.util._
import scala.reflect.ScalaSignature
class OptionListUserListUserSpec extends mutable.Specification {
"a ScalaSig for case class MyRecord_OptionListUserListUser(oa1: Option[List[MyRecord_User]], oa2: List[MyRecord_User])" should {
"have the correct string" in {
val mySig = new artisanal.pickle.maker.ScalaSig(List("case class"), List("models", "MyRecord_OptionListUserListUser"), List(("oa1", "Option[List[MyRecord_User]]"), ("oa2", "List[MyRecord_User]")))
val correctParsedSig = SigParserHelper.parseByteCodeFromAnnotation(classOf[MyRecord_OptionListUserListUser]).map(ScalaSigAttributeParsers.parse(_)).get
val myParsedSig = SigParserHelper.parseByteCodeFromMySig(mySig).map(ScalaSigAttributeParsers.parse(_)).get
correctParsedSig.toString === myParsedSig.toString
}
}
}
|
julianpeeters/artisanal-pickle-maker
|
src/test/scala/doubleValueMember/OptionSpecs/OptionListUserListUserSpec.scala
|
Scala
|
apache-2.0
| 1,082 |
package com.github.mdr.mash.os
import java.nio.file.Paths
import com.github.mdr.mash.os.linux.GlobHelper
import org.scalatest.{ FlatSpec, Matchers }
class GlobHelperTest extends FlatSpec with Matchers {
"Finding the start directory for a glob" should "work" in {
GlobHelper.globStart("/etc/*") should equal(Paths.get("/etc/"))
GlobHelper.globStart("/*") should equal(Paths.get("/"))
GlobHelper.globStart("foo/bar/*.java") should equal(Paths.get("foo/bar"))
GlobHelper.globStart("foo/*.java") should equal(Paths.get("foo/"))
GlobHelper.globStart("*.java") should equal(Paths.get(""))
}
}
|
mdr/mash
|
src/test/scala/com/github/mdr/mash/os/GlobHelperTest.scala
|
Scala
|
mit
| 615 |
/*
* Copyright 2016 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input}
case class CP36(value: Option[Int]) extends CtBoxIdentifier(name = "Administration and office expenses") with CtOptionalInteger with Input
object CP36 {
def apply(int: Int): CP36 = CP36(Some(int))
}
|
ahudspith-equalexperts/ct-calculations
|
src/main/scala/uk/gov/hmrc/ct/computations/CP36.scala
|
Scala
|
apache-2.0
| 917 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.classification
import java.util.Locale
import scala.collection.mutable
import breeze.linalg.{DenseVector => BDV}
import breeze.optimize.{CachedDiffFunction, DiffFunction, LBFGS => BreezeLBFGS, LBFGSB => BreezeLBFGSB, OWLQN => BreezeOWLQN}
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkException
import org.apache.spark.annotation.{Experimental, Since}
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.internal.Logging
import org.apache.spark.ml.feature.Instance
import org.apache.spark.ml.linalg._
import org.apache.spark.ml.linalg.BLAS._
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared._
import org.apache.spark.ml.util._
import org.apache.spark.mllib.evaluation.BinaryClassificationMetrics
import org.apache.spark.mllib.linalg.VectorImplicits._
import org.apache.spark.mllib.stat.MultivariateOnlineSummarizer
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row}
import org.apache.spark.sql.functions.{col, lit}
import org.apache.spark.sql.types.{DataType, DoubleType, StructType}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.VersionUtils
/**
* Params for logistic regression.
*/
private[classification] trait LogisticRegressionParams extends ProbabilisticClassifierParams
with HasRegParam with HasElasticNetParam with HasMaxIter with HasFitIntercept with HasTol
with HasStandardization with HasWeightCol with HasThreshold with HasAggregationDepth {
import org.apache.spark.ml.classification.LogisticRegression.supportedFamilyNames
/**
* Set threshold in binary classification, in range [0, 1].
*
* If the estimated probability of class label 1 is greater than threshold, then predict 1,
* else 0. A high threshold encourages the model to predict 0 more often;
* a low threshold encourages the model to predict 1 more often.
*
* Note: Calling this with threshold p is equivalent to calling `setThresholds(Array(1-p, p))`.
* When `setThreshold()` is called, any user-set value for `thresholds` will be cleared.
* If both `threshold` and `thresholds` are set in a ParamMap, then they must be
* equivalent.
*
* Default is 0.5.
*
* @group setParam
*/
// TODO: Implement SPARK-11543?
def setThreshold(value: Double): this.type = {
if (isSet(thresholds)) clear(thresholds)
set(threshold, value)
}
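// For example, setThreshold(0.7) is equivalent to setThresholds(Array(0.3, 0.7)): class 1 is
// then predicted only when the estimated probability of label 1 exceeds 0.7.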
/**
* Param for the name of family which is a description of the label distribution
* to be used in the model.
* Supported options:
* - "auto": Automatically select the family based on the number of classes:
* If numClasses == 1 || numClasses == 2, set to "binomial".
* Else, set to "multinomial"
* - "binomial": Binary logistic regression with pivoting.
* - "multinomial": Multinomial logistic (softmax) regression without pivoting.
* Default is "auto".
*
* @group param
*/
@Since("2.1.0")
final val family: Param[String] = new Param(this, "family",
"The name of family which is a description of the label distribution to be used in the " +
s"model. Supported options: ${supportedFamilyNames.mkString(", ")}.",
(value: String) => supportedFamilyNames.contains(value.toLowerCase(Locale.ROOT)))
/** @group getParam */
@Since("2.1.0")
def getFamily: String = $(family)
/**
* Get threshold for binary classification.
*
* If `thresholds` is set with length 2 (i.e., binary classification),
* this returns the equivalent threshold: {{{1 / (1 + thresholds(0) / thresholds(1))}}}.
* Otherwise, returns `threshold` if set, or its default value if unset.
*
* @group getParam
* @throws IllegalArgumentException if `thresholds` is set to an array of length other than 2.
*/
override def getThreshold: Double = {
checkThresholdConsistency()
if (isSet(thresholds)) {
val ts = $(thresholds)
require(ts.length == 2, "Logistic Regression getThreshold only applies to" +
" binary classification, but thresholds has length != 2. thresholds: " + ts.mkString(","))
1.0 / (1.0 + ts(0) / ts(1))
} else {
$(threshold)
}
}
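// Worked example: if thresholds == Array(0.25, 0.75), this returns
// 1.0 / (1.0 + 0.25 / 0.75) = 0.75.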
/**
* Set thresholds in multiclass (or binary) classification to adjust the probability of
* predicting each class. Array must have length equal to the number of classes,
* with values greater than 0, excepting that at most one value may be 0.
* The class with largest value p/t is predicted, where p is the original probability of that
* class and t is the class's threshold.
*
* Note: When `setThresholds()` is called, any user-set value for `threshold` will be cleared.
* If both `threshold` and `thresholds` are set in a ParamMap, then they must be
* equivalent.
*
* @group setParam
*/
def setThresholds(value: Array[Double]): this.type = {
if (isSet(threshold)) clear(threshold)
set(thresholds, value)
}
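// Worked example: with setThresholds(Array(1.0, 1.0, 0.5)) and predicted probabilities
// (0.40, 0.35, 0.25), the ratios p/t are (0.40, 0.35, 0.50), so class 2 is predicted even
// though it has the smallest raw probability.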
/**
* Get thresholds for binary or multiclass classification.
*
* If `thresholds` is set, return its value.
* Otherwise, if `threshold` is set, return the equivalent thresholds for binary
* classification: (1-threshold, threshold).
* If neither are set, throw an exception.
*
* @group getParam
*/
override def getThresholds: Array[Double] = {
checkThresholdConsistency()
if (!isSet(thresholds) && isSet(threshold)) {
val t = $(threshold)
Array(1-t, t)
} else {
$(thresholds)
}
}
/**
* If `threshold` and `thresholds` are both set, ensures they are consistent.
*
* @throws IllegalArgumentException if `threshold` and `thresholds` are not equivalent
*/
protected def checkThresholdConsistency(): Unit = {
if (isSet(threshold) && isSet(thresholds)) {
val ts = $(thresholds)
require(ts.length == 2, "Logistic Regression found inconsistent values for threshold and" +
s" thresholds. Param threshold is set (${$(threshold)}), indicating binary" +
s" classification, but Param thresholds is set with length ${ts.length}." +
" Clear one Param value to fix this problem.")
val t = 1.0 / (1.0 + ts(0) / ts(1))
require(math.abs($(threshold) - t) < 1E-5, "Logistic Regression getThreshold found" +
s" inconsistent values for threshold (${$(threshold)}) and thresholds (equivalent to $t)")
}
}
/**
* The lower bounds on coefficients if fitting under bound constrained optimization.
* The bound matrix must be compatible with the shape (1, number of features) for binomial
* regression, or (number of classes, number of features) for multinomial regression.
* Otherwise, it throws exception.
* Default is none.
*
* @group expertParam
*/
@Since("2.2.0")
val lowerBoundsOnCoefficients: Param[Matrix] = new Param(this, "lowerBoundsOnCoefficients",
"The lower bounds on coefficients if fitting under bound constrained optimization.")
/** @group expertGetParam */
@Since("2.2.0")
def getLowerBoundsOnCoefficients: Matrix = $(lowerBoundsOnCoefficients)
/**
* The upper bounds on coefficients if fitting under bound constrained optimization.
* The bound matrix must be compatible with the shape (1, number of features) for binomial
* regression, or (number of classes, number of features) for multinomial regression.
* Otherwise, it throws exception.
* Default is none.
*
* @group expertParam
*/
@Since("2.2.0")
val upperBoundsOnCoefficients: Param[Matrix] = new Param(this, "upperBoundsOnCoefficients",
"The upper bounds on coefficients if fitting under bound constrained optimization.")
/** @group expertGetParam */
@Since("2.2.0")
def getUpperBoundsOnCoefficients: Matrix = $(upperBoundsOnCoefficients)
/**
* The lower bounds on intercepts if fitting under bound constrained optimization.
* The bounds vector size must be equal to 1 for binomial regression, or the number
* of classes for multinomial regression. Otherwise, it throws exception.
* Default is none.
*
* @group expertParam
*/
@Since("2.2.0")
val lowerBoundsOnIntercepts: Param[Vector] = new Param(this, "lowerBoundsOnIntercepts",
"The lower bounds on intercepts if fitting under bound constrained optimization.")
/** @group expertGetParam */
@Since("2.2.0")
def getLowerBoundsOnIntercepts: Vector = $(lowerBoundsOnIntercepts)
/**
* The upper bounds on intercepts if fitting under bound constrained optimization.
* The bound vector size must be equal to 1 for binomial regression, or the number
* of classes for multinomial regression. Otherwise, it throws exception.
* Default is none.
*
* @group expertParam
*/
@Since("2.2.0")
val upperBoundsOnIntercepts: Param[Vector] = new Param(this, "upperBoundsOnIntercepts",
"The upper bounds on intercepts if fitting under bound constrained optimization.")
/** @group expertGetParam */
@Since("2.2.0")
def getUpperBoundsOnIntercepts: Vector = $(upperBoundsOnIntercepts)
protected def usingBoundConstrainedOptimization: Boolean = {
isSet(lowerBoundsOnCoefficients) || isSet(upperBoundsOnCoefficients) ||
isSet(lowerBoundsOnIntercepts) || isSet(upperBoundsOnIntercepts)
}
override protected def validateAndTransformSchema(
schema: StructType,
fitting: Boolean,
featuresDataType: DataType): StructType = {
checkThresholdConsistency()
if (usingBoundConstrainedOptimization) {
require($(elasticNetParam) == 0.0, "Fitting under bound constrained optimization only " +
s"supports L2 regularization, but got elasticNetParam = $getElasticNetParam.")
}
if (!$(fitIntercept)) {
require(!isSet(lowerBoundsOnIntercepts) && !isSet(upperBoundsOnIntercepts),
"Please don't set bounds on intercepts if fitting without intercept.")
}
super.validateAndTransformSchema(schema, fitting, featuresDataType)
}
}
/**
* Logistic regression. Supports:
* - Multinomial logistic (softmax) regression.
* - Binomial logistic regression.
*
* This class supports fitting traditional logistic regression model by LBFGS/OWLQN and
* bound (box) constrained logistic regression model by LBFGSB.
*/
@Since("1.2.0")
class LogisticRegression @Since("1.2.0") (
@Since("1.4.0") override val uid: String)
extends ProbabilisticClassifier[Vector, LogisticRegression, LogisticRegressionModel]
with LogisticRegressionParams with DefaultParamsWritable with Logging {
@Since("1.4.0")
def this() = this(Identifiable.randomUID("logreg"))
/**
* Set the regularization parameter.
* Default is 0.0.
*
* @group setParam
*/
@Since("1.2.0")
def setRegParam(value: Double): this.type = set(regParam, value)
setDefault(regParam -> 0.0)
/**
* Set the ElasticNet mixing parameter.
* For alpha = 0, the penalty is an L2 penalty.
* For alpha = 1, it is an L1 penalty.
* For alpha in (0,1), the penalty is a combination of L1 and L2.
* Default is 0.0 which is an L2 penalty.
*
* Note: Fitting under bound constrained optimization only supports L2 regularization,
* so throws exception if this param is non-zero value.
*
* @group setParam
*/
@Since("1.4.0")
def setElasticNetParam(value: Double): this.type = set(elasticNetParam, value)
setDefault(elasticNetParam -> 0.0)
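// Illustrative note: with alpha = elasticNetParam and lambda = regParam, the penalty added to
// the objective takes (up to the handling of standardization) the glmnet-style form
//   lambda * (alpha * ||coefficients||_1 + (1 - alpha) / 2 * ||coefficients||_2^2)
// (see regParamL1 / regParamL2 in train() below).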
/**
* Set the maximum number of iterations.
* Default is 100.
*
* @group setParam
*/
@Since("1.2.0")
def setMaxIter(value: Int): this.type = set(maxIter, value)
setDefault(maxIter -> 100)
/**
* Set the convergence tolerance of iterations.
* Smaller value will lead to higher accuracy at the cost of more iterations.
* Default is 1E-6.
*
* @group setParam
*/
@Since("1.4.0")
def setTol(value: Double): this.type = set(tol, value)
setDefault(tol -> 1E-6)
/**
* Whether to fit an intercept term.
* Default is true.
*
* @group setParam
*/
@Since("1.4.0")
def setFitIntercept(value: Boolean): this.type = set(fitIntercept, value)
setDefault(fitIntercept -> true)
/**
* Sets the value of param [[family]].
* Default is "auto".
*
* @group setParam
*/
@Since("2.1.0")
def setFamily(value: String): this.type = set(family, value)
setDefault(family -> "auto")
/**
* Whether to standardize the training features before fitting the model.
* The coefficients of models will be always returned on the original scale,
* so it will be transparent for users. Note that with/without standardization,
* the models should always converge to the same solution when no regularization
* is applied. In R's GLMNET package, the default behavior is true as well.
* Default is true.
*
* @group setParam
*/
@Since("1.5.0")
def setStandardization(value: Boolean): this.type = set(standardization, value)
setDefault(standardization -> true)
@Since("1.5.0")
override def setThreshold(value: Double): this.type = super.setThreshold(value)
@Since("1.5.0")
override def getThreshold: Double = super.getThreshold
/**
* Sets the value of param [[weightCol]].
* If this is not set or empty, we treat all instance weights as 1.0.
* Default is not set, so all instances have weight one.
*
* @group setParam
*/
@Since("1.6.0")
def setWeightCol(value: String): this.type = set(weightCol, value)
@Since("1.5.0")
override def setThresholds(value: Array[Double]): this.type = super.setThresholds(value)
@Since("1.5.0")
override def getThresholds: Array[Double] = super.getThresholds
/**
* Suggested depth for treeAggregate (greater than or equal to 2).
* If the dimensions of features or the number of partitions are large,
* this param could be adjusted to a larger size.
* Default is 2.
*
* @group expertSetParam
*/
@Since("2.1.0")
def setAggregationDepth(value: Int): this.type = set(aggregationDepth, value)
setDefault(aggregationDepth -> 2)
/**
* Set the lower bounds on coefficients if fitting under bound constrained optimization.
*
* @group expertSetParam
*/
@Since("2.2.0")
def setLowerBoundsOnCoefficients(value: Matrix): this.type = set(lowerBoundsOnCoefficients, value)
/**
* Set the upper bounds on coefficients if fitting under bound constrained optimization.
*
* @group expertSetParam
*/
@Since("2.2.0")
def setUpperBoundsOnCoefficients(value: Matrix): this.type = set(upperBoundsOnCoefficients, value)
/**
* Set the lower bounds on intercepts if fitting under bound constrained optimization.
*
* @group expertSetParam
*/
@Since("2.2.0")
def setLowerBoundsOnIntercepts(value: Vector): this.type = set(lowerBoundsOnIntercepts, value)
/**
* Set the upper bounds on intercepts if fitting under bound constrained optimization.
*
* @group expertSetParam
*/
@Since("2.2.0")
def setUpperBoundsOnIntercepts(value: Vector): this.type = set(upperBoundsOnIntercepts, value)
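// Illustrative sketch: constraining a binomial model to non-negative coefficients and intercept,
// with shapes as documented on the corresponding Params above. `numFeatures` is a hypothetical
// feature count; unset upper bounds default to positive infinity.
//
//   val lr = new LogisticRegression()
//     .setFitIntercept(true)
//     .setLowerBoundsOnCoefficients(Matrices.zeros(1, numFeatures))
//     .setLowerBoundsOnIntercepts(Vectors.zeros(1))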
private def assertBoundConstrainedOptimizationParamsValid(
numCoefficientSets: Int,
numFeatures: Int): Unit = {
if (isSet(lowerBoundsOnCoefficients)) {
require($(lowerBoundsOnCoefficients).numRows == numCoefficientSets &&
$(lowerBoundsOnCoefficients).numCols == numFeatures,
"The shape of LowerBoundsOnCoefficients must be compatible with (1, number of features) " +
"for binomial regression, or (number of classes, number of features) for multinomial " +
"regression, but found: " +
s"(${getLowerBoundsOnCoefficients.numRows}, ${getLowerBoundsOnCoefficients.numCols}).")
}
if (isSet(upperBoundsOnCoefficients)) {
require($(upperBoundsOnCoefficients).numRows == numCoefficientSets &&
$(upperBoundsOnCoefficients).numCols == numFeatures,
"The shape of upperBoundsOnCoefficients must be compatible with (1, number of features) " +
"for binomial regression, or (number of classes, number of features) for multinomial " +
"regression, but found: " +
s"(${getUpperBoundsOnCoefficients.numRows}, ${getUpperBoundsOnCoefficients.numCols}).")
}
if (isSet(lowerBoundsOnIntercepts)) {
require($(lowerBoundsOnIntercepts).size == numCoefficientSets, "The size of " +
"lowerBoundsOnIntercepts must be equal to 1 for binomial regression, or the number of " +
s"classes for multinomial regression, but found: ${getLowerBoundsOnIntercepts.size}.")
}
if (isSet(upperBoundsOnIntercepts)) {
require($(upperBoundsOnIntercepts).size == numCoefficientSets, "The size of " +
"upperBoundsOnIntercepts must be equal to 1 for binomial regression, or the number of " +
s"classes for multinomial regression, but found: ${getUpperBoundsOnIntercepts.size}.")
}
if (isSet(lowerBoundsOnCoefficients) && isSet(upperBoundsOnCoefficients)) {
require($(lowerBoundsOnCoefficients).toArray.zip($(upperBoundsOnCoefficients).toArray)
.forall(x => x._1 <= x._2), "LowerBoundsOnCoefficients should always be " +
"less than or equal to upperBoundsOnCoefficients, but found: " +
s"lowerBoundsOnCoefficients = $getLowerBoundsOnCoefficients, " +
s"upperBoundsOnCoefficients = $getUpperBoundsOnCoefficients.")
}
if (isSet(lowerBoundsOnIntercepts) && isSet(upperBoundsOnIntercepts)) {
require($(lowerBoundsOnIntercepts).toArray.zip($(upperBoundsOnIntercepts).toArray)
.forall(x => x._1 <= x._2), "LowerBoundsOnIntercepts should always be " +
"less than or equal to upperBoundsOnIntercepts, but found: " +
s"lowerBoundsOnIntercepts = $getLowerBoundsOnIntercepts, " +
s"upperBoundsOnIntercepts = $getUpperBoundsOnIntercepts.")
}
}
private var optInitialModel: Option[LogisticRegressionModel] = None
private[spark] def setInitialModel(model: LogisticRegressionModel): this.type = {
this.optInitialModel = Some(model)
this
}
override protected[spark] def train(dataset: Dataset[_]): LogisticRegressionModel = {
val handlePersistence = dataset.rdd.getStorageLevel == StorageLevel.NONE
train(dataset, handlePersistence)
}
protected[spark] def train(
dataset: Dataset[_],
handlePersistence: Boolean): LogisticRegressionModel = {
val w = if (!isDefined(weightCol) || $(weightCol).isEmpty) lit(1.0) else col($(weightCol))
val instances: RDD[Instance] =
dataset.select(col($(labelCol)), w, col($(featuresCol))).rdd.map {
case Row(label: Double, weight: Double, features: Vector) =>
Instance(label, weight, features)
}
if (handlePersistence) instances.persist(StorageLevel.MEMORY_AND_DISK)
val instr = Instrumentation.create(this, instances)
instr.logParams(regParam, elasticNetParam, standardization, threshold,
maxIter, tol, fitIntercept)
val (summarizer, labelSummarizer) = {
val seqOp = (c: (MultivariateOnlineSummarizer, MultiClassSummarizer),
instance: Instance) =>
(c._1.add(instance.features, instance.weight), c._2.add(instance.label, instance.weight))
val combOp = (c1: (MultivariateOnlineSummarizer, MultiClassSummarizer),
c2: (MultivariateOnlineSummarizer, MultiClassSummarizer)) =>
(c1._1.merge(c2._1), c1._2.merge(c2._2))
instances.treeAggregate(
new MultivariateOnlineSummarizer, new MultiClassSummarizer
)(seqOp, combOp, $(aggregationDepth))
}
val histogram = labelSummarizer.histogram
val numInvalid = labelSummarizer.countInvalid
val numFeatures = summarizer.mean.size
val numFeaturesPlusIntercept = if (getFitIntercept) numFeatures + 1 else numFeatures
val numClasses = MetadataUtils.getNumClasses(dataset.schema($(labelCol))) match {
case Some(n: Int) =>
require(n >= histogram.length, s"Specified number of classes $n was " +
s"less than the number of unique labels ${histogram.length}.")
n
case None => histogram.length
}
val isMultinomial = getFamily.toLowerCase(Locale.ROOT) match {
case "binomial" =>
require(numClasses == 1 || numClasses == 2, s"Binomial family only supports 1 or 2 " +
s"outcome classes but found $numClasses.")
false
case "multinomial" => true
case "auto" => numClasses > 2
case other => throw new IllegalArgumentException(s"Unsupported family: $other")
}
val numCoefficientSets = if (isMultinomial) numClasses else 1
// Check params interaction is valid if fitting under bound constrained optimization.
if (usingBoundConstrainedOptimization) {
assertBoundConstrainedOptimizationParamsValid(numCoefficientSets, numFeatures)
}
if (isDefined(thresholds)) {
require($(thresholds).length == numClasses, this.getClass.getSimpleName +
".train() called with non-matching numClasses and thresholds.length." +
s" numClasses=$numClasses, but thresholds has length ${$(thresholds).length}")
}
instr.logNumClasses(numClasses)
instr.logNumFeatures(numFeatures)
val (coefficientMatrix, interceptVector, objectiveHistory) = {
if (numInvalid != 0) {
val msg = s"Classification labels should be in [0 to ${numClasses - 1}]. " +
s"Found $numInvalid invalid labels."
logError(msg)
throw new SparkException(msg)
}
val isConstantLabel = histogram.count(_ != 0.0) == 1
if ($(fitIntercept) && isConstantLabel && !usingBoundConstrainedOptimization) {
logWarning(s"All labels are the same value and fitIntercept=true, so the coefficients " +
s"will be zeros. Training is not needed.")
val constantLabelIndex = Vectors.dense(histogram).argmax
val coefMatrix = new SparseMatrix(numCoefficientSets, numFeatures,
new Array[Int](numCoefficientSets + 1), Array.empty[Int], Array.empty[Double],
isTransposed = true).compressed
val interceptVec = if (isMultinomial) {
Vectors.sparse(numClasses, Seq((constantLabelIndex, Double.PositiveInfinity)))
} else {
Vectors.dense(if (numClasses == 2) Double.PositiveInfinity else Double.NegativeInfinity)
}
(coefMatrix, interceptVec, Array.empty[Double])
} else {
if (!$(fitIntercept) && isConstantLabel) {
logWarning(s"All labels belong to a single class and fitIntercept=false. It's a " +
s"dangerous ground, so the algorithm may not converge.")
}
val featuresMean = summarizer.mean.toArray
val featuresStd = summarizer.variance.toArray.map(math.sqrt)
if (!$(fitIntercept) && (0 until numFeatures).exists { i =>
featuresStd(i) == 0.0 && featuresMean(i) != 0.0 }) {
logWarning("Fitting LogisticRegressionModel without intercept on dataset with " +
"constant nonzero column, Spark MLlib outputs zero coefficients for constant " +
"nonzero columns. This behavior is the same as R glmnet but different from LIBSVM.")
}
val regParamL1 = $(elasticNetParam) * $(regParam)
val regParamL2 = (1.0 - $(elasticNetParam)) * $(regParam)
val bcFeaturesStd = instances.context.broadcast(featuresStd)
val costFun = new LogisticCostFun(instances, numClasses, $(fitIntercept),
$(standardization), bcFeaturesStd, regParamL2, multinomial = isMultinomial,
$(aggregationDepth))
val numCoeffsPlusIntercepts = numFeaturesPlusIntercept * numCoefficientSets
val (lowerBounds, upperBounds): (Array[Double], Array[Double]) = {
if (usingBoundConstrainedOptimization) {
val lowerBounds = Array.fill[Double](numCoeffsPlusIntercepts)(Double.NegativeInfinity)
val upperBounds = Array.fill[Double](numCoeffsPlusIntercepts)(Double.PositiveInfinity)
val isSetLowerBoundsOnCoefficients = isSet(lowerBoundsOnCoefficients)
val isSetUpperBoundsOnCoefficients = isSet(upperBoundsOnCoefficients)
val isSetLowerBoundsOnIntercepts = isSet(lowerBoundsOnIntercepts)
val isSetUpperBoundsOnIntercepts = isSet(upperBoundsOnIntercepts)
var i = 0
while (i < numCoeffsPlusIntercepts) {
val coefficientSetIndex = i % numCoefficientSets
val featureIndex = i / numCoefficientSets
if (featureIndex < numFeatures) {
if (isSetLowerBoundsOnCoefficients) {
lowerBounds(i) = $(lowerBoundsOnCoefficients)(
coefficientSetIndex, featureIndex) * featuresStd(featureIndex)
}
if (isSetUpperBoundsOnCoefficients) {
upperBounds(i) = $(upperBoundsOnCoefficients)(
coefficientSetIndex, featureIndex) * featuresStd(featureIndex)
}
} else {
if (isSetLowerBoundsOnIntercepts) {
lowerBounds(i) = $(lowerBoundsOnIntercepts)(coefficientSetIndex)
}
if (isSetUpperBoundsOnIntercepts) {
upperBounds(i) = $(upperBoundsOnIntercepts)(coefficientSetIndex)
}
}
i += 1
}
(lowerBounds, upperBounds)
} else {
(null, null)
}
}
val optimizer = if ($(elasticNetParam) == 0.0 || $(regParam) == 0.0) {
if (lowerBounds != null && upperBounds != null) {
new BreezeLBFGSB(
BDV[Double](lowerBounds), BDV[Double](upperBounds), $(maxIter), 10, $(tol))
} else {
new BreezeLBFGS[BDV[Double]]($(maxIter), 10, $(tol))
}
} else {
val standardizationParam = $(standardization)
def regParamL1Fun = (index: Int) => {
// Remove the L1 penalization on the intercept
val isIntercept = $(fitIntercept) && index >= numFeatures * numCoefficientSets
if (isIntercept) {
0.0
} else {
if (standardizationParam) {
regParamL1
} else {
val featureIndex = index / numCoefficientSets
// If `standardization` is false, we still standardize the data
// to improve the rate of convergence; as a result, we have to
// perform this reverse standardization by penalizing each component
// differently to get effectively the same objective function when
// the training dataset is not standardized.
if (featuresStd(featureIndex) != 0.0) {
regParamL1 / featuresStd(featureIndex)
} else {
0.0
}
}
}
}
new BreezeOWLQN[Int, BDV[Double]]($(maxIter), 10, regParamL1Fun, $(tol))
}
/*
The coefficients are laid out in column major order during training. Here we initialize
a column major matrix of initial coefficients.
*/
val initialCoefWithInterceptMatrix =
Matrices.zeros(numCoefficientSets, numFeaturesPlusIntercept)
val initialModelIsValid = optInitialModel match {
case Some(_initialModel) =>
val providedCoefs = _initialModel.coefficientMatrix
val modelIsValid = (providedCoefs.numRows == numCoefficientSets) &&
(providedCoefs.numCols == numFeatures) &&
(_initialModel.interceptVector.size == numCoefficientSets) &&
(_initialModel.getFitIntercept == $(fitIntercept))
if (!modelIsValid) {
logWarning(s"Initial coefficients will be ignored! Its dimensions " +
s"(${providedCoefs.numRows}, ${providedCoefs.numCols}) did not match the " +
s"expected size ($numCoefficientSets, $numFeatures)")
}
modelIsValid
case None => false
}
if (initialModelIsValid) {
val providedCoef = optInitialModel.get.coefficientMatrix
providedCoef.foreachActive { (classIndex, featureIndex, value) =>
// We need to scale the coefficients since they will be trained in the scaled space
initialCoefWithInterceptMatrix.update(classIndex, featureIndex,
value * featuresStd(featureIndex))
}
if ($(fitIntercept)) {
optInitialModel.get.interceptVector.foreachActive { (classIndex, value) =>
initialCoefWithInterceptMatrix.update(classIndex, numFeatures, value)
}
}
} else if ($(fitIntercept) && isMultinomial) {
/*
For multinomial logistic regression, when we initialize the coefficients as zeros,
it will converge faster if we initialize the intercepts such that
they follow the distribution of the labels.
{{{
P(1) = \exp(b_1) / Z
...
P(K) = \exp(b_K) / Z
where Z = \sum_{k=1}^{K} \exp(b_k)
}}}
Since this doesn't have a unique solution, one of the solutions that satisfies the
above equations is
{{{
\exp(b_k) = count_k * \exp(\lambda)
b_k = \log(count_k) + \lambda
}}}
\lambda is a free parameter, so choose \lambda such that the
intercepts are mean-centered. This yields
{{{
b_k = \log(count_k)
b_k' = b_k - \mean(b_k)
}}}
*/
val rawIntercepts = histogram.map(c => math.log(c + 1)) // add 1 for smoothing
val rawMean = rawIntercepts.sum / rawIntercepts.length
rawIntercepts.indices.foreach { i =>
initialCoefWithInterceptMatrix.update(i, numFeatures, rawIntercepts(i) - rawMean)
}
} else if ($(fitIntercept)) {
/*
For binary logistic regression, when we initialize the coefficients as zeros,
it will converge faster if we initialize the intercept such that
it follows the distribution of the labels.
{{{
P(0) = 1 / (1 + \exp(b)), and
P(1) = \exp(b) / (1 + \exp(b))
}}}, hence
{{{
b = \log{P(1) / P(0)} = \log{count_1 / count_0}
}}}
*/
initialCoefWithInterceptMatrix.update(0, numFeatures,
math.log(histogram(1) / histogram(0)))
}
if (usingBoundConstrainedOptimization) {
// Make sure all initial values locate in the corresponding bound.
var i = 0
while (i < numCoeffsPlusIntercepts) {
val coefficientSetIndex = i % numCoefficientSets
val featureIndex = i / numCoefficientSets
if (initialCoefWithInterceptMatrix(coefficientSetIndex, featureIndex) < lowerBounds(i))
{
initialCoefWithInterceptMatrix.update(
coefficientSetIndex, featureIndex, lowerBounds(i))
} else if (
initialCoefWithInterceptMatrix(coefficientSetIndex, featureIndex) > upperBounds(i))
{
initialCoefWithInterceptMatrix.update(
coefficientSetIndex, featureIndex, upperBounds(i))
}
i += 1
}
}
val states = optimizer.iterations(new CachedDiffFunction(costFun),
new BDV[Double](initialCoefWithInterceptMatrix.toArray))
/*
Note that in Logistic Regression, the objective history (loss + regularization)
is log-likelihood which is invariant under feature standardization. As a result,
the objective history from optimizer is the same as the one in the original space.
*/
val arrayBuilder = mutable.ArrayBuilder.make[Double]
var state: optimizer.State = null
while (states.hasNext) {
state = states.next()
arrayBuilder += state.adjustedValue
}
bcFeaturesStd.destroy(blocking = false)
if (state == null) {
val msg = s"${optimizer.getClass.getName} failed."
logError(msg)
throw new SparkException(msg)
}
/*
The coefficients are trained in the scaled space; we're converting them back to
the original space.
Additionally, since the coefficients were laid out in column major order during training
to avoid extra computation, we convert them back to row major before passing them to the
model.
Note that the intercept in scaled space and original space is the same;
as a result, no scaling is needed.
*/
val allCoefficients = state.x.toArray.clone()
val allCoefMatrix = new DenseMatrix(numCoefficientSets, numFeaturesPlusIntercept,
allCoefficients)
val denseCoefficientMatrix = new DenseMatrix(numCoefficientSets, numFeatures,
new Array[Double](numCoefficientSets * numFeatures), isTransposed = true)
val interceptVec = if ($(fitIntercept) || !isMultinomial) {
Vectors.zeros(numCoefficientSets)
} else {
Vectors.sparse(numCoefficientSets, Seq())
}
// separate intercepts and coefficients from the combined matrix
allCoefMatrix.foreachActive { (classIndex, featureIndex, value) =>
val isIntercept = $(fitIntercept) && (featureIndex == numFeatures)
if (!isIntercept && featuresStd(featureIndex) != 0.0) {
denseCoefficientMatrix.update(classIndex, featureIndex,
value / featuresStd(featureIndex))
}
if (isIntercept) interceptVec.toArray(classIndex) = value
}
if ($(regParam) == 0.0 && isMultinomial && !usingBoundConstrainedOptimization) {
/*
When no regularization is applied, the multinomial coefficients lack identifiability
because we do not use a pivot class. We can add any constant value to the coefficients
and get the same likelihood. So here, we choose the mean centered coefficients for
reproducibility. This method follows the approach in glmnet, described here:
Friedman, et al. "Regularization Paths for Generalized Linear Models via
Coordinate Descent," https://core.ac.uk/download/files/153/6287975.pdf
*/
val centers = Array.fill(numFeatures)(0.0)
denseCoefficientMatrix.foreachActive { case (i, j, v) =>
centers(j) += v
}
centers.transform(_ / numCoefficientSets)
denseCoefficientMatrix.foreachActive { case (i, j, v) =>
denseCoefficientMatrix.update(i, j, v - centers(j))
}
}
// center the intercepts when using multinomial algorithm
if ($(fitIntercept) && isMultinomial && !usingBoundConstrainedOptimization) {
val interceptArray = interceptVec.toArray
val interceptMean = interceptArray.sum / interceptArray.length
(0 until interceptVec.size).foreach { i => interceptArray(i) -= interceptMean }
}
(denseCoefficientMatrix.compressed, interceptVec.compressed, arrayBuilder.result())
}
}
if (handlePersistence) instances.unpersist()
val model = copyValues(new LogisticRegressionModel(uid, coefficientMatrix, interceptVector,
numClasses, isMultinomial))
// TODO: implement summary model for multinomial case
val m = if (!isMultinomial) {
val (summaryModel, probabilityColName) = model.findSummaryModelAndProbabilityCol()
val logRegSummary = new BinaryLogisticRegressionTrainingSummary(
summaryModel.transform(dataset),
probabilityColName,
$(labelCol),
$(featuresCol),
objectiveHistory)
model.setSummary(Some(logRegSummary))
} else {
model
}
instr.logSuccess(m)
m
}
@Since("1.4.0")
override def copy(extra: ParamMap): LogisticRegression = defaultCopy(extra)
}
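// Illustrative sketch: a minimal training run, assuming a DataFrame `training` with "label" and
// "features" columns is available (e.g. loaded via spark.read.format("libsvm")).
//
//   val lr = new LogisticRegression()
//     .setMaxIter(10)
//     .setRegParam(0.3)
//     .setElasticNetParam(0.8)
//   val model = lr.fit(training)
//   println(s"Coefficients: ${model.coefficientMatrix} Intercept: ${model.interceptVector}")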
@Since("1.6.0")
object LogisticRegression extends DefaultParamsReadable[LogisticRegression] {
@Since("1.6.0")
override def load(path: String): LogisticRegression = super.load(path)
private[classification] val supportedFamilyNames =
Array("auto", "binomial", "multinomial").map(_.toLowerCase(Locale.ROOT))
}
/**
* Model produced by [[LogisticRegression]].
*/
@Since("1.4.0")
class LogisticRegressionModel private[spark] (
@Since("1.4.0") override val uid: String,
@Since("2.1.0") val coefficientMatrix: Matrix,
@Since("2.1.0") val interceptVector: Vector,
@Since("1.3.0") override val numClasses: Int,
private val isMultinomial: Boolean)
extends ProbabilisticClassificationModel[Vector, LogisticRegressionModel]
with LogisticRegressionParams with MLWritable {
require(coefficientMatrix.numRows == interceptVector.size, s"Dimension mismatch! Expected " +
s"coefficientMatrix.numRows == interceptVector.size, but ${coefficientMatrix.numRows} != " +
s"${interceptVector.size}")
private[spark] def this(uid: String, coefficients: Vector, intercept: Double) =
this(uid, new DenseMatrix(1, coefficients.size, coefficients.toArray, isTransposed = true),
Vectors.dense(intercept), 2, isMultinomial = false)
/**
* A vector of model coefficients for "binomial" logistic regression. If this model was trained
* using the "multinomial" family then an exception is thrown.
*
* @return Vector
*/
@Since("2.0.0")
def coefficients: Vector = if (isMultinomial) {
throw new SparkException("Multinomial models contain a matrix of coefficients, use " +
"coefficientMatrix instead.")
} else {
_coefficients
}
// convert to appropriate vector representation without replicating data
private lazy val _coefficients: Vector = {
require(coefficientMatrix.isTransposed,
"LogisticRegressionModel coefficients should be row major for binomial model.")
coefficientMatrix match {
case dm: DenseMatrix => Vectors.dense(dm.values)
case sm: SparseMatrix => Vectors.sparse(coefficientMatrix.numCols, sm.rowIndices, sm.values)
}
}
/**
* The model intercept for "binomial" logistic regression. If this model was fit with the
* "multinomial" family then an exception is thrown.
*
* @return Double
*/
@Since("1.3.0")
def intercept: Double = if (isMultinomial) {
throw new SparkException("Multinomial models contain a vector of intercepts, use " +
"interceptVector instead.")
} else {
_intercept
}
private lazy val _intercept = interceptVector.toArray.head
@Since("1.5.0")
override def setThreshold(value: Double): this.type = super.setThreshold(value)
@Since("1.5.0")
override def getThreshold: Double = super.getThreshold
@Since("1.5.0")
override def setThresholds(value: Array[Double]): this.type = super.setThresholds(value)
@Since("1.5.0")
override def getThresholds: Array[Double] = super.getThresholds
/** Margin (rawPrediction) for class label 1. For binary classification only. */
private val margin: Vector => Double = (features) => {
BLAS.dot(features, _coefficients) + _intercept
}
/** Margin (rawPrediction) for each class label. */
private val margins: Vector => Vector = (features) => {
val m = interceptVector.toDense.copy
BLAS.gemv(1.0, coefficientMatrix, features, 1.0, m)
m
}
/** Score (probability) for class label 1. For binary classification only. */
private val score: Vector => Double = (features) => {
val m = margin(features)
1.0 / (1.0 + math.exp(-m))
}
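// Worked example: a margin of 0.0 gives a score of 0.5; a margin of 2.0 gives
// 1.0 / (1.0 + math.exp(-2.0)), approximately 0.88.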
@Since("1.6.0")
override val numFeatures: Int = coefficientMatrix.numCols
private var trainingSummary: Option[LogisticRegressionTrainingSummary] = None
/**
* Gets summary of model on training set. An exception is
* thrown if `trainingSummary == None`.
*/
@Since("1.5.0")
def summary: LogisticRegressionTrainingSummary = trainingSummary.getOrElse {
throw new SparkException("No training summary available for this LogisticRegressionModel")
}
/**
* If the probability column is set returns the current model and probability column,
* otherwise generates a new column and sets it as the probability column on a new copy
* of the current model.
*/
private[classification] def findSummaryModelAndProbabilityCol():
(LogisticRegressionModel, String) = {
$(probabilityCol) match {
case "" =>
val probabilityColName = "probability_" + java.util.UUID.randomUUID.toString
(copy(ParamMap.empty).setProbabilityCol(probabilityColName), probabilityColName)
case p => (this, p)
}
}
private[classification]
def setSummary(summary: Option[LogisticRegressionTrainingSummary]): this.type = {
this.trainingSummary = summary
this
}
/** Indicates whether a training summary exists for this model instance. */
@Since("1.5.0")
def hasSummary: Boolean = trainingSummary.isDefined
/**
* Evaluates the model on a test dataset.
*
* @param dataset Test dataset to evaluate model on.
*/
@Since("2.0.0")
def evaluate(dataset: Dataset[_]): LogisticRegressionSummary = {
// Handle possible missing or invalid prediction columns
val (summaryModel, probabilityColName) = findSummaryModelAndProbabilityCol()
new BinaryLogisticRegressionSummary(summaryModel.transform(dataset),
probabilityColName, $(labelCol), $(featuresCol))
}
/**
* Predict label for the given feature vector.
* The behavior of this can be adjusted using `thresholds`.
*/
override protected def predict(features: Vector): Double = if (isMultinomial) {
super.predict(features)
} else {
// Note: We should use getThreshold instead of $(threshold) since getThreshold is overridden.
if (score(features) > getThreshold) 1 else 0
}
override protected def raw2probabilityInPlace(rawPrediction: Vector): Vector = {
rawPrediction match {
case dv: DenseVector =>
if (isMultinomial) {
val size = dv.size
val values = dv.values
// get the maximum margin
val maxMarginIndex = rawPrediction.argmax
val maxMargin = rawPrediction(maxMarginIndex)
if (maxMargin == Double.PositiveInfinity) {
var k = 0
while (k < size) {
values(k) = if (k == maxMarginIndex) 1.0 else 0.0
k += 1
}
} else {
val sum = {
var temp = 0.0
var k = 0
while (k < numClasses) {
values(k) = if (maxMargin > 0) {
math.exp(values(k) - maxMargin)
} else {
math.exp(values(k))
}
temp += values(k)
k += 1
}
temp
}
BLAS.scal(1 / sum, dv)
}
dv
} else {
var i = 0
val size = dv.size
while (i < size) {
dv.values(i) = 1.0 / (1.0 + math.exp(-dv.values(i)))
i += 1
}
dv
}
case sv: SparseVector =>
throw new RuntimeException("Unexpected error in LogisticRegressionModel:" +
" raw2probabilitiesInPlace encountered SparseVector")
}
}
override protected def predictRaw(features: Vector): Vector = {
if (isMultinomial) {
margins(features)
} else {
val m = margin(features)
Vectors.dense(-m, m)
}
}
@Since("1.4.0")
override def copy(extra: ParamMap): LogisticRegressionModel = {
val newModel = copyValues(new LogisticRegressionModel(uid, coefficientMatrix, interceptVector,
numClasses, isMultinomial), extra)
newModel.setSummary(trainingSummary).setParent(parent)
}
override protected def raw2prediction(rawPrediction: Vector): Double = {
if (isMultinomial) {
super.raw2prediction(rawPrediction)
} else {
// Note: We should use getThreshold instead of $(threshold) since getThreshold is overridden.
val t = getThreshold
val rawThreshold = if (t == 0.0) {
Double.NegativeInfinity
} else if (t == 1.0) {
Double.PositiveInfinity
} else {
math.log(t / (1.0 - t))
}
if (rawPrediction(1) > rawThreshold) 1 else 0
}
}
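  // Worked example (added note, not in the original source): for getThreshold = 0.7 the conversion
  // above gives rawThreshold = log(0.7 / 0.3) ≈ 0.847, so label 1 is predicted exactly when the raw
  // margin rawPrediction(1) exceeds 0.847, which is equivalent to score(features) > 0.7.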
override protected def probability2prediction(probability: Vector): Double = {
if (isMultinomial) {
super.probability2prediction(probability)
} else {
// Note: We should use getThreshold instead of $(threshold) since getThreshold is overridden.
if (probability(1) > getThreshold) 1 else 0
}
}
/**
* Returns a [[org.apache.spark.ml.util.MLWriter]] instance for this ML instance.
*
* For [[LogisticRegressionModel]], this does NOT currently save the training [[summary]].
* An option to save [[summary]] may be added in the future.
*
* This also does not save the [[parent]] currently.
*/
@Since("1.6.0")
override def write: MLWriter = new LogisticRegressionModel.LogisticRegressionModelWriter(this)
}
@Since("1.6.0")
object LogisticRegressionModel extends MLReadable[LogisticRegressionModel] {
@Since("1.6.0")
override def read: MLReader[LogisticRegressionModel] = new LogisticRegressionModelReader
@Since("1.6.0")
override def load(path: String): LogisticRegressionModel = super.load(path)
/** [[MLWriter]] instance for [[LogisticRegressionModel]] */
private[LogisticRegressionModel]
class LogisticRegressionModelWriter(instance: LogisticRegressionModel)
extends MLWriter with Logging {
private case class Data(
numClasses: Int,
numFeatures: Int,
interceptVector: Vector,
coefficientMatrix: Matrix,
isMultinomial: Boolean)
override protected def saveImpl(path: String): Unit = {
// Save metadata and Params
DefaultParamsWriter.saveMetadata(instance, path, sc)
// Save model data: numClasses, numFeatures, intercept, coefficients
val data = Data(instance.numClasses, instance.numFeatures, instance.interceptVector,
instance.coefficientMatrix, instance.isMultinomial)
val dataPath = new Path(path, "data").toString
sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
}
}
private class LogisticRegressionModelReader extends MLReader[LogisticRegressionModel] {
/** Checked against metadata when loading model */
private val className = classOf[LogisticRegressionModel].getName
override def load(path: String): LogisticRegressionModel = {
val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
val (major, minor) = VersionUtils.majorMinorVersion(metadata.sparkVersion)
val dataPath = new Path(path, "data").toString
val data = sparkSession.read.format("parquet").load(dataPath)
val model = if (major.toInt < 2 || (major.toInt == 2 && minor.toInt == 0)) {
// 2.0 and before
val Row(numClasses: Int, numFeatures: Int, intercept: Double, coefficients: Vector) =
MLUtils.convertVectorColumnsToML(data, "coefficients")
.select("numClasses", "numFeatures", "intercept", "coefficients")
.head()
val coefficientMatrix =
new DenseMatrix(1, coefficients.size, coefficients.toArray, isTransposed = true)
val interceptVector = Vectors.dense(intercept)
new LogisticRegressionModel(metadata.uid, coefficientMatrix,
interceptVector, numClasses, isMultinomial = false)
} else {
// 2.1+
val Row(numClasses: Int, numFeatures: Int, interceptVector: Vector,
coefficientMatrix: Matrix, isMultinomial: Boolean) = data
.select("numClasses", "numFeatures", "interceptVector", "coefficientMatrix",
"isMultinomial").head()
new LogisticRegressionModel(metadata.uid, coefficientMatrix, interceptVector,
numClasses, isMultinomial)
}
DefaultParamsReader.getAndSetParams(model, metadata)
model
}
}
}
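// Hedged usage sketch (added illustration, not part of the original source): round-tripping a
// fitted model through the MLWriter/MLReader machinery defined above. The save path and the
// fitted `model` are assumed inputs; the object name is purely illustrative.
private object LogisticRegressionModelPersistenceExample {
  def roundTrip(model: LogisticRegressionModel, path: String): LogisticRegressionModel = {
    model.write.overwrite().save(path)   // LogisticRegressionModelWriter saves metadata + data
    LogisticRegressionModel.load(path)   // LogisticRegressionModelReader restores the model
  }
}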
/**
* MultiClassSummarizer computes the number of distinct labels and corresponding counts,
 * and validates the data to see if the labels used for k-class classification
 * are in the range of {0, 1, ..., k - 1} in an online fashion.
 *
 * Two MultiClassSummarizers can be merged together to have a statistical summary of the
* corresponding joint dataset.
*/
private[classification] class MultiClassSummarizer extends Serializable {
  // The first element of the value in distinctMap is the actual number of instances,
  // and the second element of the value is the sum of the weights.
private val distinctMap = new mutable.HashMap[Int, (Long, Double)]
private var totalInvalidCnt: Long = 0L
/**
   * Add a new label into this MultiClassSummarizer, and update the distinct map.
   *
   * @param label The label for this data point.
   * @param weight The weight of this instance.
   * @return This MultiClassSummarizer
*/
def add(label: Double, weight: Double = 1.0): this.type = {
require(weight >= 0.0, s"instance weight, $weight has to be >= 0.0")
if (weight == 0.0) return this
if (label - label.toInt != 0.0 || label < 0) {
totalInvalidCnt += 1
this
}
else {
val (counts: Long, weightSum: Double) = distinctMap.getOrElse(label.toInt, (0L, 0.0))
distinctMap.put(label.toInt, (counts + 1L, weightSum + weight))
this
}
}
/**
   * Merge another MultiClassSummarizer, and update the distinct map.
   * (Note that it will merge the smaller distinct map into the larger one using in-place
   * merging, so either `this` or `other` object will be modified and returned.)
   *
   * @param other The other MultiClassSummarizer to be merged.
   * @return Merged MultiClassSummarizer object.
*/
def merge(other: MultiClassSummarizer): MultiClassSummarizer = {
val (largeMap, smallMap) = if (this.distinctMap.size > other.distinctMap.size) {
(this, other)
} else {
(other, this)
}
smallMap.distinctMap.foreach {
case (key, value) =>
val (counts: Long, weightSum: Double) = largeMap.distinctMap.getOrElse(key, (0L, 0.0))
largeMap.distinctMap.put(key, (counts + value._1, weightSum + value._2))
}
largeMap.totalInvalidCnt += smallMap.totalInvalidCnt
largeMap
}
/** @return The total invalid input counts. */
def countInvalid: Long = totalInvalidCnt
/** @return The number of distinct labels in the input dataset. */
def numClasses: Int = if (distinctMap.isEmpty) 0 else distinctMap.keySet.max + 1
/** @return The weightSum of each label in the input dataset. */
def histogram: Array[Double] = {
val result = Array.ofDim[Double](numClasses)
var i = 0
val len = result.length
while (i < len) {
result(i) = distinctMap.getOrElse(i, (0L, 0.0))._2
i += 1
}
result
}
}
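// Hedged usage sketch (added illustration, not part of the original source): how the summarizer
// above is typically driven, with made-up labels and weights. Partial summarizers are filled and
// then merged; a negative or non-integer label is counted as invalid.
private object MultiClassSummarizerExample {
  def demo(): (Int, Long, Array[Double]) = {
    val a = new MultiClassSummarizer().add(0.0).add(1.0, weight = 2.0)
    val b = new MultiClassSummarizer().add(2.0).add(-1.0)  // -1.0 is recorded as invalid
    val merged = a.merge(b)
    // numClasses = 3, countInvalid = 1, histogram = Array(1.0, 2.0, 1.0)
    (merged.numClasses, merged.countInvalid, merged.histogram)
  }
}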
/**
 * Abstraction for Logistic Regression training results.
* Currently, the training summary ignores the training weights except
* for the objective trace.
*/
sealed trait LogisticRegressionTrainingSummary extends LogisticRegressionSummary {
/** objective function (scaled loss + regularization) at each iteration. */
def objectiveHistory: Array[Double]
/** Number of training iterations until termination */
def totalIterations: Int = objectiveHistory.length
}
/**
* Abstraction for Logistic Regression Results for a given model.
*/
sealed trait LogisticRegressionSummary extends Serializable {
/**
* Dataframe output by the model's `transform` method.
*/
def predictions: DataFrame
/** Field in "predictions" which gives the probability of each class as a vector. */
def probabilityCol: String
/** Field in "predictions" which gives the true label of each instance (if available). */
def labelCol: String
/** Field in "predictions" which gives the features of each instance as a vector. */
def featuresCol: String
}
/**
* :: Experimental ::
* Logistic regression training results.
*
* @param predictions dataframe output by the model's `transform` method.
* @param probabilityCol field in "predictions" which gives the probability of
* each class as a vector.
* @param labelCol field in "predictions" which gives the true label of each instance.
* @param featuresCol field in "predictions" which gives the features of each instance as a vector.
* @param objectiveHistory objective function (scaled loss + regularization) at each iteration.
*/
@Experimental
@Since("1.5.0")
class BinaryLogisticRegressionTrainingSummary private[classification] (
predictions: DataFrame,
probabilityCol: String,
labelCol: String,
featuresCol: String,
@Since("1.5.0") val objectiveHistory: Array[Double])
extends BinaryLogisticRegressionSummary(predictions, probabilityCol, labelCol, featuresCol)
with LogisticRegressionTrainingSummary {
}
/**
* :: Experimental ::
* Binary Logistic regression results for a given model.
*
* @param predictions dataframe output by the model's `transform` method.
* @param probabilityCol field in "predictions" which gives the probability of
* each class as a vector.
* @param labelCol field in "predictions" which gives the true label of each instance.
* @param featuresCol field in "predictions" which gives the features of each instance as a vector.
*/
@Experimental
@Since("1.5.0")
class BinaryLogisticRegressionSummary private[classification] (
@Since("1.5.0") @transient override val predictions: DataFrame,
@Since("1.5.0") override val probabilityCol: String,
@Since("1.5.0") override val labelCol: String,
@Since("1.6.0") override val featuresCol: String) extends LogisticRegressionSummary {
private val sparkSession = predictions.sparkSession
import sparkSession.implicits._
/**
* Returns a BinaryClassificationMetrics object.
*/
// TODO: Allow the user to vary the number of bins using a setBins method in
// BinaryClassificationMetrics. For now the default is set to 100.
@transient private val binaryMetrics = new BinaryClassificationMetrics(
predictions.select(col(probabilityCol), col(labelCol).cast(DoubleType)).rdd.map {
case Row(score: Vector, label: Double) => (score(1), label)
}, 100
)
/**
* Returns the receiver operating characteristic (ROC) curve,
* which is a Dataframe having two fields (FPR, TPR)
* with (0.0, 0.0) prepended and (1.0, 1.0) appended to it.
* See http://en.wikipedia.org/wiki/Receiver_operating_characteristic
*
* @note This ignores instance weights (setting all to 1.0) from `LogisticRegression.weightCol`.
* This will change in later Spark versions.
*/
@Since("1.5.0")
@transient lazy val roc: DataFrame = binaryMetrics.roc().toDF("FPR", "TPR")
/**
* Computes the area under the receiver operating characteristic (ROC) curve.
*
* @note This ignores instance weights (setting all to 1.0) from `LogisticRegression.weightCol`.
* This will change in later Spark versions.
*/
@Since("1.5.0")
lazy val areaUnderROC: Double = binaryMetrics.areaUnderROC()
/**
* Returns the precision-recall curve, which is a Dataframe containing
* two fields recall, precision with (0.0, 1.0) prepended to it.
*
* @note This ignores instance weights (setting all to 1.0) from `LogisticRegression.weightCol`.
* This will change in later Spark versions.
*/
@Since("1.5.0")
@transient lazy val pr: DataFrame = binaryMetrics.pr().toDF("recall", "precision")
/**
* Returns a dataframe with two fields (threshold, F-Measure) curve with beta = 1.0.
*
* @note This ignores instance weights (setting all to 1.0) from `LogisticRegression.weightCol`.
* This will change in later Spark versions.
*/
@Since("1.5.0")
@transient lazy val fMeasureByThreshold: DataFrame = {
binaryMetrics.fMeasureByThreshold().toDF("threshold", "F-Measure")
}
/**
* Returns a dataframe with two fields (threshold, precision) curve.
 * Every possible probability obtained in transforming the dataset is used
 * as a threshold when calculating the precision.
*
* @note This ignores instance weights (setting all to 1.0) from `LogisticRegression.weightCol`.
* This will change in later Spark versions.
*/
@Since("1.5.0")
@transient lazy val precisionByThreshold: DataFrame = {
binaryMetrics.precisionByThreshold().toDF("threshold", "precision")
}
/**
* Returns a dataframe with two fields (threshold, recall) curve.
 * Every possible probability obtained in transforming the dataset is used
 * as a threshold when calculating the recall.
*
* @note This ignores instance weights (setting all to 1.0) from `LogisticRegression.weightCol`.
* This will change in later Spark versions.
*/
@Since("1.5.0")
@transient lazy val recallByThreshold: DataFrame = {
binaryMetrics.recallByThreshold().toDF("threshold", "recall")
}
}
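// Hedged usage sketch (added illustration, not part of the original source): reading the binary
// metrics defined above from a fitted model. `model` and `testData` are assumed inputs; the cast
// reflects that `evaluate` returns a BinaryLogisticRegressionSummary in this version.
private object BinaryLogisticRegressionSummaryExample {
  def report(model: LogisticRegressionModel, testData: DataFrame): (Double, DataFrame) = {
    val summary = model.evaluate(testData).asInstanceOf[BinaryLogisticRegressionSummary]
    (summary.areaUnderROC, summary.roc)  // scalar AUC and the (FPR, TPR) curve as a DataFrame
  }
}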
/**
* LogisticAggregator computes the gradient and loss for binary or multinomial logistic (softmax)
* loss function, as used in classification for instances in sparse or dense vector in an online
* fashion.
*
* Two LogisticAggregators can be merged together to have a summary of loss and gradient of
* the corresponding joint dataset.
*
 * To improve the convergence rate during the optimization process, and to prevent features with
 * very large variances from exerting an overly large influence during model training,
* packages like R's GLMNET perform the scaling to unit variance and remove the mean in order to
* reduce the condition number. The model is then trained in this scaled space, but returns the
* coefficients in the original scale. See page 9 in
* http://cran.r-project.org/web/packages/glmnet/glmnet.pdf
*
* However, we don't want to apply the [[org.apache.spark.ml.feature.StandardScaler]] on the
* training dataset, and then cache the standardized dataset since it will create a lot of overhead.
* As a result, we perform the scaling implicitly when we compute the objective function (though
* we do not subtract the mean).
*
* Note that there is a difference between multinomial (softmax) and binary loss. The binary case
* uses one outcome class as a "pivot" and regresses the other class against the pivot. In the
* multinomial case, the softmax loss function is used to model each class probability
* independently. Using softmax loss produces `K` sets of coefficients, while using a pivot class
* produces `K - 1` sets of coefficients (a single coefficient vector in the binary case). In the
* binary case, we can say that the coefficients are shared between the positive and negative
* classes. When regularization is applied, multinomial (softmax) loss will produce a result
* different from binary loss since the positive and negative don't share the coefficients while the
* binary regression shares the coefficients between positive and negative.
*
* The following is a mathematical derivation for the multinomial (softmax) loss.
*
* The probability of the multinomial outcome $y$ taking on any of the K possible outcomes is:
*
* <blockquote>
* $$
* P(y_i=0|\\vec{x}_i, \\beta) = \\frac{e^{\\vec{x}_i^T \\vec{\\beta}_0}}{\\sum_{k=0}^{K-1}
* e^{\\vec{x}_i^T \\vec{\\beta}_k}} \\\\
* P(y_i=1|\\vec{x}_i, \\beta) = \\frac{e^{\\vec{x}_i^T \\vec{\\beta}_1}}{\\sum_{k=0}^{K-1}
* e^{\\vec{x}_i^T \\vec{\\beta}_k}}\\\\
* P(y_i=K-1|\\vec{x}_i, \\beta) = \\frac{e^{\\vec{x}_i^T \\vec{\\beta}_{K-1}}\\,}{\\sum_{k=0}^{K-1}
* e^{\\vec{x}_i^T \\vec{\\beta}_k}}
* $$
* </blockquote>
*
* The model coefficients $\\beta = (\\beta_0, \\beta_1, \\beta_2, ..., \\beta_{K-1})$ become a matrix
* which has dimension of $K \\times (N+1)$ if the intercepts are added. If the intercepts are not
* added, the dimension will be $K \\times N$.
*
* Note that the coefficients in the model above lack identifiability. That is, any constant scalar
* can be added to all of the coefficients and the probabilities remain the same.
*
* <blockquote>
* $$
* \\begin{align}
* \\frac{e^{\\vec{x}_i^T \\left(\\vec{\\beta}_0 + \\vec{c}\\right)}}{\\sum_{k=0}^{K-1}
* e^{\\vec{x}_i^T \\left(\\vec{\\beta}_k + \\vec{c}\\right)}}
* = \\frac{e^{\\vec{x}_i^T \\vec{\\beta}_0}e^{\\vec{x}_i^T \\vec{c}}\\,}{e^{\\vec{x}_i^T \\vec{c}}
* \\sum_{k=0}^{K-1} e^{\\vec{x}_i^T \\vec{\\beta}_k}}
* = \\frac{e^{\\vec{x}_i^T \\vec{\\beta}_0}}{\\sum_{k=0}^{K-1} e^{\\vec{x}_i^T \\vec{\\beta}_k}}
* \\end{align}
* $$
* </blockquote>
*
* However, when regularization is added to the loss function, the coefficients are indeed
* identifiable because there is only one set of coefficients which minimizes the regularization
* term. When no regularization is applied, we choose the coefficients with the minimum L2
* penalty for consistency and reproducibility. For further discussion see:
*
* Friedman, et al. "Regularization Paths for Generalized Linear Models via Coordinate Descent"
*
* The loss of objective function for a single instance of data (we do not include the
* regularization term here for simplicity) can be written as
*
* <blockquote>
* $$
* \\begin{align}
* \\ell\\left(\\beta, x_i\\right) &= -log{P\\left(y_i \\middle| \\vec{x}_i, \\beta\\right)} \\\\
* &= log\\left(\\sum_{k=0}^{K-1}e^{\\vec{x}_i^T \\vec{\\beta}_k}\\right) - \\vec{x}_i^T \\vec{\\beta}_y\\\\
* &= log\\left(\\sum_{k=0}^{K-1} e^{margins_k}\\right) - margins_y
* \\end{align}
* $$
* </blockquote>
*
* where ${margins}_k = \\vec{x}_i^T \\vec{\\beta}_k$.
*
* For optimization, we have to calculate the first derivative of the loss function, and a simple
* calculation shows that
*
* <blockquote>
* $$
* \\begin{align}
* \\frac{\\partial \\ell(\\beta, \\vec{x}_i, w_i)}{\\partial \\beta_{j, k}}
* &= x_{i,j} \\cdot w_i \\cdot \\left(\\frac{e^{\\vec{x}_i \\cdot \\vec{\\beta}_k}}{\\sum_{k'=0}^{K-1}
* e^{\\vec{x}_i \\cdot \\vec{\\beta}_{k'}}\\,} - I_{y=k}\\right) \\\\
* &= x_{i, j} \\cdot w_i \\cdot multiplier_k
* \\end{align}
* $$
* </blockquote>
*
* where $w_i$ is the sample weight, $I_{y=k}$ is an indicator function
*
* <blockquote>
* $$
* I_{y=k} = \\begin{cases}
* 1 & y = k \\\\
* 0 & else
* \\end{cases}
* $$
* </blockquote>
*
* and
*
* <blockquote>
* $$
* multiplier_k = \\left(\\frac{e^{\\vec{x}_i \\cdot \\vec{\\beta}_k}}{\\sum_{k=0}^{K-1}
* e^{\\vec{x}_i \\cdot \\vec{\\beta}_k}} - I_{y=k}\\right)
* $$
* </blockquote>
*
 * If any of the margins is larger than 709.78, the numerical computation of the multiplier and the
 * loss function will suffer from arithmetic overflow. This issue occurs when there are outliers in
 * the data which are far away from the hyperplane, and it will cause training to fail once
* infinity is introduced. Note that this is only a concern when max(margins) > 0.
*
* Fortunately, when max(margins) = maxMargin > 0, the loss function and the multiplier can
* easily be rewritten into the following equivalent numerically stable formula.
*
* <blockquote>
* $$
* \\ell\\left(\\beta, x\\right) = log\\left(\\sum_{k=0}^{K-1} e^{margins_k - maxMargin}\\right) -
* margins_{y} + maxMargin
* $$
* </blockquote>
*
* Note that each term, $(margins_k - maxMargin)$ in the exponential is no greater than zero; as a
* result, overflow will not happen with this formula.
*
* For $multiplier$, a similar trick can be applied as the following,
*
* <blockquote>
* $$
* multiplier_k = \\left(\\frac{e^{\\vec{x}_i \\cdot \\vec{\\beta}_k - maxMargin}}{\\sum_{k'=0}^{K-1}
* e^{\\vec{x}_i \\cdot \\vec{\\beta}_{k'} - maxMargin}} - I_{y=k}\\right)
* $$
* </blockquote>
*
* @param bcCoefficients The broadcast coefficients corresponding to the features.
* @param bcFeaturesStd The broadcast standard deviation values of the features.
* @param numClasses the number of possible outcomes for k classes classification problem in
* Multinomial Logistic Regression.
* @param fitIntercept Whether to fit an intercept term.
* @param multinomial Whether to use multinomial (softmax) or binary loss
*
* @note In order to avoid unnecessary computation during calculation of the gradient updates
* we lay out the coefficients in column major order during training. This allows us to
* perform feature standardization once, while still retaining sequential memory access
* for speed. We convert back to row major order when we create the model,
* since this form is optimal for the matrix operations used for prediction.
*/
private class LogisticAggregator(
bcCoefficients: Broadcast[Vector],
bcFeaturesStd: Broadcast[Array[Double]],
numClasses: Int,
fitIntercept: Boolean,
multinomial: Boolean) extends Serializable with Logging {
private val numFeatures = bcFeaturesStd.value.length
private val numFeaturesPlusIntercept = if (fitIntercept) numFeatures + 1 else numFeatures
private val coefficientSize = bcCoefficients.value.size
private val numCoefficientSets = if (multinomial) numClasses else 1
if (multinomial) {
require(numClasses == coefficientSize / numFeaturesPlusIntercept, s"The number of " +
s"coefficients should be ${numClasses * numFeaturesPlusIntercept} but was $coefficientSize")
} else {
require(coefficientSize == numFeaturesPlusIntercept, s"Expected $numFeaturesPlusIntercept " +
s"coefficients but got $coefficientSize")
require(numClasses == 1 || numClasses == 2, s"Binary logistic aggregator requires numClasses " +
s"in {1, 2} but found $numClasses.")
}
private var weightSum = 0.0
private var lossSum = 0.0
@transient private lazy val coefficientsArray: Array[Double] = bcCoefficients.value match {
case DenseVector(values) => values
case _ => throw new IllegalArgumentException(s"coefficients only supports dense vector but " +
s"got type ${bcCoefficients.value.getClass}.)")
}
private lazy val gradientSumArray = new Array[Double](coefficientSize)
if (multinomial && numClasses <= 2) {
logInfo(s"Multinomial logistic regression for binary classification yields separate " +
s"coefficients for positive and negative classes. When no regularization is applied, the" +
s"result will be effectively the same as binary logistic regression. When regularization" +
s"is applied, multinomial loss will produce a result different from binary loss.")
}
/** Update gradient and loss using binary loss function. */
private def binaryUpdateInPlace(
features: Vector,
weight: Double,
label: Double): Unit = {
val localFeaturesStd = bcFeaturesStd.value
val localCoefficients = coefficientsArray
val localGradientArray = gradientSumArray
val margin = - {
var sum = 0.0
features.foreachActive { (index, value) =>
if (localFeaturesStd(index) != 0.0 && value != 0.0) {
sum += localCoefficients(index) * value / localFeaturesStd(index)
}
}
if (fitIntercept) sum += localCoefficients(numFeaturesPlusIntercept - 1)
sum
}
val multiplier = weight * (1.0 / (1.0 + math.exp(margin)) - label)
features.foreachActive { (index, value) =>
if (localFeaturesStd(index) != 0.0 && value != 0.0) {
localGradientArray(index) += multiplier * value / localFeaturesStd(index)
}
}
if (fitIntercept) {
localGradientArray(numFeaturesPlusIntercept - 1) += multiplier
}
if (label > 0) {
// The following is equivalent to log(1 + exp(margin)) but more numerically stable.
lossSum += weight * MLUtils.log1pExp(margin)
} else {
lossSum += weight * (MLUtils.log1pExp(margin) - margin)
}
}
/** Update gradient and loss using multinomial (softmax) loss function. */
private def multinomialUpdateInPlace(
features: Vector,
weight: Double,
label: Double): Unit = {
// TODO: use level 2 BLAS operations
/*
Note: this can still be used when numClasses = 2 for binary
logistic regression without pivoting.
*/
val localFeaturesStd = bcFeaturesStd.value
val localCoefficients = coefficientsArray
val localGradientArray = gradientSumArray
// marginOfLabel is margins(label) in the formula
var marginOfLabel = 0.0
var maxMargin = Double.NegativeInfinity
val margins = new Array[Double](numClasses)
features.foreachActive { (index, value) =>
val stdValue = value / localFeaturesStd(index)
var j = 0
while (j < numClasses) {
margins(j) += localCoefficients(index * numClasses + j) * stdValue
j += 1
}
}
var i = 0
while (i < numClasses) {
if (fitIntercept) {
margins(i) += localCoefficients(numClasses * numFeatures + i)
}
if (i == label.toInt) marginOfLabel = margins(i)
if (margins(i) > maxMargin) {
maxMargin = margins(i)
}
i += 1
}
/**
* When maxMargin is greater than 0, the original formula could cause overflow.
* We address this by subtracting maxMargin from all the margins, so it's guaranteed
     * that all of the new margins will be no greater than zero, preventing arithmetic overflow.
*/
val multipliers = new Array[Double](numClasses)
val sum = {
var temp = 0.0
var i = 0
while (i < numClasses) {
if (maxMargin > 0) margins(i) -= maxMargin
val exp = math.exp(margins(i))
temp += exp
multipliers(i) = exp
i += 1
}
temp
}
margins.indices.foreach { i =>
multipliers(i) = multipliers(i) / sum - (if (label == i) 1.0 else 0.0)
}
features.foreachActive { (index, value) =>
if (localFeaturesStd(index) != 0.0 && value != 0.0) {
val stdValue = value / localFeaturesStd(index)
var j = 0
while (j < numClasses) {
localGradientArray(index * numClasses + j) +=
weight * multipliers(j) * stdValue
j += 1
}
}
}
if (fitIntercept) {
var i = 0
while (i < numClasses) {
localGradientArray(numFeatures * numClasses + i) += weight * multipliers(i)
i += 1
}
}
val loss = if (maxMargin > 0) {
math.log(sum) - marginOfLabel + maxMargin
} else {
math.log(sum) - marginOfLabel
}
lossSum += weight * loss
}
/**
* Add a new training instance to this LogisticAggregator, and update the loss and gradient
* of the objective function.
*
* @param instance The instance of data point to be added.
* @return This LogisticAggregator object.
*/
def add(instance: Instance): this.type = {
instance match { case Instance(label, weight, features) =>
if (weight == 0.0) return this
if (multinomial) {
multinomialUpdateInPlace(features, weight, label)
} else {
binaryUpdateInPlace(features, weight, label)
}
weightSum += weight
this
}
}
/**
* Merge another LogisticAggregator, and update the loss and gradient
* of the objective function.
* (Note that it's in place merging; as a result, `this` object will be modified.)
*
* @param other The other LogisticAggregator to be merged.
* @return This LogisticAggregator object.
*/
def merge(other: LogisticAggregator): this.type = {
if (other.weightSum != 0.0) {
weightSum += other.weightSum
lossSum += other.lossSum
var i = 0
val localThisGradientSumArray = this.gradientSumArray
val localOtherGradientSumArray = other.gradientSumArray
val len = localThisGradientSumArray.length
while (i < len) {
localThisGradientSumArray(i) += localOtherGradientSumArray(i)
i += 1
}
}
this
}
def loss: Double = {
require(weightSum > 0.0, s"The effective number of instances should be " +
s"greater than 0.0, but $weightSum.")
lossSum / weightSum
}
def gradient: Matrix = {
require(weightSum > 0.0, s"The effective number of instances should be " +
s"greater than 0.0, but $weightSum.")
val result = Vectors.dense(gradientSumArray.clone())
scal(1.0 / weightSum, result)
new DenseMatrix(numCoefficientSets, numFeaturesPlusIntercept, result.toArray)
}
}
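// Added illustration (not part of the original source) of the max-margin stabilisation derived in
// the comment above LogisticAggregator: shifting every margin by max(margins.max, 0) keeps each
// exponent non-positive, so the exponential sum cannot overflow, and the shift cancels in the loss.
private object StableSoftmaxLossExample {
  def loss(margins: Array[Double], label: Int): Double = {
    val maxMargin = math.max(margins.max, 0.0)
    val shifted = margins.map(_ - maxMargin)
    // log(sum_k exp(margins_k - maxMargin)) - (margins_label - maxMargin)
    // equals log(sum_k exp(margins_k)) - margins_label, but never overflows.
    math.log(shifted.map(math.exp).sum) - shifted(label)
  }
}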
/**
* LogisticCostFun implements Breeze's DiffFunction[T] for a multinomial (softmax) logistic loss
* function, as used in multi-class classification (it is also used in binary logistic regression).
* It returns the loss and gradient with L2 regularization at a particular point (coefficients).
* It's used in Breeze's convex optimization routines.
*/
private class LogisticCostFun(
instances: RDD[Instance],
numClasses: Int,
fitIntercept: Boolean,
standardization: Boolean,
bcFeaturesStd: Broadcast[Array[Double]],
regParamL2: Double,
multinomial: Boolean,
aggregationDepth: Int) extends DiffFunction[BDV[Double]] {
override def calculate(coefficients: BDV[Double]): (Double, BDV[Double]) = {
val coeffs = Vectors.fromBreeze(coefficients)
val bcCoeffs = instances.context.broadcast(coeffs)
val featuresStd = bcFeaturesStd.value
val numFeatures = featuresStd.length
val numCoefficientSets = if (multinomial) numClasses else 1
val numFeaturesPlusIntercept = if (fitIntercept) numFeatures + 1 else numFeatures
val logisticAggregator = {
val seqOp = (c: LogisticAggregator, instance: Instance) => c.add(instance)
val combOp = (c1: LogisticAggregator, c2: LogisticAggregator) => c1.merge(c2)
instances.treeAggregate(
new LogisticAggregator(bcCoeffs, bcFeaturesStd, numClasses, fitIntercept,
multinomial)
)(seqOp, combOp, aggregationDepth)
}
val totalGradientMatrix = logisticAggregator.gradient
val coefMatrix = new DenseMatrix(numCoefficientSets, numFeaturesPlusIntercept, coeffs.toArray)
    // regVal is the sum of the squared coefficients (excluding the intercept), used for L2 regularization.
val regVal = if (regParamL2 == 0.0) {
0.0
} else {
var sum = 0.0
coefMatrix.foreachActive { case (classIndex, featureIndex, value) =>
// We do not apply regularization to the intercepts
val isIntercept = fitIntercept && (featureIndex == numFeatures)
if (!isIntercept) {
        // The following code computes both the loss and the gradient of the regularization
        // term, and adds the gradient contribution back into totalGradientMatrix.
sum += {
if (standardization) {
val gradValue = totalGradientMatrix(classIndex, featureIndex)
totalGradientMatrix.update(classIndex, featureIndex, gradValue + regParamL2 * value)
value * value
} else {
if (featuresStd(featureIndex) != 0.0) {
// If `standardization` is false, we still standardize the data
// to improve the rate of convergence; as a result, we have to
// perform this reverse standardization by penalizing each component
// differently to get effectively the same objective function when
// the training dataset is not standardized.
val temp = value / (featuresStd(featureIndex) * featuresStd(featureIndex))
val gradValue = totalGradientMatrix(classIndex, featureIndex)
totalGradientMatrix.update(classIndex, featureIndex, gradValue + regParamL2 * temp)
value * temp
} else {
0.0
}
}
}
}
}
0.5 * regParamL2 * sum
}
bcCoeffs.destroy(blocking = false)
(logisticAggregator.loss + regVal, new BDV(totalGradientMatrix.toArray))
}
}
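// Hedged sketch (added illustration, not part of the original source): how a DiffFunction such as
// LogisticCostFun is typically driven by Breeze's optimizers. The iteration count, history size
// and tolerance are assumptions, and the fully qualified LBFGS name is used so the sketch does not
// rely on this file's import aliases.
private object LogisticCostFunOptimizationExample {
  def minimize(costFun: DiffFunction[BDV[Double]], init: BDV[Double]): BDV[Double] =
    new breeze.optimize.LBFGS[BDV[Double]](maxIter = 100, m = 10, tolerance = 1e-6)
      .minimize(costFun, init)  // returns the coefficient vector at the final iteration
}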
|
aokolnychyi/spark
|
mllib/src/main/scala/org/apache/spark/ml/classification/LogisticRegression.scala
|
Scala
|
apache-2.0
| 76,712 |
package monocle
import scalaz.{Applicative, Choice, Maybe, Monoid, \\/}
import scalaz.syntax.std.option._
/**
* A [[POptional]] can be seen as a pair of functions:
* - `getOrModify: S => T \\/ A`
* - `set : (B, S) => T`
*
* A [[POptional]] could also be defined as a weaker [[PLens]] and
* weaker [[PPrism]]
*
 * [[POptional]] stands for Polymorphic Optional as its set and modify methods change
* a type `A` to `B` and `S` to `T`.
* [[Optional]] is a type alias for [[POptional]] restricted to monomorphic updates:
* {{{
* type Optional[S, A] = POptional[S, S, A, A]
* }}}
*
* @see [[monocle.law.OptionalLaws]]
*
* @tparam S the source of a [[POptional]]
* @tparam T the modified source of a [[POptional]]
* @tparam A the target of a [[POptional]]
* @tparam B the modified target of a [[POptional]]
*/
abstract class POptional[S, T, A, B] extends Serializable { self =>
/** get the target of a [[POptional]] or modify the source in case there is no target */
def getOrModify(s: S): T \\/ A
/** get the modified source of a [[POptional]] */
def set(b: B): S => T
/** get the target of a [[POptional]] or nothing if there is no target */
def getOption(s: S): Option[A]
/** modify polymorphically the target of a [[POptional]] with an Applicative function */
def modifyF[F[_]: Applicative](f: A => F[B])(s: S): F[T]
/** modify polymorphically the target of a [[POptional]] with a function */
def modify(f: A => B): S => T
/**
* modify polymorphically the target of a [[POptional]] with a function.
* return empty if the [[POptional]] is not matching
*/
@inline final def modifyOption(f: A => B): S => Option[T] =
s => getOption(s).map(a => set(f(a))(s))
/**
* set polymorphically the target of a [[POptional]] with a value.
* return empty if the [[POptional]] is not matching
*/
@inline final def setOption(b: B): S => Option[T] =
modifyOption(_ => b)
/** check if a [[POptional]] has a target */
@inline final def isMatching(s: S): Boolean =
getOption(s).isDefined
  /** join two [[POptional]]s with the same target */
@inline final def sum[S1, T1](other: POptional[S1, T1, A, B]): POptional[S \\/ S1, T \\/ T1, A, B] =
POptional[S \\/ S1, T \\/ T1, A, B](_.fold(self.getOrModify(_).leftMap(\\/.left), other.getOrModify(_).leftMap(\\/.right))){
b => _.bimap(self.set(b), other.set(b))
}
@inline final def first[C]: POptional[(S, C), (T, C), (A, C), (B, C)] =
POptional[(S, C), (T, C), (A, C), (B, C)]{
case (s, c) => getOrModify(s).bimap(_ -> c, _ -> c)
}{ case (b, c) => {
case (s, _) => (set(b)(s), c)
}
}
@inline final def second[C]: POptional[(C, S), (C, T), (C, A), (C, B)] =
POptional[(C, S), (C, T), (C, A), (C, B)]{
case (c, s) => getOrModify(s).bimap(c -> _, c -> _)
}{ case (c, b) => {
case (_, s) => (c, set(b)(s))
}
}
@deprecated("use getOption", since = "1.1.0")
@inline final def getMaybe(s: S): Maybe[A] =
getOption(s).toMaybe
@deprecated("use modifyOption", since = "1.1.0")
@inline final def modifyMaybe(f: A => B): S => Maybe[T] =
s => modifyOption(f)(s).toMaybe
@deprecated("use setOption", since = "1.1.0")
@inline final def setMaybe(b: B): S => Maybe[T] =
s => setOption(b)(s).toMaybe
/***************************************************************/
/** Compose methods between a [[POptional]] and another Optics */
/***************************************************************/
/** compose a [[POptional]] with a [[Fold]] */
@inline final def composeFold[C](other: Fold[A, C]): Fold[S, C] =
asFold composeFold other
/** compose a [[POptional]] with a [[Getter]] */
@inline final def composeGetter[C](other: Getter[A, C]): Fold[S, C] =
asFold composeGetter other
/** compose a [[POptional]] with a [[PSetter]] */
@inline final def composeSetter[C, D](other: PSetter[A, B, C, D]): PSetter[S, T, C, D] =
asSetter composeSetter other
/** compose a [[POptional]] with a [[PTraversal]] */
@inline final def composeTraversal[C, D](other: PTraversal[A, B, C, D]): PTraversal[S, T, C, D] =
asTraversal composeTraversal other
/** compose a [[POptional]] with a [[POptional]] */
@inline final def composeOptional[C, D](other: POptional[A, B, C, D]): POptional[S, T, C, D] =
new POptional[S, T, C, D]{
def getOrModify(s: S): T \\/ C =
self.getOrModify(s).flatMap(a => other.getOrModify(a).bimap(self.set(_)(s), identity))
def set(d: D): S => T =
self.modify(other.set(d))
def getOption(s: S): Option[C] =
self.getOption(s) flatMap other.getOption
def modifyF[F[_]: Applicative](f: C => F[D])(s: S): F[T] =
self.modifyF(other.modifyF(f))(s)
def modify(f: C => D): S => T =
self.modify(other.modify(f))
}
/** compose a [[POptional]] with a [[PPrism]] */
@inline final def composePrism[C, D](other: PPrism[A, B, C, D]): POptional[S, T, C, D] =
composeOptional(other.asOptional)
/** compose a [[POptional]] with a [[PLens]] */
@inline final def composeLens[C, D](other: PLens[A, B, C, D]): POptional[S, T, C, D] =
composeOptional(other.asOptional)
/** compose a [[POptional]] with a [[PIso]] */
@inline final def composeIso[C, D](other: PIso[A, B, C, D]): POptional[S, T, C, D] =
composeOptional(other.asOptional)
/********************************************/
/** Experimental aliases of compose methods */
/********************************************/
/** alias to composeTraversal */
@inline final def ^|->>[C, D](other: PTraversal[A, B, C, D]): PTraversal[S, T, C, D] =
composeTraversal(other)
/** alias to composeOptional */
@inline final def ^|-?[C, D](other: POptional[A, B, C, D]): POptional[S, T, C, D] =
composeOptional(other)
/** alias to composePrism */
@inline final def ^<-?[C, D](other: PPrism[A, B, C, D]): POptional[S, T, C, D] =
composePrism(other)
/** alias to composeLens */
@inline final def ^|->[C, D](other: PLens[A, B, C, D]): POptional[S, T, C, D] =
composeLens(other)
/** alias to composeIso */
@inline final def ^<->[C, D](other: PIso[A, B, C, D]): POptional[S, T, C, D] =
composeIso(other)
/*********************************************************************/
/** Transformation methods to view a [[POptional]] as another Optics */
/*********************************************************************/
/** view a [[POptional]] as a [[Fold]] */
@inline final def asFold: Fold[S, A] = new Fold[S, A]{
def foldMap[M: Monoid](f: A => M)(s: S): M =
self.getOption(s) map f getOrElse Monoid[M].zero
}
/** view a [[POptional]] as a [[PSetter]] */
@inline final def asSetter: PSetter[S, T, A, B] =
new PSetter[S, T, A, B]{
def modify(f: A => B): S => T =
self.modify(f)
def set(b: B): S => T =
self.set(b)
}
/** view a [[POptional]] as a [[PTraversal]] */
@inline final def asTraversal: PTraversal[S, T, A, B] = new PTraversal[S, T, A, B] {
def modifyF[F[_]: Applicative](f: A => F[B])(s: S): F[T] =
self.modifyF(f)(s)
}
}
object POptional extends OptionalInstances {
def id[S, T]: POptional[S, T, S, T] =
PIso.id[S, T].asOptional
def codiagonal[S, T]: POptional[S \\/ S, T \\/ T, S, T] =
POptional[S \\/ S, T \\/ T, S, T](
_.fold(\\/.right, \\/.right)
)(t => _.bimap(_ => t, _ => t))
/** create a [[POptional]] using the canonical functions: getOrModify and set */
def apply[S, T, A, B](_getOrModify: S => T \\/ A)(_set: B => S => T): POptional[S, T, A, B] =
new POptional[S, T, A, B]{
def getOrModify(s: S): T \\/ A =
_getOrModify(s)
def set(b: B): S => T =
_set(b)
def getOption(s: S): Option[A] =
_getOrModify(s).toOption
def modifyF[F[_]: Applicative](f: A => F[B])(s: S): F[T] =
_getOrModify(s).fold(
t => Applicative[F].point(t),
a => Applicative[F].map(f(a))(_set(_)(s))
)
def modify(f: A => B): S => T =
s => _getOrModify(s).fold(identity, a => _set(f(a))(s))
}
}
object Optional {
def id[A]: Optional[A, A] =
Iso.id[A].asOptional
def codiagonal[S]: Optional[S \\/ S, S] =
POptional.codiagonal
/** [[Optional]] that points to nothing */
def void[S, A]: Optional[S, A] =
Optional[S, A](_ => None)(_ => identity)
/** alias for [[POptional]] apply restricted to monomorphic update */
def apply[S, A](_getOption: S => Option[A])(_set: A => S => S): Optional[S, A] =
new Optional[S, A]{
def getOrModify(s: S): S \\/ A =
_getOption(s).fold[S \\/ A](\\/.left(s))(\\/.right)
def set(a: A): S => S =
_set(a)
def getOption(s: S): Option[A] =
_getOption(s)
def modifyF[F[_]: Applicative](f: A => F[A])(s: S): F[S] =
_getOption(s).fold(
Applicative[F].point(s))(
a => Applicative[F].map(f(a))(_set(_)(s))
)
def modify(f: A => A): S => S =
s => _getOption(s).fold(s)(a => _set(f(a))(s))
}
}
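// Hedged usage sketch (added illustration, not part of the original source): an Optional focusing
// on the head of a List, built with the monomorphic smart constructor above. The name `listHead`
// is purely illustrative.
private[monocle] object OptionalUsageExample {
  val listHead: Optional[List[Int], Int] =
    Optional[List[Int], Int](_.headOption)(a => {
      case Nil       => Nil
      case _ :: tail => a :: tail
    })
  // listHead.getOption(List(1, 2, 3)) == Some(1)
  // listHead.set(5)(List(1, 2, 3))    == List(5, 2, 3)
  // listHead.modify(_ + 10)(Nil)      == Nil   (no target, so the source is returned unchanged)
}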
sealed abstract class OptionalInstances {
implicit val optionalChoice: Choice[Optional] = new Choice[Optional] {
def choice[A, B, C](f: => Optional[A, C], g: => Optional[B, C]): Optional[A \\/ B, C] =
f sum g
def id[A]: Optional[A, A] =
Optional.id[A]
def compose[A, B, C](f: Optional[B, C], g: Optional[A, B]): Optional[A, C] =
g composeOptional f
}
}
|
malcolmgreaves/Monocle
|
core/src/main/scala/monocle/Optional.scala
|
Scala
|
mit
| 9,474 |
package sigmastate.utxo
import org.ergoplatform._
import sigmastate._
import sigmastate.Values._
import sigmastate.lang.Terms._
import sigmastate.serialization.OpCodes.OpCode
object ComplexityTable {
val MinimalComplexity = 100
val OpCodeComplexity: Map[OpCode, Int] = Seq(
Fold.opCode -> 4034, // count = 122
MapCollection.opCode -> 2514, // count = 402
BinAnd.opCode -> 2000, // count = 21858
BinOr.opCode -> 2000, // count = 9894
Exists.opCode -> 1997, // count = 4131
Apply.opCode -> 1592, // count = 327
Append.opCode -> 1524, // count = 63
ForAll.opCode -> 1451, // count = 7952
XorOf.opCode -> 1273, // count = 2
GroupGenerator.opCode -> 1212, // count = 10
Filter.opCode -> 849, // count = 1656
ByteArrayToBigInt.opCode -> 727, // count = 9
LastBlockUtxoRootHash.opCode -> 726, // count = 3
ModQ.opCode -> 690, // count = 1
GetVar.opCode -> 687, // count = 1150
Xor.opCode -> 632, // count = 9
Tuple.opCode -> 625, // count = 26
SubstConstants.opCode -> 621, // count = 131
CalcSha256.opCode -> 505, // count = 6
OptionGetOrElse.opCode -> 449, // count = 108
ConcreteCollectionBooleanConstant.opCode -> 428, // count = 3
CalcBlake2b256.opCode -> 381, // count = 609
FuncValue.opCode -> 352, // count = 5687
OptionIsDefined.opCode -> 343, // count = 58
Negation.opCode -> 328, // count = 9
ByteArrayToLong.opCode -> 284, // count = 3
If.opCode -> 284, // count = 3918
AtLeast.opCode -> 281, // count = 7540
ConcreteCollection.opCode -> 279, // count = 7956
BinXor.opCode -> 277, // count = 19
OR.opCode -> 274, // count = 837
Inputs.opCode -> 274, // count = 8961
ModQArithOp.PlusModQ.opCode -> 272, // count = 1
ExtractCreationInfo.opCode -> 266, // count = 430
Exponentiate.opCode -> 253, // count = 7
OptionGet.opCode -> 238, // count = 29116
AND.opCode -> 230, // count = 10153
EQ.opCode -> 227, // count = 33055
ArithOp.Min.opCode -> 227, // count = 30
Outputs.opCode -> 215, // count = 29061
ModQArithOp.MinusModQ.opCode -> 211, // count = 1
LongToByteArray.opCode -> 209, // count = 15
SelectField.opCode -> 205, // count = 1217
ExtractRegisterAs.opCode -> 197, // count = 28059
ArithOp.Modulo.opCode -> 186, // count = 34
ExtractId.opCode -> 186, // count = 66
ArithOp.Max.opCode -> 185, // count = 70
DecodePoint.opCode -> 184, // count = 133
SigmaOr.opCode -> 183, // count = 4666
SigmaAnd.opCode -> 177, // count = 4467
MultiplyGroup.opCode -> 176, // count = 3
ByIndex.opCode -> 174, // count = 32408
ExtractBytes.opCode -> 174, // count = 17
Downcast.opCode -> 168, // count = 79
CreateProveDHTuple.opCode -> 164, // count = 27
SizeOf.opCode -> 151, // count = 4952
Slice.opCode -> 143, // count = 580
Self.opCode -> 117, // count = 18395
ExtractBytesWithNoRef.opCode -> 116, // count = 1129
MinerPubkey.opCode -> 107, // count = 131
GT.opCode -> 101, // count = 7137
ExtractScriptBytes.opCode -> 100, // count = 13780
ArithOp.Plus.opCode -> 99, // count = 12850
LE.opCode -> 96, // count = 3549
NEQ.opCode -> 96, // count = 2079
GE.opCode -> 95, // count = 10941
ArithOp.Minus.opCode -> 94, // count = 18200
ArithOp.Multiply.opCode -> 94, // count = 12955
Upcast.opCode -> 94, // count = 15608
SigmaPropBytes.opCode -> 94, // count = 4135
ArithOp.Division.opCode -> 92, // count = 6809
LT.opCode -> 87, // count = 8715
TrueLeaf.opCode -> 86, // count = 6764
BoolToSigmaProp.opCode -> 84, // count = 11765
FalseLeaf.opCode -> 80, // count = 4825
Height.opCode -> 80, // count = 30856
Constant.opCode -> 80, // count = 251669
SigmaPropIsProven.opCode -> 78, // count = 20566
CreateProveDlog.opCode -> 76, // count = 147
BlockValue.opCode -> 74, // count = 895
ExtractAmount.opCode -> 74, // count = 14650
LogicalNot.opCode -> 56, // count = 1420
Global.opCode -> 7, // count = 3
ValUse.opCode -> 3, // count = 18771
Context.opCode -> 1 // count = 72
).toMap
val MethodCallComplexity: Map[(Byte, Byte), Int] = Seq(
(100.toByte, 13.toByte) -> 3911, // count = 1, AvlTree.update
(36.toByte, 7.toByte) -> 2183, // count = 7, SOption.map
(7.toByte, 2.toByte) -> 2107, // count = 2, GroupElement.getEncoded
(100.toByte, 11.toByte) -> 1960, // count = 9, AvlTree.getMany
(12.toByte, 15.toByte) -> 1853, // count = 820, SCollection.flatMap
(36.toByte, 8.toByte) -> 1719, // count = 7, SOption.filter
(12.toByte, 29.toByte) -> 1588, // count = 19, SCollection.zip
(100.toByte, 12.toByte) -> 1460, // count = 8, AvlTree.insert
(100.toByte, 4.toByte) -> 1343, // count = 5, AvlTree.valueLengthOpt
(100.toByte, 10.toByte) -> 1331, // count = 22, AvlTree.get
(100.toByte, 14.toByte) -> 1229, // count = 5, AvlTree.remove
(105.toByte, 1.toByte) -> 1214, // count = 3, PreHeader.version
(104.toByte, 1.toByte) -> 1157, // count = 3, Header.id
(12.toByte, 21.toByte) -> 966, // count = 5, SCollection.updateMany
(101.toByte, 1.toByte) -> 900, // count = 25, Context.dataInputs
(100.toByte, 9.toByte) -> 809, // count = 13, AvlTree.contains
(12.toByte, 26.toByte) -> 788, // count = 6, SCollection.indexOf
(100.toByte, 7.toByte) -> 694, // count = 1, AvlTree.isRemoveAllowed
(100.toByte, 5.toByte) -> 671, // count = 1, AvlTree.isInsertAllowed
(12.toByte, 19.toByte) -> 557, // count = 5, SCollection.patch
(101.toByte, 8.toByte) -> 510, // count = 1, Context.selfBoxIndex
(100.toByte, 2.toByte) -> 452, // count = 3, AvlTree.enabledOperations
(105.toByte, 7.toByte) -> 451, // count = 3, PreHeader.votes
(105.toByte, 6.toByte) -> 447, // count = 3, PreHeader.minerPk
(7.toByte, 5.toByte) -> 444, // count = 1, GroupElement.negate
(104.toByte, 14.toByte) -> 412, // count = 3, Header.powDistance
(100.toByte, 1.toByte) -> 384, // count = 6, AvlTree.digest
(104.toByte, 13.toByte) -> 378, // count = 3, Header.powNonce
(101.toByte, 3.toByte) -> 357, // count = 17, Context.preHeader
(104.toByte, 7.toByte) -> 343, // count = 3, Header.timestamp
(105.toByte, 3.toByte) -> 339, // count = 3, PreHeader.timestamp
(104.toByte, 10.toByte) -> 335, // count = 3, Header.extensionRoot
(12.toByte, 20.toByte) -> 329, // count = 5, SCollection.updated
(100.toByte, 3.toByte) -> 328, // count = 5, AvlTree.keyLength
(105.toByte, 5.toByte) -> 328, // count = 5, PreHeader.height
(104.toByte, 9.toByte) -> 326, // count = 3, Header.height
(105.toByte, 2.toByte) -> 323, // count = 3, PreHeader.parentId
(105.toByte, 4.toByte) -> 319, // count = 3, PreHeader.nBits
(104.toByte, 8.toByte) -> 317, // count = 3, Header.nBits
(104.toByte, 2.toByte) -> 316, // count = 3, Header.version
(101.toByte, 2.toByte) -> 315, // count = 33, Context.headers
(104.toByte, 4.toByte) -> 313, // count = 3, Header.ADProofsRoot
(104.toByte, 12.toByte) -> 303, // count = 3, Header.powOnetimePk
(104.toByte, 11.toByte) -> 303, // count = 3, Header.minerPk
(104.toByte, 15.toByte) -> 302, // count = 3, Header.votes
(104.toByte, 3.toByte) -> 299, // count = 3, Header.parentId
(104.toByte, 6.toByte) -> 296, // count = 3, Header.transactionsRoot
(101.toByte, 4.toByte) -> 293, // count = 4, Context.INPUTS
(100.toByte, 6.toByte) -> 288, // count = 6, AvlTree.isUpdateAllowed
(104.toByte, 5.toByte) -> 284, // count = 3, Header.stateRoot
(101.toByte, 7.toByte) -> 278, // count = 1, Context.SELF
(101.toByte, 5.toByte) -> 276, // count = 1, Context.OUTPUTS
(99.toByte, 8.toByte) -> 269, // count = 163, Box.tokens
(101.toByte, 10.toByte) -> 249, // count = 2, Context.minerPubKey
(12.toByte, 14.toByte) -> 238, // count = 1725, SCollection.indices
(101.toByte, 9.toByte) -> 182, // count = 2, Context.LastBlockUtxoRootHash
(106.toByte, 1.toByte) -> 169, // count = 3, SigmaDslBuilder.groupGenerator
(101.toByte, 6.toByte) -> 146 // count = 1, Context.HEIGHT
).toMap
}
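// Hedged sketch (added illustration, not part of the original source): one plausible way the
// tables above are consumed, looking up a node's cost by opcode and presumably falling back to
// MinimalComplexity for opcodes that are not listed. The fallback is an assumption about usage,
// not a statement about the real interpreter.
object ComplexityTableExample {
  def complexityOf(opCode: OpCode): Int =
    ComplexityTable.OpCodeComplexity.getOrElse(opCode, ComplexityTable.MinimalComplexity)
}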
|
ScorexFoundation/sigmastate-interpreter
|
sigmastate/src/main/scala/sigmastate/utxo/ComplexityTable.scala
|
Scala
|
mit
| 9,377 |
package com.xenopsconsulting.gamedayapi
import java.util.Date
import java.text.SimpleDateFormat
case class ScheduleYear(year: Int) {
val _scheduleLines: List[String] = readGamesFromFile()
def games() = {
_scheduleLines map(ScheduleGame)
}
def openingDay() = {
games().head.date()
}
def finalDay() = {
games().last.date()
}
def gamesForDay(date: Date) = {
(_scheduleLines filter dayMatches(new SimpleDateFormat("yyyyMMdd").format(date))) map ScheduleGame
}
def gamesForDayAndTeam(date: Date, team: String) = {
(_scheduleLines filter dayMatches(new SimpleDateFormat("yyyyMMdd").format(date))) filter teamMatches(team) map ScheduleGame
}
def teams() = {
_scheduleLines.map(ScheduleGame(_).homeTeam().toLowerCase).distinct
}
private def readGamesFromFile() = {
scala.io.Source.fromInputStream(getClass.getResourceAsStream(scheduleFileName())).getLines().toList
}
private def scheduleFileName() = {
"schedules/" + year.toString + "SKED.TXT"
}
private def dayMatches(dateString: String)(scheduleLine: String) = scheduleLine.contains(dateString)
private def teamMatches(team: String)(scheduleLine: String) = scheduleLine.contains(team.toUpperCase)
}
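// Hedged usage sketch (added illustration, not part of the original source): assumes a schedule
// file such as schedules/2011SKED.TXT is available on the classpath, and uses "SFN" purely as an
// illustrative team code.
object ScheduleYearExample {
  def giantsGamesOnJulyFourth2011(): List[ScheduleGame] = {
    val year = ScheduleYear(2011)
    val day = new SimpleDateFormat("yyyy-MM-dd").parse("2011-07-04")
    year.gamesForDayAndTeam(day, "SFN")
  }
}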
|
ecopony/scala-gameday-api
|
src/main/scala/com/xenopsconsulting/gamedayapi/ScheduleYear.scala
|
Scala
|
mit
| 1,225 |
package org.vaadin.addons.rinne
import com.vaadin.ui.{Component, CustomComponent}
import org.vaadin.addons.rinne.mixins.ComponentMixin
class VCustomComponent extends CustomComponent with ComponentMixin {
def compositionRoot: Option[Component] = Option(getCompositionRoot)
def compositionRoot_=(component: Component): Unit = {
setCompositionRoot(component)
}
def compositionRoot_=(component: Option[Component]): Unit = {
setCompositionRoot(component.orNull)
}
}
|
LukaszByczynski/rinne
|
src/main/scala/org/vaadin/addons/rinne/VCustomComponent.scala
|
Scala
|
apache-2.0
| 483 |
package pages.vrm_assign
import org.openqa.selenium.WebDriver
import org.scalatest.selenium.WebBrowser.{find, id}
import uk.gov.dvla.vehicles.presentation.common.helpers.webbrowser.{Page, WebDriverFactory}
import views.vrm_assign.PaymentNotAuthorised.{ExitId, TryAgainId}
object PaymentNotAuthorisedPage extends Page {
def address = buildAppUrl("payment-not-authorised")
override lazy val url = WebDriverFactory.testUrl + address.substring(1)
final override val title: String = "Payment Cancelled or Not Authorised"
def tryAgain(implicit driver: WebDriver) = find(id(TryAgainId)).get
def exit(implicit driver: WebDriver) = find(id(ExitId)).get
}
|
dvla/vrm-assign-online
|
test/pages/vrm_assign/PaymentNotAuthorisedPage.scala
|
Scala
|
mit
| 663 |
package com.twitter.finagle.client
import com.twitter.finagle._
import com.twitter.finagle.factory.BindingFactory
import com.twitter.finagle.param.Label
import com.twitter.util._
private[finagle] object EndpointRecorder {
val role = Stack.Role("EndpointRecorder")
/**
* Creates a [[com.twitter.finagle.Stackable]] [[EndpointRecorder]].
*/
def module[Req, Rep]: Stackable[ServiceFactory[Req, Rep]] =
new Stack.ModuleParams[ServiceFactory[Req, Rep]] {
val role: Stack.Role = EndpointRecorder.role
val description: String = "Records endpoints in the endpoint registry"
val parameters = Seq(
implicitly[Stack.Param[Label]],
implicitly[Stack.Param[BindingFactory.BaseDtab]],
implicitly[Stack.Param[BindingFactory.Dest]])
def make(params: Stack.Params, next: ServiceFactory[Req, Rep]): ServiceFactory[Req, Rep] = {
val BindingFactory.Dest(dest) = params[BindingFactory.Dest]
dest match {
case bound: Name.Bound =>
val Label(client) = params[Label]
val BindingFactory.BaseDtab(baseDtab) = params[BindingFactory.BaseDtab]
new EndpointRecorder(
next,
EndpointRegistry.registry,
client, baseDtab() ++ Dtab.local,
bound.idStr, bound.addr)
case _ => next
}
}
}
}
/**
* A [[com.twitter.finagle.ServiceFactoryProxy]] that passes endpoint information
* to the [[EndpointRegistry]]. It manifests as a module below
* [[BindingFactory]] so deregistration can happen when a connection is closed.
*
* @param registry Registry to register endpoints to
* @param client Name of the client
* @param dtab Dtab for this path
* @param path Path of this serverset
* @param endpoints collection of addrs for this serverset
*/
private[finagle] class EndpointRecorder[Req, Rep](
underlying: ServiceFactory[Req, Rep],
registry: EndpointRegistry,
client: String,
dtab: Dtab,
path: String,
endpoints: Var[Addr])
extends ServiceFactoryProxy[Req, Rep](underlying) {
registry.addObservation(client, dtab, path, endpoints)
override def close(deadline: Time): Future[Unit] = {
registry.removeObservation(client, dtab, path)
underlying.close(deadline)
}
}
|
sveinnfannar/finagle
|
finagle-core/src/main/scala/com/twitter/finagle/client/EndpointRecorder.scala
|
Scala
|
apache-2.0
| 2,307 |
package dhg.pos.tagdict
import org.junit.Test
import dhg.util.StringUtil._
import org.junit.Assert._
class TagDictionaryTests {
@Test
def test_SimpleTagDictionary {
val map = Map("a" -> Set(1), "b" -> Set(2), "c" -> Set(3), "d" -> Set(1, 2))
val td = SimpleTagDictionary.apply(map, "0", 0, "9", 9, Set("a", "c", "e"), Set(1, 5), excludedTags = Set(2, 4))
assertEquals(Set(0), td("0"))
assertEquals(Set(1), td("a"))
assertEquals(Set(1, 3, 5), td("b"))
assertEquals(Set(3), td("c"))
assertEquals(Set(1), td("d"))
assertEquals(Set(1, 3, 5), td("e"))
assertEquals(Set(1, 3, 5), td("f"))
assertEquals(Set(1, 3, 5), td("g"))
assertEquals(Set(9), td("9"))
assertEquals(Map("0" -> Set(0), "a" -> Set(1), "c" -> Set(3), "d" -> Set(1), "9" -> Set(9)), td.entries)
assertEquals(Map(0 -> Set("0"), 1 -> Set("a", "d"), 3 -> Set("c"), 5 -> Set(), 9 -> Set("9")), td.knownWordsForTag)
val td2 = td.withWords(Set("c", "e", "f"))
assertEquals(Set("a", "b", "c", "d", "e", "f"), td2.allWords)
assertEquals(Set("a", "b", "c", "d", "e", "f", "0", "9"), td2.allWordsSE)
assertEquals(td.allTags, td2.allTags)
assertEquals(td.startWord, td2.startWord)
assertEquals(td.startTag, td2.startTag)
assertEquals(td.endWord, td2.endWord)
assertEquals(td.endTag, td2.endTag)
assertEquals(td.excludedTags, td2.excludedTags)
assertEquals(Set(0), td2("0"))
assertEquals(Set(1), td2("a"))
assertEquals(Set(1, 3, 5), td2("b"))
assertEquals(Set(3), td2("c"))
assertEquals(Set(1), td2("d"))
assertEquals(Set(1, 3, 5), td2("e"))
assertEquals(Set(1, 3, 5), td2("f"))
assertEquals(Set(1, 3, 5), td2("g"))
assertEquals(Set(9), td2("9"))
val td3 = td.withTags(Set(3, 4, 6))
assertEquals(td.allWords, td3.allWords)
assertEquals(Set(1, 3, 5, 6), td3.allTags)
assertEquals(Set(1, 3, 5, 6, 0, 9), td3.allTagsSE)
assertEquals(td.startWord, td3.startWord)
assertEquals(td.startTag, td3.startTag)
assertEquals(td.endWord, td3.endWord)
assertEquals(td.endTag, td3.endTag)
assertEquals(td.excludedTags, td3.excludedTags)
assertEquals(Set(0), td3("0"))
assertEquals(Set(1), td3("a"))
assertEquals(Set(1, 3, 5, 6), td3("b"))
assertEquals(Set(3), td3("c"))
assertEquals(Set(1), td3("d"))
assertEquals(Set(1, 3, 5, 6), td3("e"))
assertEquals(Set(1, 3, 5, 6), td3("f"))
assertEquals(Set(1, 3, 5, 6), td3("g"))
assertEquals(Set(9), td3("9"))
val td4 = td.withExcludedTags(Set(1, 4, 6))
assertEquals(td.allWords, td4.allWords)
assertEquals(td.allTags, td4.allTags)
assertEquals(td.startWord, td4.startWord)
assertEquals(td.startTag, td4.startTag)
assertEquals(td.endWord, td4.endWord)
assertEquals(td.endTag, td4.endTag)
assertEquals(Set(1, 2, 4, 6), td4.excludedTags)
assertEquals(Set(0), td4("0"))
assertEquals(Set(3, 5), td4("a"))
assertEquals(Set(3, 5), td4("b"))
assertEquals(Set(3), td4("c"))
assertEquals(Set(3, 5), td4("d"))
assertEquals(Set(3, 5), td4("e"))
assertEquals(Set(3, 5), td4("f"))
assertEquals(Set(3, 5), td4("g"))
assertEquals(Set(9), td4("9"))
}
@Test
def test_SimpleTagDictionaryFactory {
val cutoff = Some(0.2)
val f = new SimpleTagDictionaryFactory[Char](cutoff)
val sentences = Vector(Vector(
"1" -> 'a',
"1" -> 'a',
"1" -> 'a',
"1" -> 'a',
"1" -> 'b',
"1" -> 'b',
"1" -> 'b',
"1" -> 'c',
"2" -> 'b',
"2" -> 'b',
"2" -> 'b',
"2" -> 'b',
"2" -> 'b',
"2" -> 'z',
"7" -> 'a',
"7" -> 'c'))
val td = f(sentences, "0", 'A', "9", 'Z', Set("7", "8"), Set('x', 'y'))
assertEquals(Set("1", "2", "7", "8"), td.allWords)
assertEquals(Set("1", "2", "7", "8", "0", "9"), td.allWordsSE)
assertEquals(Set('a', 'b', 'c', 'x', 'y'), td.allTags)
assertEquals(Set('a', 'b', 'c', 'x', 'y', 'A', 'Z'), td.allTagsSE)
assertEquals(Set('A'), td("0"))
assertEquals(Set('a', 'b'), td("1"))
assertEquals(Set('b'), td("2"))
assertEquals(Set('a', 'b', 'c', 'x', 'y'), td("3"))
assertEquals(Set('a', 'c'), td("7"))
assertEquals(Set('a', 'b', 'c', 'x', 'y'), td("8"))
assertEquals(Set('Z'), td("9"))
}
@Test
def test_StartEndSwappedTagDictionary {
val map = Map("a" -> Set(1), "b" -> Set(2), "c" -> Set(3), "d" -> Set(1, 2))
val td = new StartEndSwappedTagDictionary(SimpleTagDictionary(map, "0", 0, "9", 9, Set("a", "c", "e"), Set(1, 5), excludedTags = Set(2, 4)))
assertEquals(Set(0), td("0"))
assertEquals(Set(1), td("a"))
assertEquals(Set(1, 3, 5), td("b"))
assertEquals(Set(3), td("c"))
assertEquals(Set(1), td("d"))
assertEquals(Set(1, 3, 5), td("e"))
assertEquals(Set(1, 3, 5), td("f"))
assertEquals(Set(1, 3, 5), td("g"))
assertEquals(Set(9), td("9"))
assertEquals("9", td.startWord)
assertEquals(9, td.startTag)
assertEquals("0", td.endWord)
assertEquals(0, td.endTag)
assertEquals(Map("0" -> Set(0), "a" -> Set(1), "c" -> Set(3), "d" -> Set(1), "9" -> Set(9)), td.entries)
assertEquals(Map(0 -> Set("0"), 1 -> Set("a", "d"), 3 -> Set("c"), 5 -> Set(), 9 -> Set("9")), td.knownWordsForTag)
val td2 = td.withWords(Set("c", "e", "f"))
assertEquals(Set("a", "b", "c", "d", "e", "f"), td2.allWords)
assertEquals(Set("a", "b", "c", "d", "e", "f", "0", "9"), td2.allWordsSE)
assertEquals(td.allTags, td2.allTags)
assertEquals(td.startWord, td2.startWord)
assertEquals(td.startTag, td2.startTag)
assertEquals(td.endWord, td2.endWord)
assertEquals(td.endTag, td2.endTag)
assertEquals(td.excludedTags, td2.excludedTags)
assertEquals(Set(0), td2("0"))
assertEquals(Set(1), td2("a"))
assertEquals(Set(1, 3, 5), td2("b"))
assertEquals(Set(3), td2("c"))
assertEquals(Set(1), td2("d"))
assertEquals(Set(1, 3, 5), td2("e"))
assertEquals(Set(1, 3, 5), td2("f"))
assertEquals(Set(1, 3, 5), td2("g"))
assertEquals(Set(9), td2("9"))
val td3 = td.withTags(Set(3, 4, 6))
assertEquals(td.allWords, td3.allWords)
assertEquals(Set(1, 3, 5, 6), td3.allTags)
assertEquals(Set(1, 3, 5, 6, 0, 9), td3.allTagsSE)
assertEquals(td.startWord, td3.startWord)
assertEquals(td.startTag, td3.startTag)
assertEquals(td.endWord, td3.endWord)
assertEquals(td.endTag, td3.endTag)
assertEquals(td.excludedTags, td3.excludedTags)
assertEquals(Set(0), td3("0"))
assertEquals(Set(1), td3("a"))
assertEquals(Set(1, 3, 5, 6), td3("b"))
assertEquals(Set(3), td3("c"))
assertEquals(Set(1), td3("d"))
assertEquals(Set(1, 3, 5, 6), td3("e"))
assertEquals(Set(1, 3, 5, 6), td3("f"))
assertEquals(Set(1, 3, 5, 6), td3("g"))
assertEquals(Set(9), td3("9"))
val td4 = td.withExcludedTags(Set(1, 4, 6))
assertEquals(td.allWords, td4.allWords)
assertEquals(td.allTags, td4.allTags)
assertEquals(td.startWord, td4.startWord)
assertEquals(td.startTag, td4.startTag)
assertEquals(td.endWord, td4.endWord)
assertEquals(td.endTag, td4.endTag)
assertEquals(Set(1, 2, 4, 6), td4.excludedTags)
assertEquals(Set(0), td4("0"))
assertEquals(Set(3, 5), td4("a"))
assertEquals(Set(3, 5), td4("b"))
assertEquals(Set(3), td4("c"))
assertEquals(Set(3, 5), td4("d"))
assertEquals(Set(3, 5), td4("e"))
assertEquals(Set(3, 5), td4("f"))
assertEquals(Set(3, 5), td4("g"))
assertEquals(Set(9), td4("9"))
}
}
|
dhgarrette/low-resource-pos-tagging-2014
|
src/test/scala/dhg/pos/tagdict/TagDictionaryTests.scala
|
Scala
|
apache-2.0
| 7,513 |
/* Copyright 2014 Nest Labs
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package nest.sparkle.loader.kafka
import kafka.serializer.Encoder
object KafkaEncoders {
object Implicits {
implicit object StringEncoder extends Encoder[String] {
def toBytes(string:String): Array[Byte] = string.getBytes
}
}
}
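// Usage sketch (illustrative, not part of the original file): with the implicit
// StringEncoder imported, an Encoder[String] can be summoned wherever the old
// Kafka producer API expects one. The object name below is hypothetical.
object KafkaEncodersUsage {
  import KafkaEncoders.Implicits._
  // Summon the implicit encoder and serialize a message to bytes.
  val bytes: Array[Byte] = implicitly[Encoder[String]].toBytes("hello")
}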
|
mighdoll/sparkle
|
kafka/src/main/scala/nest/sparkle/loader/kafka/KafkaEncoders.scala
|
Scala
|
apache-2.0
| 836 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.net.SocketTimeoutException
import kafka.admin.AdminUtils
import kafka.cluster.BrokerEndPoint
import kafka.log.LogConfig
import kafka.message.ByteBufferMessageSet
import kafka.api.KAFKA_090
import kafka.common.{KafkaStorageException, TopicAndPartition}
import ReplicaFetcherThread._
import org.apache.kafka.clients.{ManualMetadataUpdater, NetworkClient, ClientRequest, ClientResponse}
import org.apache.kafka.common.network.{Selectable, ChannelBuilders, NetworkReceive, Selector}
import org.apache.kafka.common.requests.{ListOffsetResponse, FetchResponse, RequestSend, AbstractRequest, ListOffsetRequest}
import org.apache.kafka.common.requests.{FetchRequest => JFetchRequest}
import org.apache.kafka.common.{Node, TopicPartition}
import org.apache.kafka.common.metrics.Metrics
import org.apache.kafka.common.protocol.{Errors, ApiKeys}
import org.apache.kafka.common.security.ssl.SSLFactory
import org.apache.kafka.common.utils.Time
import scala.collection.{JavaConverters, Map, mutable}
import JavaConverters._
class ReplicaFetcherThread(name: String,
sourceBroker: BrokerEndPoint,
brokerConfig: KafkaConfig,
replicaMgr: ReplicaManager,
metrics: Metrics,
time: Time)
extends AbstractFetcherThread(name = name,
clientId = name,
sourceBroker = sourceBroker,
fetchBackOffMs = brokerConfig.replicaFetchBackoffMs,
isInterruptible = false) {
type REQ = FetchRequest
type PD = PartitionData
private val fetchRequestVersion: Short = if (brokerConfig.interBrokerProtocolVersion.onOrAfter(KAFKA_090)) 1 else 0
private val socketTimeout: Int = brokerConfig.replicaSocketTimeoutMs
private val replicaId = brokerConfig.brokerId
private val maxWait = brokerConfig.replicaFetchWaitMaxMs
private val minBytes = brokerConfig.replicaFetchMinBytes
private val fetchSize = brokerConfig.replicaFetchMaxBytes
private def clientId = name
private val sourceNode = new Node(sourceBroker.id, sourceBroker.host, sourceBroker.port)
private val networkClient = {
val selector = new Selector(
NetworkReceive.UNLIMITED,
brokerConfig.connectionsMaxIdleMs,
metrics,
time,
"replica-fetcher",
Map("broker-id" -> sourceBroker.id.toString).asJava,
false,
ChannelBuilders.create(brokerConfig.interBrokerSecurityProtocol, SSLFactory.Mode.CLIENT, brokerConfig.channelConfigs)
)
new NetworkClient(
selector,
new ManualMetadataUpdater(),
clientId,
1,
0,
Selectable.USE_DEFAULT_BUFFER_SIZE,
brokerConfig.replicaSocketReceiveBufferBytes,
brokerConfig.requestTimeoutMs
)
}
override def shutdown(): Unit = {
super.shutdown()
networkClient.close()
}
// process fetched data
def processPartitionData(topicAndPartition: TopicAndPartition, fetchOffset: Long, partitionData: PartitionData) {
try {
val TopicAndPartition(topic, partitionId) = topicAndPartition
val replica = replicaMgr.getReplica(topic, partitionId).get
val messageSet = partitionData.toByteBufferMessageSet
if (fetchOffset != replica.logEndOffset.messageOffset)
throw new RuntimeException("Offset mismatch: fetched offset = %d, log end offset = %d.".format(fetchOffset, replica.logEndOffset.messageOffset))
trace("Follower %d has replica log end offset %d for partition %s. Received %d messages and leader hw %d"
.format(replica.brokerId, replica.logEndOffset.messageOffset, topicAndPartition, messageSet.sizeInBytes, partitionData.highWatermark))
replica.log.get.append(messageSet, assignOffsets = false)
trace("Follower %d has replica log end offset %d after appending %d bytes of messages for partition %s"
.format(replica.brokerId, replica.logEndOffset.messageOffset, messageSet.sizeInBytes, topicAndPartition))
val followerHighWatermark = replica.logEndOffset.messageOffset.min(partitionData.highWatermark)
// for the follower replica, we do not need to keep
// its segment base offset the physical position,
// these values will be computed upon making the leader
replica.highWatermark = new LogOffsetMetadata(followerHighWatermark)
trace("Follower %d set replica high watermark for partition [%s,%d] to %s"
.format(replica.brokerId, topic, partitionId, followerHighWatermark))
} catch {
case e: KafkaStorageException =>
fatal("Disk error while replicating data.", e)
Runtime.getRuntime.halt(1)
}
}
/**
* Handle a partition whose offset is out of range and return a new fetch offset.
*/
def handleOffsetOutOfRange(topicAndPartition: TopicAndPartition): Long = {
val replica = replicaMgr.getReplica(topicAndPartition.topic, topicAndPartition.partition).get
/**
* Unclean leader election: A follower goes down, in the meanwhile the leader keeps appending messages. The follower comes back up
* and before it has completely caught up with the leader's logs, all replicas in the ISR go down. The follower is now uncleanly
* elected as the new leader, and it starts appending messages from the client. The old leader comes back up, becomes a follower
* and it may discover that the current leader's end offset is behind its own end offset.
*
* In such a case, truncate the current follower's log to the current leader's end offset and continue fetching.
*
* There is a potential for a mismatch between the logs of the two replicas here. We don't fix this mismatch as of now.
*/
val leaderEndOffset: Long = earliestOrLatestOffset(topicAndPartition, ListOffsetRequest.LATEST_TIMESTAMP,
brokerConfig.brokerId)
if (leaderEndOffset < replica.logEndOffset.messageOffset) {
// Prior to truncating the follower's log, ensure that doing so is not disallowed by the configuration for unclean leader election.
// This situation could only happen if the unclean election configuration for a topic changes while a replica is down. Otherwise,
// we should never encounter this situation since a non-ISR leader cannot be elected if disallowed by the broker configuration.
if (!LogConfig.fromProps(brokerConfig.originals, AdminUtils.fetchEntityConfig(replicaMgr.zkUtils,
ConfigType.Topic, topicAndPartition.topic)).uncleanLeaderElectionEnable) {
// Log a fatal error and shutdown the broker to ensure that data loss does not unexpectedly occur.
fatal("Halting because log truncation is not allowed for topic %s,".format(topicAndPartition.topic) +
" Current leader %d's latest offset %d is less than replica %d's latest offset %d"
.format(sourceBroker.id, leaderEndOffset, brokerConfig.brokerId, replica.logEndOffset.messageOffset))
Runtime.getRuntime.halt(1)
}
replicaMgr.logManager.truncateTo(Map(topicAndPartition -> leaderEndOffset))
warn("Replica %d for partition %s reset its fetch offset from %d to current leader %d's latest offset %d"
.format(brokerConfig.brokerId, topicAndPartition, replica.logEndOffset.messageOffset, sourceBroker.id, leaderEndOffset))
leaderEndOffset
} else {
/**
* The follower could have been down for a long time and when it starts up, its end offset could be smaller than the leader's
* start offset because the leader has deleted old logs (log.logEndOffset < leaderStartOffset).
*
* Roll out a new log at the follower with the start offset equal to the current leader's start offset and continue fetching.
*/
val leaderStartOffset: Long = earliestOrLatestOffset(topicAndPartition, ListOffsetRequest.EARLIEST_TIMESTAMP,
brokerConfig.brokerId)
warn("Replica %d for partition %s reset its fetch offset from %d to current leader %d's start offset %d"
.format(brokerConfig.brokerId, topicAndPartition, replica.logEndOffset.messageOffset, sourceBroker.id, leaderStartOffset))
replicaMgr.logManager.truncateFullyAndStartAt(topicAndPartition, leaderStartOffset)
leaderStartOffset
}
}
// any logic for partitions whose leader has changed
def handlePartitionsWithErrors(partitions: Iterable[TopicAndPartition]) {
delayPartitions(partitions, brokerConfig.replicaFetchBackoffMs.toLong)
}
protected def fetch(fetchRequest: FetchRequest): Map[TopicAndPartition, PartitionData] = {
val clientResponse = sendRequest(ApiKeys.FETCH, Some(fetchRequestVersion), fetchRequest.underlying)
new FetchResponse(clientResponse.responseBody).responseData.asScala.map { case (key, value) =>
TopicAndPartition(key.topic, key.partition) -> new PartitionData(value)
}
}
private def sendRequest(apiKey: ApiKeys, apiVersion: Option[Short], request: AbstractRequest): ClientResponse = {
import kafka.utils.NetworkClientBlockingOps._
val header = apiVersion.fold(networkClient.nextRequestHeader(apiKey))(networkClient.nextRequestHeader(apiKey, _))
try {
if (!networkClient.blockingReady(sourceNode, socketTimeout)(time))
throw new SocketTimeoutException(s"Failed to connect within $socketTimeout ms")
else {
val send = new RequestSend(sourceBroker.id.toString, header, request.toStruct)
val clientRequest = new ClientRequest(time.milliseconds(), true, send, null)
networkClient.blockingSendAndReceive(clientRequest, socketTimeout)(time).getOrElse {
throw new SocketTimeoutException(s"No response received within $socketTimeout ms")
}
}
}
catch {
case e: Throwable =>
networkClient.close(sourceBroker.id.toString)
throw e
}
}
private def earliestOrLatestOffset(topicAndPartition: TopicAndPartition, earliestOrLatest: Long, consumerId: Int): Long = {
val topicPartition = new TopicPartition(topicAndPartition.topic, topicAndPartition.partition)
val partitions = Map(
topicPartition -> new ListOffsetRequest.PartitionData(earliestOrLatest, 1)
)
val request = new ListOffsetRequest(consumerId, partitions.asJava)
val clientResponse = sendRequest(ApiKeys.LIST_OFFSETS, None, request)
val response = new ListOffsetResponse(clientResponse.responseBody)
val partitionData = response.responseData.get(topicPartition)
Errors.forCode(partitionData.errorCode) match {
case Errors.NONE => partitionData.offsets.asScala.head
case errorCode => throw errorCode.exception
}
}
protected def buildFetchRequest(partitionMap: Map[TopicAndPartition, PartitionFetchState]): FetchRequest = {
val requestMap = mutable.Map.empty[TopicPartition, JFetchRequest.PartitionData]
partitionMap.foreach { case ((TopicAndPartition(topic, partition), partitionFetchState)) =>
if (partitionFetchState.isActive)
requestMap(new TopicPartition(topic, partition)) = new JFetchRequest.PartitionData(partitionFetchState.offset, fetchSize)
}
new FetchRequest(new JFetchRequest(replicaId, maxWait, minBytes, requestMap.asJava))
}
}
object ReplicaFetcherThread {
private[server] class FetchRequest(val underlying: JFetchRequest) extends AbstractFetcherThread.FetchRequest {
def isEmpty: Boolean = underlying.fetchData.isEmpty
def offset(topicAndPartition: TopicAndPartition): Long =
underlying.fetchData.asScala(new TopicPartition(topicAndPartition.topic, topicAndPartition.partition)).offset
}
private[server] class PartitionData(val underlying: FetchResponse.PartitionData) extends AbstractFetcherThread.PartitionData {
def errorCode: Short = underlying.errorCode
def toByteBufferMessageSet: ByteBufferMessageSet = new ByteBufferMessageSet(underlying.recordSet)
def highWatermark: Long = underlying.highWatermark
def exception: Option[Throwable] = Errors.forCode(errorCode) match {
case Errors.NONE => None
case e => Some(e.exception)
}
}
}
|
vkroz/kafka
|
core/src/main/scala/kafka/server/ReplicaFetcherThread.scala
|
Scala
|
apache-2.0
| 12,913 |
package io.scylla.actors
import io.scylla.client.raw.ThriftToScyllaSchemaConversions
/**
* Created by edouard on 12/07/2015.
*/
trait SchemaVerifier extends ThriftToScyllaSchemaConversions{
}
|
le-doude/scylla
|
src/main/scala/io/scylla/actors/SchemaVerifier.scala
|
Scala
|
apache-2.0
| 196 |
package com.shellhive.i18n
import scala.scalajs.js
import org.scalajs.dom
/**
 * Facade type for the Play Messages JavaScript object exposed to the page
*/
object Messages extends js.Object {
def apply(i18nLang:String)(key:String): String = scala.scalajs.js.native
}
object i18nMessages {
val getDefaultLang: String = dom.document.querySelector("html").getAttribute("lang")
}
class i18nMessages(val key: String) {
def translate(): String = Messages(i18nMessages.getDefaultLang)(key)
def translate(i18nLang:String): String = Messages(i18nLang)(key)
}
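// Usage sketch (illustrative, not part of the original file): translating a key
// with the page's default language and with an explicit language code. The key
// "app.greeting" is hypothetical; the Play Messages JavaScript object must be
// present on the page at runtime for these calls to succeed.
object I18nMessagesExample {
  def greeting(): String = new i18nMessages("app.greeting").translate()
  def greetingIn(lang: String): String = new i18nMessages("app.greeting").translate(lang)
}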
|
OmarCastro/ShellHive-scala
|
client/src/main/scala/com/shellhive/i18n/Messages.scala
|
Scala
|
mit
| 597 |
package com.sksamuel.scapegoat.inspections.string
import com.sksamuel.scapegoat._
/** @author Stephen Samuel */
class StripMarginOnRegex extends Inspection {
def inspector(context: InspectionContext): Inspector = new Inspector(context) {
override def postTyperTraverser = Some apply new context.Traverser {
import context.global._
private val R = TermName("r")
private val StripMargin = TermName("stripMargin")
private val Augment = TermName("augmentString")
override def inspect(tree: Tree): Unit = {
tree match {
case Select(Apply(_, List(Select(Apply(Select(_, Augment), List(Literal(Constant(str: String)))), StripMargin))), R) if str.contains('|') =>
context
.warn("Strip margin on regex",
tree.pos,
Levels.Error,
"Strip margin will strip | from regex - possible corrupted regex",
StripMarginOnRegex.this)
case _ => continue(tree)
}
}
}
}
}
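// Illustrative sketch (not part of the original file): the kind of code this
// inspection is meant to flag. stripMargin removes the leading '|' on each
// continuation line, so a '|' intended as regex alternation silently disappears
// before .r compiles the pattern.
object StripMarginOnRegexExample {
  // Flagged: the compiled pattern is "start\nend", not the alternation "start|end".
  val flagged = """start
                  |end""".stripMargin.r
  // Safer: keep the pattern on one line so no '|' can be mistaken for a margin.
  val safe = "start|end".r
}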
|
pwwpche/scalac-scapegoat-plugin
|
src/main/scala/com/sksamuel/scapegoat/inspections/string/StripMarginOnRegex.scala
|
Scala
|
apache-2.0
| 1,025 |
package models
import scala.slick.driver.MySQLDriver.simple._
import java.sql.Timestamp
import scala.slick.model.ForeignKeyAction
import java.util.Date
/**
 * Sets up the database schema (everything else is self-explanatory).
*/
case class MissedCall(simNumber: String, phno: String, timestamp: Timestamp, id: Option[Long] = None)
class MissedCalls(tag: Tag) extends Table[MissedCall](tag, "MISSED_CALLS") {
def simNumber = column[String]("SIM_NUMBER", O.NotNull)
def phno = column[String]("PHNO", O.NotNull)
def timestamp = column[Timestamp]("TIMESTAMP", O.NotNull)
def id = column[Long]("ID", O.NotNull, O.AutoInc, O.PrimaryKey)
def * = (simNumber, phno, timestamp, id.?) <> (MissedCall.tupled, MissedCall.unapply)
}
case class AppReg(simId: String, androidPhoneNumber: String, timestamp: Timestamp, whitelisted: Char = 'N', id: Option[Long] = None)
class AppRegs(tag: Tag) extends Table[AppReg](tag, "APP_REGISTRATIONS") {
def simId = column[String]("SIM_NUMBER", O.NotNull)
def phoneNumber = column[String]("PHONE_NUMBER", O.NotNull)
def timestamp = column[Timestamp]("TIMESTAMP", O.NotNull)
def whitelisted = column[Char]("WHITE_LISTED", O.NotNull, O.Default('N'))
def id = column[Long]("ID", O.PrimaryKey, O.NotNull, O.AutoInc)
def * = (simId, phoneNumber, timestamp, whitelisted, id.?) <> (AppReg.tupled, AppReg.unapply)
}
case class WhitelistedItem(simId:String, androidPhoneNumber: String, timestamp: Timestamp, id: Option[Long] = None)
class WhitelistedItems(tag: Tag) extends Table[WhitelistedItem](tag, "WHITE_LISTED_ITEMS") {
def simId = column[String]("SIM_ID", O.NotNull)
def androidPhoneNumber = column[String]("ANDROID_PHONE_NUMBER", O.NotNull)
def timestamp = column[Timestamp]("TIMESTAMP", O.NotNull)
def id = column[Long]("ID", O.PrimaryKey, O.NotNull, O.AutoInc)
def * = (simId, androidPhoneNumber, timestamp, id.?) <> (WhitelistedItem.tupled, WhitelistedItem.unapply)
}
case class User(email: String, phno: String, id: Option[Long] = None)
class Users(tag: Tag) extends Table[User](tag, "USERS") {
def email = column[String]("EMAIL", O.NotNull)
def phno = column[String]("PHNO", O.NotNull)
def id = column[Long]("ID", O.NotNull, O.PrimaryKey, O.AutoInc)
def * = (email, phno, id.?) <> (User.tupled, User.unapply)
}
case class Admin(email: String, password: String, id: Option[Long] = None)
class Admins(tag: Tag) extends Table[Admin](tag, "ADMINS") {
def email = column[String]("EMAIL", O.NotNull)
def password = column[String]("PASSWORD", O.NotNull)
def id = column[Long]("ID", O.NotNull, O.PrimaryKey, O.AutoInc)
def * = (email, password, id.?) <> (Admin.tupled, Admin.unapply)
}
case class UserEntry(userId: Long, timestamp: Timestamp, id: Option[Long] = None)
class UserEntries(tag: Tag) extends Table[UserEntry](tag, "USER_ENTRIES") {
def userId = column[Long]("USER_ID", O.NotNull)
def timestamp = column[Timestamp]("TIMESTAMP", O.NotNull)
def id = column[Long]("ID", O.NotNull, O.PrimaryKey, O.AutoInc)
def * = (userId, timestamp, id.?) <> (UserEntry.tupled, UserEntry.unapply)
def userIdFK = foreignKey("USER_ENTRY_USER_ID_FK", userId, TableQuery[Users])(_.id, ForeignKeyAction.Cascade, ForeignKeyAction.Cascade)
}
case class SigninStatus(userId: Long, status: Int = 0, id: Option[Long] = None)
class SigninStatuses(tag: Tag) extends Table[SigninStatus](tag, "SIGNIN_STATUSES"){
def userId = column[Long]("USER_ID", O.NotNull)
def status = column[Int]("STATUS", O.NotNull)
def id = column[Long]("ID", O.NotNull, O.PrimaryKey, O.AutoInc)
def * = (userId, status, id.?) <> (SigninStatus.tupled, SigninStatus.unapply)
def userIdFk = foreignKey("SIGNIN_STATUS_USER_ID_FK", userId, TableQuery[Users])(_.id, ForeignKeyAction.Cascade, ForeignKeyAction.Cascade)
}
case class SigningupUser(email: String, phno: String)
case class SigningupUserEntry(email: String, phno: String, timestamp: Timestamp, id: Option[Long] = None)
class SigningupUserEntries(tag: Tag) extends Table[SigningupUserEntry](tag, "SIGNINGUP_USER_ENTRIES"){
def email = column[String]("EMAIL", O.NotNull)
def phno = column[String]("PHNO", O.NotNull)
def timestamp = column[Timestamp]("TIMESTAMP", O.NotNull)
def id = column[Long]("ID", O.PrimaryKey, O.AutoInc, O.NotNull)
def * = (email, phno, timestamp, id.?) <> (SigningupUserEntry.tupled, SigningupUserEntry.unapply)
}
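// Usage sketch (illustrative, not part of the original file): the lifted-embedding
// queries these table definitions enable under Slick 2.x. The implicit Session is
// assumed to come from the application's database configuration.
object TablesUsageExample {
  val missedCalls = TableQuery[MissedCalls]
  // All missed calls recorded for a given SIM, most recent first.
  def callsForSim(simNumber: String)(implicit session: Session): List[MissedCall] =
    missedCalls.filter(_.simNumber === simNumber).sortBy(_.timestamp.desc).list
}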
|
pamu/FooService
|
FooService2/app/models/Tables.scala
|
Scala
|
apache-2.0
| 4,361 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions.aggregate
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.trees.TreePattern.{SUM, TreePattern}
import org.apache.spark.sql.catalyst.trees.UnaryLike
import org.apache.spark.sql.catalyst.util.TypeUtils
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
@ExpressionDescription(
usage = "_FUNC_(expr) - Returns the sum calculated from values of a group.",
examples = """
Examples:
> SELECT _FUNC_(col) FROM VALUES (5), (10), (15) AS tab(col);
30
> SELECT _FUNC_(col) FROM VALUES (NULL), (10), (15) AS tab(col);
25
> SELECT _FUNC_(col) FROM VALUES (NULL), (NULL) AS tab(col);
NULL
""",
group = "agg_funcs",
since = "1.0.0")
case class Sum(
child: Expression,
failOnError: Boolean = SQLConf.get.ansiEnabled)
extends DeclarativeAggregate
with ImplicitCastInputTypes
with UnaryLike[Expression] {
def this(child: Expression) = this(child, failOnError = SQLConf.get.ansiEnabled)
override def nullable: Boolean = true
// Return data type.
override def dataType: DataType = resultType
override def inputTypes: Seq[AbstractDataType] =
Seq(TypeCollection(NumericType, YearMonthIntervalType, DayTimeIntervalType))
override def checkInputDataTypes(): TypeCheckResult =
TypeUtils.checkForAnsiIntervalOrNumericType(child.dataType, "sum")
final override val nodePatterns: Seq[TreePattern] = Seq(SUM)
private lazy val resultType = child.dataType match {
case DecimalType.Fixed(precision, scale) =>
DecimalType.bounded(precision + 10, scale)
case _: IntegralType => LongType
case it: YearMonthIntervalType => it
case it: DayTimeIntervalType => it
case _ => DoubleType
}
private lazy val sum = AttributeReference("sum", resultType)()
private lazy val isEmpty = AttributeReference("isEmpty", BooleanType, nullable = false)()
private lazy val zero = Literal.default(resultType)
override lazy val aggBufferAttributes = resultType match {
case _: DecimalType => sum :: isEmpty :: Nil
case _ => sum :: Nil
}
override lazy val initialValues: Seq[Expression] = resultType match {
case _: DecimalType => Seq(zero, Literal(true, BooleanType))
case _ => Seq(Literal(null, resultType))
}
override lazy val updateExpressions: Seq[Expression] = {
resultType match {
case _: DecimalType =>
// For decimal type, the initial value of `sum` is 0. We need to keep `sum` unchanged if
// the input is null, as SUM function ignores null input. The `sum` can only be null if
// overflow happens under non-ansi mode.
val sumExpr = if (child.nullable) {
If(child.isNull, sum, sum + KnownNotNull(child).cast(resultType))
} else {
sum + child.cast(resultType)
}
// The buffer becomes non-empty after seeing the first not-null input.
val isEmptyExpr = if (child.nullable) {
isEmpty && child.isNull
} else {
Literal(false, BooleanType)
}
Seq(sumExpr, isEmptyExpr)
case _ =>
// For non-decimal type, the initial value of `sum` is null, which indicates no value.
// We need `coalesce(sum, zero)` to start summing values. And we need an outer `coalesce`
// in case the input is nullable. The `sum` can only be null if there is no value, as
// non-decimal type can produce overflowed value under non-ansi mode.
if (child.nullable) {
Seq(coalesce(coalesce(sum, zero) + child.cast(resultType), sum))
} else {
Seq(coalesce(sum, zero) + child.cast(resultType))
}
}
}
/**
* For decimal type:
* If isEmpty is false and if sum is null, then it means we have had an overflow.
*
* update of the sum is as follows:
* Check if either portion of the left.sum or right.sum has overflowed
* If it has, then the sum value will remain null.
* If it did not have overflow, then add the sum.left and sum.right
*
* isEmpty: Set to false if either one of the left or right is set to false. This
   * means we have seen at least one value that was not null.
*/
override lazy val mergeExpressions: Seq[Expression] = {
resultType match {
case _: DecimalType =>
val bufferOverflow = !isEmpty.left && sum.left.isNull
val inputOverflow = !isEmpty.right && sum.right.isNull
Seq(
If(
bufferOverflow || inputOverflow,
Literal.create(null, resultType),
// If both the buffer and the input do not overflow, just add them, as they can't be
// null. See the comments inside `updateExpressions`: `sum` can only be null if
// overflow happens.
KnownNotNull(sum.left) + KnownNotNull(sum.right)),
isEmpty.left && isEmpty.right)
case _ => Seq(coalesce(coalesce(sum.left, zero) + sum.right, sum.left))
}
}
/**
* If the isEmpty is true, then it means there were no values to begin with or all the values
* were null, so the result will be null.
* If the isEmpty is false, then if sum is null that means an overflow has happened.
* So now, if ansi is enabled, then throw exception, if not then return null.
* If sum is not null, then return the sum.
*/
override lazy val evaluateExpression: Expression = resultType match {
case d: DecimalType =>
If(isEmpty, Literal.create(null, resultType),
CheckOverflowInSum(sum, d, !failOnError))
case _ => sum
}
override protected def withNewChildInternal(newChild: Expression): Sum = copy(child = newChild)
// The flag `failOnError` won't be shown in the `toString` or `toAggString` methods
override def flatArguments: Iterator[Any] = Iterator(child)
}
|
mahak/spark
|
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Sum.scala
|
Scala
|
apache-2.0
| 6,754 |
// problem 6.2
abstract class UnitConversion(private val initRatio: Double) {
def converse(base: Double) = {initRatio * base}
}
object InchesToCentimeters extends UnitConversion(2.54){
def apply(base: Double) = InchesToCentimeters.converse(base)
}
object GallonsToLiters extends UnitConversion(4.546){
def apply(base: Double) = GallonsToLiters.converse(base)
}
object MilesToKilometers extends UnitConversion(1.609344){
def apply(base: Double) = MilesToKilometers.converse(base)
}
object Test {
def main(args: Array[String])
{
println(InchesToCentimeters(2))
println(MilesToKilometers(2))
println(GallonsToLiters(2))
}
}
// problem 6.3
object PrintArgsReversed extends App{
for(i <- (0 until args.length).reverse)
print(args(i) + "|")
println("")
}
|
Com-Mean/Scala_for_the_Impatient
|
chapter6/problem.scala
|
Scala
|
gpl-3.0
| 788 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.cogmath.circuit
import cogx.cogmath.collection.{IdentityHashSetDeterministic, IdentityHashSet}
import scala.collection.mutable.ArrayBuffer
/** A Node is a circuit element that sources and sinks signals. It has zero or
* more outputs, called Sources, and zero or more inputs, called Sinks.
*
* @param in Input sources driving the inputs to this node.
*
* @author Greg Snider
*/
private [cogx] abstract class Node[T <: Node[T]](private val in: Array[T]) {
// Sources driven by this node. Initially these sources have no sinks.
private val _sinks = new ArrayBuffer[T]
/** All nodes are created alive; when removed from circuit, they are dead. */
private var dead = false
  /** This instance, cast to type T. */
private val me = this.asInstanceOf[T]
// Inputs of this node, connect them to their sources.
for (i <- 0 until in.length)
in(i)._sinks += me
/** Circuit that this Node belongs to. If the node has no inputs, it must
* get the owning circuit from the Circuit object (which, as has been noted
* above, is not thread safe). If it has inputs, it gets the owning circuit
* from the first input.
*/
val circuit: Circuit[T] =
if (in.length == 0)
Circuit.add(me)
else
in(0).circuit
in.foreach(source => require(source.circuit == circuit))
/** Check if node is dead (removed from circuit). */
def isDead: Boolean = dead
/** Get the sources driven by this node. */
def outputs: Seq[T] = _sinks.toSeq
/** Get the inputs consumed by this node. */
def inputs: Seq[T] = in.toSeq
/** Remove `node` from the ArrayBuffer of sinks. This method should
* be called in preference to _sinks -= node because of its use of
* object identity (eq) over object equality (equals)
*/
private def removeSink(node: T) {
// Remove from back to front to preserve indexing as we remove elements
val lastIndex = _sinks.length - 1
for (i <- lastIndex to 0 by -1) {
if (_sinks(i) eq node) {
_sinks.remove(i)
}
}
}
/** Steal all sinks from another node, `from`, and add them to `this.`
*
* This makes the `from` useless since whatever signal it drives is not
* used by any other node. Consequently `from` is removed from the circuit.
*
* @param from The node from which to steal outputs; this node will then be
* removed from the circuit after the theft is complete.
*/
def stealOutputsFrom(from: T) {
// Transfer sinks from `from` to `this`.
while (from._sinks.length > 0) {
      // This is subtle because nodes can be multiply-connected, e.g.
// a single node can appear on two inputs of another node. Compounding
// this is that we must distinguish between "eq" and "equals" operators
// when putting nodes into collections. ArrayBuffers use "equals" but we
// really want "eq" when adding or subtracting, so we must use our own
// methods for those operations.
val sink = from._sinks.head
/** Is node in buffer (using 'eq' instead of 'equals')? */
def containsObject(buffer: ArrayBuffer[T], node: T): Boolean = {
for (i <- 0 until buffer.length)
if (buffer(i) eq node)
return true
return false
}
from.removeSink(sink)
if (!containsObject(this._sinks, sink))
this._sinks += sink
// Find `from` on the sink and replace it with `this`, its new source.
// Since nodes can be multiply-connected, we must replace all instances
// of `from` on the sink.
for (index <- 0 until sink.in.length)
if (sink.in(index) eq from)
sink.in(index) = me
}
require(from._sinks.length == 0)
// Disconnect `from` from its sources
removeFromCircuit(from)
circuit.outputStolen(from, me)
}
  /** Remove `node` from the circuit, recursively removing any inputs that are no
    * longer used because of the removal of `node`.
*/
private def removeFromCircuit(node: T) {
assume(node._sinks.length == 0)
node.in.foreach(_.removeSink(node))
node.dead = true
// Recursion. If any input no longer has sinks, it needs to be removed also.
// However, primary inputs (nodes with no inputs) may NOT be removed since
// they form the foundation of the Circuit.
node.in.foreach {
input =>
if ((input._sinks.length == 0) && (input.in.length > 0)) {
removeFromCircuit(input)
}
}
}
/** Traverse tree, executing "f" for each node not already visited. */
private[circuit] def traversePreorder(f: T => Unit, visited: IdentityHashSet[T]) {
if (!visited.contains(me)) {
visited += me
f(me)
for (child <- inputs) {
child.traversePreorder(f, visited)
}
}
}
/** Traverse tree, executing "f" for each node not already visited. */
private[circuit] def traversePostorder(f: T => Unit, visited: IdentityHashSetDeterministic[T]) {
System.out.flush
if (!visited.contains(me)) {
visited += me
for (child <- inputs) {
child.traversePostorder(f, visited)
}
f(me)
}
}
/** Recursive printing of the DAG using "toString" for each node. */
private[circuit] def print(level: Int, visited: IdentityHashSetDeterministic[T]) {
val alreadyVisited = visited.contains(me)
visited += me
// Put a leading "|" if this has already been printed
if (alreadyVisited)
printf("|")
else
printf(" ")
// Indent 2 spaces for each level
for (i <- 0 until level * 2)
printf(" ")
println(this.toString)
// If this has already been visited, we don't bother to print out the
// children--redundant and creates an overly verbose printout for highly
// reconvergent circuits.
if (!alreadyVisited)
for (child <- inputs)
child.print(level + 1, visited)
}
override def toString = "Node!!!"
}
|
hpe-cct/cct-core
|
src/main/scala/cogx/cogmath/circuit/Node.scala
|
Scala
|
apache-2.0
| 6,566 |
/*
* Semiring.scala
* Sum and product operations according to a semiring algebraic structure.
*
* Created By: Michael Howard ([email protected])
* Creation Date: Jun 3, 2013
*
* Copyright 2013 Avrom J. Pfeffer and Charles River Analytics, Inc.
* See http://www.cra.com or email [email protected] for information.
*
* See http://www.github.com/p2t2/figaro for a copy of the software license.
*/
package com.cra.figaro.algorithm.factored.factors
/**
* Operations in factored algorithms are defined by a semiring algebraic structure.
* Each semiring defines a product and sum operation, and a value for zero and one which satisfy a set of properties.
* Different semirings are appropriate for certain algorithms and data types.
*/
trait Semiring[T] {
/**
* Product of two entries, as defined by the particular inference problem.
*/
def product(x: T, y: T): T
/**
* Sum of two entries, as defined by the particular inference problem. E.g., for computing conditional probabilities,
* sum is ordinary addition; for most probable explanation, it is max.
*/
def sum(x: T, y: T): T
/**
* Sum of many entries. Typically, this would be implemented by the ordinary sum,
* but there may be more efficient implementations.
*/
def sumMany(xs: Traversable[T]): T = {
xs.foldLeft(zero)(sum(_, _))
}
/**
* A value such that a + 0 = a
*/
val zero: T
/**
* A value such that a*1 = a
*/
val one: T
}
trait DivideableSemiRing[T] extends Semiring[T] {
/**
* Division of two entries, as defined by the particular inference problem.
*/
def divide(x: T, y: T): T
}
case class SumProductUtilitySemiring() extends DivideableSemiRing[(Double, Double)] {
/**
* Decision joint factor combination.
*/
def product(x: (Double, Double), y: (Double, Double)) = (x._1 * y._1, x._2 + y._2)
/**
* Decision joint factor division.
*/
def divide(x: (Double, Double), y: (Double, Double)) = if (y._1 == zero._1) (zero._1, x._2 - y._2) else (x._1 / y._1, x._2 - y._2)
/**
* Decision joint factor marginalization.
*/
def sum(x: (Double, Double), y: (Double, Double)) = if (x._1 + y._1 != 0.0) (x._1 + y._1, (x._1 * x._2 + y._1 * y._2) / (x._1 + y._1)); else (0.0, 0.0)
/**
* 0 probability and 0 utility.
*/
val zero = (0.0, 0.0)
/**
* 1 probability and 0 utility.
*/
val one = (1.0, 0.0)
}
case class BooleanSemiring() extends Semiring[Boolean] {
/**
* x AND y
*/
def product(x: Boolean, y: Boolean): Boolean = x && y
/**
* x OR y
*/
def sum(x: Boolean, y: Boolean): Boolean = x || y
/**
* False
*/
val zero = false
/**
* True
*/
val one = true
}
case class SumProductSemiring() extends DivideableSemiRing[Double] {
/**
* Standard multiplication
*/
def product(x: Double, y: Double) = x* y
/**
* Standard division
*/
def divide(x: Double, y: Double) = if (y == zero) zero else x / y
/**
* Standard addition
*/
def sum(x: Double, y: Double) = x + y
/**
* 0
*/
val zero = 0.0
/**
* 1
*/
val one = 1.0
}
/**
* Semiring for computing sums and products with log probabilities.
*/
case class LogSumProductSemiring() extends DivideableSemiRing[Double] {
val zero = Double.NegativeInfinity
val one = 0.0
def product(x: Double, y: Double) = x + y
def divide(x: Double, y: Double) = if (y == zero) zero else x - y
override def sumMany(xs: Traversable[Double]): Double = {
val max = xs.foldLeft(Double.NegativeInfinity)(_ max _)
if (max == Double.NegativeInfinity) Double.NegativeInfinity
else {
var total = 0.0
for (x <- xs) { total += Math.exp(x - max) }
Math.log(total) + max
}
}
def sum(x: Double, y: Double) = sumMany(List(x, y))
}
/**
* Semiring for computing maxs and products with log probabilities.
*/
case class LogMaxProductSemiring() extends DivideableSemiRing[Double] {
val zero = Double.NegativeInfinity
val one = 0.0
def product(x: Double, y: Double) = x + y
def divide(x: Double, y: Double) = if (y == zero) zero else x - y
def sum(x: Double, y: Double) = x max y
}
/**
* Semiring for computing sums and products with lower and upper bounds.
*/
case class BoundsSumProductSemiring() extends DivideableSemiRing[(Double, Double)] {
def product(x: (Double, Double), y: (Double, Double)) = {
val (lx, ux) = x
val (ly, uy) = y
(lx * ly, ux * uy)
}
def divide(x: (Double, Double), y: (Double, Double)) = {
val (lx, ux) = x
val (ly, uy) = y
(if (ly == zero._1) zero._1 else lx / ly, if (uy == zero._2) zero._2 else ux / uy)
}
def sum(x: (Double, Double), y: (Double, Double)) = {
val (lx, ux) = x
val (ly, uy) = y
(lx + ly, ux + uy)
}
val zero = (0.0, 0.0)
val one = (1.0, 1.0)
}
case class MaxProductSemiring() extends DivideableSemiRing[Double] {
/**
* Standard multiplication
*/
def product(x: Double, y: Double) = x * y
/**
* Standard division
*/
def divide(x: Double, y: Double) = if (y == zero) zero else x / y
/**
*
* The maximum of x and y.
*/
def sum(x: Double, y: Double) = x max y
/**
* 0
*/
val zero = 0.0
/**
* 1
*/
val one = 1.0
}
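// Usage sketch (illustrative, not part of the original file): the same values
// folded through two different semirings. SumProductSemiring adds probabilities,
// while MaxProductSemiring keeps the maximum, as used for most-probable-explanation
// queries.
object SemiringExample {
  def main(args: Array[String]): Unit = {
    val values = List(0.25, 0.5, 0.25)
    println(SumProductSemiring().sumMany(values)) // 1.0
    println(MaxProductSemiring().sumMany(values)) // 0.5
  }
}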
|
jyuhuan/figaro
|
Figaro/src/main/scala/com/cra/figaro/algorithm/factored/factors/Semiring.scala
|
Scala
|
bsd-3-clause
| 5,235 |
/*
* This software is licensed under the GNU Affero General Public License, quoted below.
*
* This file is a part of PowerAPI.
*
* Copyright (C) 2011-2016 Inria, University of Lille 1.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI.
*
* If not, please consult http://www.gnu.org/licenses/agpl-3.0.html.
*/
package org.powerapi.module.cpu.simple
import java.util.UUID
import akka.actor.Actor
import org.powerapi.core.MonitorChannel.{MonitorTick, subscribeMonitorTick, unsubscribeMonitorTick}
import org.powerapi.core.target.{All, Target, TargetUsageRatio}
import org.powerapi.core.{MessageBus, OSHelper}
import org.powerapi.module.Sensor
import org.powerapi.module.cpu.UsageMetricsChannel.publishUsageReport
/**
* CPU sensor component that collects data from /proc directory, presents on Linux platform.
*
* @see http://www.kernel.org/doc/man-pages/online/pages/man5/proc.5.html
 * @author <a href="mailto:[email protected]">Aurélien Bourdon</a>
* @author <a href="mailto:[email protected]">Maxime Colmant</a>
*/
class CpuSimpleSensor(eventBus: MessageBus, muid: UUID, target: Target, osHelper: OSHelper) extends Sensor(eventBus, muid, target) {
def init(): Unit = subscribeMonitorTick(muid, target)(eventBus)(self)
def terminate(): Unit = unsubscribeMonitorTick(muid, target)(eventBus)(self)
def currentTimes(target: Target): (Long, Long) = {
val globalTimes = osHelper.getGlobalCpuTimes
val allTime = globalTimes.idleTime + globalTimes.activeTime
val targetTime = target match {
case All => globalTimes.activeTime
case _ => osHelper.getTargetCpuTime(target)
}
(targetTime, allTime)
}
def usageRatio(oldT: Long, newT: Long, oldG: Long, newG: Long): TargetUsageRatio = {
val targetTime = if (newT - oldT > 0) newT - oldT else 0
val globalTime = if (newG - oldG > 0) newG - oldG else 0
if (globalTime > 0) {
TargetUsageRatio(targetTime / globalTime.toDouble)
}
else
TargetUsageRatio(0)
}
def handler: Actor.Receive = {
val initTimes = currentTimes(target)
sense(initTimes._1, initTimes._2)
}
def sense(oldTargetTime: Long, oldGlobalTime: Long): Actor.Receive = {
case msg: MonitorTick =>
val newTimes = currentTimes(target)
publishUsageReport(muid, target, usageRatio(oldTargetTime, newTimes._1, oldGlobalTime, newTimes._2), msg.tick)(eventBus)
context.become(sense(newTimes._1, newTimes._2) orElse sensorDefault)
}
}
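// Worked sketch (illustrative, not part of the original file): the arithmetic
// behind usageRatio for one tick. The jiffy counts below are made up; in the
// sensor they come from the OSHelper reads of /proc.
object CpuSimpleSensorExample {
  def main(args: Array[String]): Unit = {
    val (oldTarget, newTarget) = (1000L, 1050L) // target cpu time advanced by 50 jiffies
    val (oldGlobal, newGlobal) = (8000L, 8200L) // global cpu time advanced by 200 jiffies
    val ratio = (newTarget - oldTarget).toDouble / (newGlobal - oldGlobal)
    println(ratio) // 0.25, i.e. TargetUsageRatio(0.25)
  }
}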
|
Spirals-Team/powerapi
|
powerapi-core/src/main/scala/org/powerapi/module/cpu/simple/CpuSimpleSensor.scala
|
Scala
|
agpl-3.0
| 3,041 |
package net.ruippeixotog.scalafbp.component.stream
import akka.actor.Props
import rx.lang.scala.Observable
import spray.json.{ JsArray, JsValue }
import net.ruippeixotog.scalafbp.component._
import net.ruippeixotog.scalafbp.util.NashornEngine
case object MapConcat extends Component {
val name = "stream/MapConcat"
  val description = "Transforms the elements of a stream into arrays of elements and flattens them"
val icon = Some("code")
val inPort = InPort[JsValue]("in", "The stream to transform")
val funcPort = InPort[String]("func", "The function with argument x to use for transformation. " +
"Must return an array. While not defined, all elements pass untouched.")
val inPorts = List(inPort, funcPort)
val outPort = OutPort[JsValue]("out", "The transformed stream")
val outPorts = List(outPort)
val instanceProps = Props(new ComponentActor(this) with NashornEngine {
val defaultFunc = Observable.just[JsFunction](JsArray(_))
val func = defaultFunc ++ funcPort.stream.map(JsFunction(_))
inPort.stream.withLatestFrom(func) { (x, f) => f(x) }.flatMapIterable {
case JsArray(elems) => elems
case js => throw new IllegalArgumentException(
s"The value ${js.compactPrint} returned by the function is not an array")
}.pipeTo(outPort)
})
}
|
ruippeixotog/scalafbp
|
components/stream/src/main/scala/net/ruippeixotog/scalafbp/component/stream/MapConcat.scala
|
Scala
|
mit
| 1,305 |
package info.mukel.codeforces4s.api
import info.mukel.codeforces4s.http.HttpClient
import info.mukel.codeforces4s.http.ScalajHttpClient
import info.mukel.codeforces4s.json.JsonUtils
import org.json4s._
import org.json4s.native.JsonMethods._
import org.json4s.native.Serialization
import org.json4s.native.Serialization._
import scala.concurrent._
import org.json4s.DefaultFormats
/**
* CodeforcesApi
*
* Anonymous (non-authenticated) Codeforces API
*/
object CodeforcesApi extends ScalajHttpClient {
//this : HttpClient =>
implicit val formats = DefaultFormats
private val apiBaseURL = "http://codeforces.com/api/"
private def apiCall(action: String, params: (String, Any)*): JValue = {
val requestUrl = apiBaseURL + action
val response = request(requestUrl, params : _*)
val json = parse(response)
if((json \\ "status").extract[String] == "OK")
(json \\ "result")
else
      throw new Exception("Invalid response:\\n" + response)
}
private def getAs[T : Manifest](action: String, params: (String, Any)*): T = {
val json = apiCall(action, params : _*)
JsonUtils.unjsonify[T](json)
}
/**
* contest.hacks
*
* Returns list of hacks in the specified contests. Full information about hacks is available only after some time after the contest end. During the contest user can see only own hacks.
*
* @param contestId (Required) Id of the contest. It is not the round number. It can be seen in contest URL. For example: /contest/374/status
* @return Returns a list of Hack objects.
*
* Example: http://codeforces.com/api/contest.hacks?contestId=374
*/
def contestHacks(contestId: Int): Array[Hack] = {
getAs[Array[Hack]]("contest.hacks",
"contestId" -> contestId)
}
/**
* contest.list
*
* Returns information about all available contests.
*
   * @param gym Boolean. If true — then gym contests are returned. Otherwise, regular contests are returned.
* @return Returns a list of Contest objects. If this method is called not anonymously, then all available contests for a calling user will be returned too, including mashups and private gyms.
*
* Example: http://codeforces.com/api/contest.list?gym=true
*/
def contestList(gym: Option[Boolean] = None): Array[Contest] = {
getAs[Array[Contest]]("contest.list",
"gym" -> gym)
}
/**
* contest.standings
*
* Returns the description of the contest and the requested part of the standings.
*
* @param contestId (Required) Id of the contest. It is not the round number. It can be seen in contest URL. For example: /contest/374/status
* @param from 1-based index of the standings row to start the ranklist.
* @param count Number of standing rows to return.
* @param handles Semicolon-separated list of handles. No more than 10000 handles is accepted.
* @param room If specified, than only participants from this room will be shown in the result. If not — all the participants will be shown.
* @param showUnofficial If true than all participants (virtual, out of competition) are shown. Otherwise, only official contestants are shown.
* @return Returns object with three fields: "contest", "problems" and "rows". Field "contest" contains a Contest object. Field "problems" contains a list of Problem objects. Field "rows" contains a list of RanklistRow objects.
* Example: http://codeforces.com/api/contest.standings?contestId=374&from=1&count=5&showUnofficial=true
*/
def contestStandings(contestId : Int,
from : Int,
count : Int,
handles : Array[String],
room : Option[Int] = None,
showUnofficial : Option[Boolean] = None): ContestStandingsResult = {
getAs[ContestStandingsResult]("contest.standings",
"contestId" -> contestId,
"from" -> from,
"count" -> count,
"handles" -> (handles mkString ";"),
"room" -> room,
"showUnofficial" -> showUnofficial
)
}
case class ContestStandingsResult(
contest : Contest,
problems : Array[Problem],
rows : List[RanklistRow]
)
/**
* contest.status
*
* Returns submissions for specified contest. Optionally can return submissions of specified user.
*
* @param contestId (Required) Id of the contest. It is not the round number. It can be seen in contest URL. For example: /contest/374/status
* @param handle Codeforces user handle.
* @param from 1-based index of the first submission to return.
* @param count Number of returned submissions.
* @return Returns a list of Submission objects, sorted in decreasing order of submission id.
*
* Example: http://codeforces.com/api/contest.status?contestId=374&from=1&count=10
*/
def contestStatus(contestId : Int,
handle : String,
from : Int,
count : Int): Array[Submission] = {
getAs[Array[Submission]]("contest.status",
"contestId" -> contestId,
"handle" -> handle,
"from" -> from,
"count" -> count
)
}
/**
* problemset.problems
*
* Returns all problems from problemset. Problems can be filtered by tags.
*
* @param tags Semicolon-separated list of tags.
* @return Returns two lists. List of Problem objects and list of ProblemStatistics objects.
*
* Example: http://codeforces.com/api/problemset.problems?tags=implementation
*/
def problemsetProblems(tags : Array[String]): Array[(Problem, ProblemStatistics)] = {
val result = getAs[ProblemsetProblemsResult]("problemset.problems", "tags" -> (tags mkString ";"))
result.problems zip result.problemStatistics
}
case class ProblemsetProblemsResult(
problems: Array[Problem],
problemStatistics: Array[ProblemStatistics]
)
/**
* problemset.recentStatus
*
* Returns recent submissions.
*
* @param count (Required) Number of submissions to return. Can be up to 1000.
* @return Returns a list of Submission objects, sorted in decreasing order of submission id.
*
* Example: http://codeforces.com/api/problemset.recentStatus?count=10
*/
def problemsetRecentStatus(count: Int): Array[Submission] = {
getAs[Array[Submission]]("problemset.recentStatus",
"count" -> count
)
}
/**
* user.info
*
* Returns information about one or several users.
*
* @param handles (Required) Semicolon-separated list of handles. No more than 10000 handles is accepted.
* @return Returns a list of User objects for requested handles.
*
* Example: http://codeforces.com/api/user.info?handles=DmitriyH;Fefer_Ivan
*/
def userInfo(handles: Array[String]): Array[User] = {
getAs[Array[User]]("user.info",
"handles" -> (handles mkString ";")
)
}
/**
* user.ratedList
*
* Returns the list of all rated users.
*
* @param activeOnly Boolean. If true then only users, who participated in rated contest during the last month are returned. Otherwise, all users with at least one rated contest are returned.
* @return Returns a list of User objects, sorted in decreasing order of rating.
*
* Example: http://codeforces.com/api/user.ratedList?activeOnly=true
*/
def userRatedList(activeOnly: Option[Boolean] = None): Array[User] = {
getAs[Array[User]]("user.ratedList",
"activeOnly" -> activeOnly
)
}
/**
* user.rating
*
* Returns rating history of the specified user.
*
* @param handle (Required) Codeforces user handle.
* @return Returns a list of RatingChange objects for requested user.
*
* Example: http://codeforces.com/api/user.rating?handle=Fefer_Ivan
*/
def userRating(handle: String): Array[RatingChange] = {
    getAs[Array[RatingChange]]("user.rating",
"handle" -> handle
)
}
/**
* user.status
*
* Returns submissions of specified user.
*
* @param handle (Required) Codeforces user handle.
* @param from 1-based index of the first submission to return.
* @param count Number of returned submissions.
* @return Returns a list of Submission objects, sorted in decreasing order of submission id.
*
* Example: http://codeforces.com/api/user.status?handle=Fefer_Ivan&from=1&count=10
*/
def userStatus(handle : String,
from : Int,
count : Int): Array[Submission] = {
getAs[Array[Submission]]("user.status",
"handle" -> handle,
"from" -> from,
"count" -> count
)
}
}
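// Usage sketch (illustrative, not part of the original file): two anonymous calls
// against the live API, so running this needs network access to codeforces.com.
// Contest id 374 mirrors the example URLs in the doc comments above.
object CodeforcesApiExample {
  def main(args: Array[String]): Unit = {
    val contests = CodeforcesApi.contestList(gym = Some(false))
    println(s"Contests available: ${contests.length}")
    val hacks = CodeforcesApi.contestHacks(374)
    println(s"Hacks in contest 374: ${hacks.length}")
  }
}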
|
mukel/codeforces4s
|
src/main/scala/info/mukel/codeforces4s/api/CodeforcesApi.scala
|
Scala
|
gpl-2.0
| 8,991 |