/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.shuffle.sort
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
import org.apache.spark._
import org.apache.spark.internal.Logging
import org.apache.spark.shuffle._
import org.apache.spark.shuffle.api.ShuffleExecutorComponents
import org.apache.spark.util.collection.OpenHashSet
/**
* In sort-based shuffle, incoming records are sorted according to their target partition ids, then
* written to a single map output file. Reducers fetch contiguous regions of this file in order to
* read their portion of the map output. In cases where the map output data is too large to fit in
* memory, sorted subsets of the output can be spilled to disk and those on-disk files are merged
* to produce the final output file.
*
* Sort-based shuffle has two different write paths for producing its map output files:
*
* - Serialized sorting: used when all three of the following conditions hold:
* 1. The shuffle dependency specifies no map-side combine.
* 2. The shuffle serializer supports relocation of serialized values (this is currently
* supported by KryoSerializer and Spark SQL's custom serializers).
* 3. The shuffle produces no more than 16777216 (2^24) output partitions.
* - Deserialized sorting: used to handle all other cases.
*
* -----------------------
* Serialized sorting mode
* -----------------------
*
* In the serialized sorting mode, incoming records are serialized as soon as they are passed to the
* shuffle writer and are buffered in a serialized form during sorting. This write path implements
* several optimizations:
*
* - Its sort operates on serialized binary data rather than Java objects, which reduces memory
* consumption and GC overheads. This optimization requires the record serializer to have certain
* properties to allow serialized records to be re-ordered without requiring deserialization.
* See SPARK-4550, where this optimization was first proposed and implemented, for more details.
*
* - It uses a specialized cache-efficient sorter ([[ShuffleExternalSorter]]) that sorts
* arrays of compressed record pointers and partition ids. By using only 8 bytes of space per
* record in the sorting array, this fits more of the array into cache.
*
* - The spill merging procedure operates on blocks of serialized records that belong to the same
* partition and does not need to deserialize records during the merge.
*
* - When the spill compression codec supports concatenation of compressed data, the spill merge
* simply concatenates the serialized and compressed spill partitions to produce the final output
* partition. This allows efficient data copying methods, like NIO's `transferTo`, to be used
* and avoids the need to allocate decompression or copying buffers during the merge.
*
* For more details on these optimizations, see SPARK-7081.
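*
* As a rough illustration of the record-pointer packing (a sketch only; the
* authoritative bit layout lives in [[PackedRecordPointer]], and the widths
* below are assumptions consistent with its partition limit):
*
* {{{
* // pack a 24-bit partition id and a 40-bit record pointer into one long
* val packed: Long = (partitionId.toLong << 40) | (recordPointer & 0xFFFFFFFFFFL)
* // the sorter recovers the partition id without deserializing the record
* val partition: Int = (packed >>> 40).toInt
* }}}
*
* The 24-bit partition id is also why serialized mode supports at most
* 2^24 = 16777216 output partitions.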
*/
private[spark] class SortShuffleManager(conf: SparkConf) extends ShuffleManager with Logging {
import SortShuffleManager._
if (!conf.getBoolean("spark.shuffle.spill", true)) {
logWarning(
"spark.shuffle.spill was set to false, but this configuration is ignored as of Spark 1.6+." +
" Shuffle will continue to spill to disk when necessary.")
}
/**
* A mapping from shuffle ids to the task ids of mappers producing output for those shuffles.
*/
private[this] val taskIdMapsForShuffle = new ConcurrentHashMap[Int, OpenHashSet[Long]]()
private lazy val shuffleExecutorComponents = loadShuffleExecutorComponents(conf)
override val shuffleBlockResolver = new IndexShuffleBlockResolver(conf)
/**
* Obtains a [[ShuffleHandle]] to pass to tasks.
*/
override def registerShuffle[K, V, C](
shuffleId: Int,
dependency: ShuffleDependency[K, V, C]): ShuffleHandle = {
if (SortShuffleWriter.shouldBypassMergeSort(conf, dependency)) {
// If there are fewer than spark.shuffle.sort.bypassMergeThreshold partitions and we don't
// need map-side aggregation, then write numPartitions files directly and just concatenate
// them at the end. This avoids doing serialization and deserialization twice to merge
// together the spilled files, which would happen with the normal code path. The downside is
// having multiple files open at a time and thus more memory allocated to buffers.
new BypassMergeSortShuffleHandle[K, V](
shuffleId, dependency.asInstanceOf[ShuffleDependency[K, V, V]])
} else if (SortShuffleManager.canUseSerializedShuffle(dependency)) {
// Otherwise, try to buffer map outputs in a serialized form, since this is more efficient:
new SerializedShuffleHandle[K, V](
shuffleId, dependency.asInstanceOf[ShuffleDependency[K, V, V]])
} else {
// Otherwise, buffer map outputs in a deserialized form:
new BaseShuffleHandle(shuffleId, dependency)
}
}
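// Illustrative handle selection (a sketch assuming the default
// spark.shuffle.sort.bypassMergeThreshold of 200):
//   - no map-side combine, 50 partitions            -> BypassMergeSortShuffleHandle
//   - no map-side combine, 5000 partitions, and a
//     relocation-capable serializer such as Kryo    -> SerializedShuffleHandle
//   - map-side combine requested (e.g. reduceByKey) -> BaseShuffleHandle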
/**
* Get a reader for a range of reduce partitions (startPartition to endPartition-1, inclusive)
* that reads from a range of map outputs (startMapIndex to endMapIndex-1, inclusive).
* If endMapIndex = Int.MaxValue, the actual endMapIndex is replaced by the total number of
* map outputs for the shuffle in `getMapSizesByExecutorId`.
*
* Called on executors by reduce tasks.
*/
override def getReader[K, C](
handle: ShuffleHandle,
startMapIndex: Int,
endMapIndex: Int,
startPartition: Int,
endPartition: Int,
context: TaskContext,
metrics: ShuffleReadMetricsReporter): ShuffleReader[K, C] = {
val blocksByAddress = SparkEnv.get.mapOutputTracker.getMapSizesByExecutorId(
handle.shuffleId, startMapIndex, endMapIndex, startPartition, endPartition)
new BlockStoreShuffleReader(
handle.asInstanceOf[BaseShuffleHandle[K, _, C]], blocksByAddress, context, metrics,
shouldBatchFetch = canUseBatchFetch(startPartition, endPartition, context))
}
/** Get a writer for a given partition. Called on executors by map tasks. */
override def getWriter[K, V](
handle: ShuffleHandle,
mapId: Long,
context: TaskContext,
metrics: ShuffleWriteMetricsReporter): ShuffleWriter[K, V] = {
val mapTaskIds = taskIdMapsForShuffle.computeIfAbsent(
handle.shuffleId, _ => new OpenHashSet[Long](16))
mapTaskIds.synchronized { mapTaskIds.add(context.taskAttemptId()) }
val env = SparkEnv.get
handle match {
case unsafeShuffleHandle: SerializedShuffleHandle[K @unchecked, V @unchecked] =>
new UnsafeShuffleWriter(
env.blockManager,
context.taskMemoryManager(),
unsafeShuffleHandle,
mapId,
context,
env.conf,
metrics,
shuffleExecutorComponents)
case bypassMergeSortHandle: BypassMergeSortShuffleHandle[K @unchecked, V @unchecked] =>
new BypassMergeSortShuffleWriter(
env.blockManager,
bypassMergeSortHandle,
mapId,
env.conf,
metrics,
shuffleExecutorComponents)
case other: BaseShuffleHandle[K @unchecked, V @unchecked, _] =>
new SortShuffleWriter(other, mapId, context, shuffleExecutorComponents)
}
}
/** Remove a shuffle's metadata from the ShuffleManager. */
override def unregisterShuffle(shuffleId: Int): Boolean = {
Option(taskIdMapsForShuffle.remove(shuffleId)).foreach { mapTaskIds =>
mapTaskIds.iterator.foreach { mapTaskId =>
shuffleBlockResolver.removeDataByMap(shuffleId, mapTaskId)
}
}
true
}
/** Shut down this ShuffleManager. */
override def stop(): Unit = {
shuffleBlockResolver.stop()
}
}
private[spark] object SortShuffleManager extends Logging {
/**
* The maximum number of shuffle output partitions that SortShuffleManager supports when
* buffering map outputs in a serialized form. This is an extreme defensive programming measure,
* since it's extremely unlikely that a single shuffle produces over 16 million output partitions.
*/
val MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE =
PackedRecordPointer.MAXIMUM_PARTITION_ID + 1
/**
* The local property key for the continuous shuffle block fetching feature.
*/
val FETCH_SHUFFLE_BLOCKS_IN_BATCH_ENABLED_KEY =
"__fetch_continuous_blocks_in_batch_enabled"
/**
* Helper method for determining whether a shuffle reader should fetch the continuous blocks
* in batch.
*/
def canUseBatchFetch(startPartition: Int, endPartition: Int, context: TaskContext): Boolean = {
val fetchMultiPartitions = endPartition - startPartition > 1
fetchMultiPartitions &&
context.getLocalProperty(FETCH_SHUFFLE_BLOCKS_IN_BATCH_ENABLED_KEY) == "true"
}
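// Usage sketch (an assumption about the caller; in practice Spark sets this
// local property internally when batch fetching is known to be safe):
//   sparkContext.setLocalProperty(
//     SortShuffleManager.FETCH_SHUFFLE_BLOCKS_IN_BATCH_ENABLED_KEY, "true")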
/**
* Helper method for determining whether a shuffle should use an optimized serialized shuffle
* path or whether it should fall back to the original path that operates on deserialized objects.
*/
def canUseSerializedShuffle(dependency: ShuffleDependency[_, _, _]): Boolean = {
val shufId = dependency.shuffleId
val numPartitions = dependency.partitioner.numPartitions
if (!dependency.serializer.supportsRelocationOfSerializedObjects) {
log.debug(s"Can't use serialized shuffle for shuffle $shufId because the serializer, " +
s"${dependency.serializer.getClass.getName}, does not support object relocation")
false
} else if (dependency.mapSideCombine) {
log.debug(s"Can't use serialized shuffle for shuffle $shufId because we need to do " +
s"map-side aggregation")
false
} else if (numPartitions > MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE) {
log.debug(s"Can't use serialized shuffle for shuffle $shufId because it has more than " +
s"$MAX_SHUFFLE_OUTPUT_PARTITIONS_FOR_SERIALIZED_MODE partitions")
false
} else {
log.debug(s"Can use serialized shuffle for shuffle $shufId")
true
}
}
private def loadShuffleExecutorComponents(conf: SparkConf): ShuffleExecutorComponents = {
val executorComponents = ShuffleDataIOUtils.loadShuffleDataIO(conf).executor()
val extraConfigs = conf.getAllWithPrefix(ShuffleDataIOUtils.SHUFFLE_SPARK_CONF_PREFIX)
.toMap
executorComponents.initializeExecutor(
conf.getAppId,
SparkEnv.get.executorId,
extraConfigs.asJava)
executorComponents
}
}
/**
* Subclass of [[BaseShuffleHandle]], used to identify when we've chosen to use the
* serialized shuffle.
*/
private[spark] class SerializedShuffleHandle[K, V](
shuffleId: Int,
dependency: ShuffleDependency[K, V, V])
extends BaseShuffleHandle(shuffleId, dependency) {
}
/**
* Subclass of [[BaseShuffleHandle]], used to identify when we've chosen to use the
* bypass merge sort shuffle path.
*/
private[spark] class BypassMergeSortShuffleHandle[K, V](
shuffleId: Int,
dependency: ShuffleDependency[K, V, V])
extends BaseShuffleHandle(shuffleId, dependency) {
}
// ==== end of file: core/src/main/scala/org/apache/spark/shuffle/sort/SortShuffleManager.scala (repo: witgo/spark, license: apache-2.0) ====
package gitbucket.core.ssh
import org.apache.sshd.server.shell.UnknownCommand
import org.scalatest.funspec.AnyFunSpec
class GitCommandFactorySpec extends AnyFunSpec {
val factory = new GitCommandFactory("http://localhost:8080", None)
describe("createCommand") {
it("should return GitReceivePack when command is git-receive-pack") {
assert(factory.createCommand("git-receive-pack '/owner/repo.git'").isInstanceOf[DefaultGitReceivePack] == true)
assert(
factory.createCommand("git-receive-pack '/owner/repo.wiki.git'").isInstanceOf[DefaultGitReceivePack] == true
)
}
it("should return GitUploadPack when command is git-upload-pack") {
assert(factory.createCommand("git-upload-pack '/owner/repo.git'").isInstanceOf[DefaultGitUploadPack] == true)
assert(factory.createCommand("git-upload-pack '/owner/repo.wiki.git'").isInstanceOf[DefaultGitUploadPack] == true)
}
it("should return UnknownCommand when command is not git-(upload|receive)-pack") {
assert(factory.createCommand("git- '/owner/repo.git'").isInstanceOf[UnknownCommand] == true)
assert(factory.createCommand("git-pack '/owner/repo.git'").isInstanceOf[UnknownCommand] == true)
assert(factory.createCommand("git-a-pack '/owner/repo.git'").isInstanceOf[UnknownCommand] == true)
assert(factory.createCommand("git-up-pack '/owner/repo.git'").isInstanceOf[UnknownCommand] == true)
assert(factory.createCommand("\\ngit-upload-pack '/owner/repo.git'").isInstanceOf[UnknownCommand] == true)
}
it("should return UnknownCommand when git command has no valid arguments") {
// must be: git-upload-pack '/owner/repository_name.git'
assert(factory.createCommand("git-upload-pack").isInstanceOf[UnknownCommand] == true)
assert(factory.createCommand("git-upload-pack /owner/repo.git").isInstanceOf[UnknownCommand] == true)
assert(factory.createCommand("git-upload-pack 'owner/repo.git'").isInstanceOf[UnknownCommand] == true)
assert(factory.createCommand("git-upload-pack '/ownerrepo.git'").isInstanceOf[UnknownCommand] == true)
assert(factory.createCommand("git-upload-pack '/owner/repo.wiki'").isInstanceOf[UnknownCommand] == true)
}
}
}
// ==== end of file: src/test/scala/gitbucket/core/ssh/GitCommandSpec.scala (repo: xuwei-k/gitbucket, license: apache-2.0) ====
package brique.bench.input
import org.openjdk.jmh.annotations.{Setup, Param, Scope, State}
import scala.{Array, Int, Unit}
@State(Scope.Thread)
class ArrayInput extends InputHelper {
@Param(Array("10", "100", "1000"))
var size: Int = _
var array: Array[Int] = _
@Setup
def setup(): Unit =
array = genArray(size)(r.nextInt())
}
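// JMH runs the benchmark once per @Param value, re-invoking setup() each time,
// so benchmarks that take an ArrayInput are measured at sizes 10, 100 and 1000
// (genArray and the RNG `r` are assumed to come from InputHelper).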
// ==== end of file: bench/src/main/scala/brique/bench/input/ArrayInput.scala (repo: julien-truffaut/brique, license: mit) ====
package examples.range
import rescala._
class Range1(protected var _start : Int, protected var _length : Int){
protected var _end = _start + _length // backs up end
// getters and setters, maintaining correct state
def start = _start
def end = _end
def length = _length
def start_=(s : Int): Unit = {
_start = s
_end = _start + _length
}
def end_=(e : Int): Unit = {
_end = e
_length = e - _start
}
def length_=(e : Int): Unit = {
_length = e
_end = _start + _length
}
}
class Range2(var start : Int, var length : Int){
def end = start + length
def end_=(e : Int) = length = e - start
}
class Range3(val start : Var[Int], val length : Var[Int]) {
// end is a signal, leading to transparent caching
lazy val end = Signal{ start() + length()}
lazy val last = Signal{ end() - 1 }
def end_=(e : Int) = length.set(e - start.now)
// invariant
length.changed += {(x : Int) =>
if(x < 0) throw new IllegalArgumentException}
// convenience functions
def contains(number : Int) = number > start.now && number < end.now
def contains(other : Range3) = start.now < other.start.now && end.now > other.end.now
def intersects(other : Range3) = contains(other.start.now) || contains(other.end.now)
}
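// A minimal usage sketch (assumes rescala's Var/Signal API as imported above;
// the values in the comments are what the derived signals should then hold):
object Range3Demo {
  def main(args: Array[String]): Unit = {
    val r = new Range3(Var(0), Var(10))
    println(r.end.now)    // 10: derived from start + length
    r.start.set(5)
    println(r.end.now)    // 15: recomputed transparently on change
    r.end = 25            // routes through end_=, updating length
    println(r.length.now) // 20
  }
}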
// immutable range
class Range(val start : Int, val length : Int) {
lazy val end = start + length
}
// ==== end of file: Examples/examples/src/main/scala/examples/range/Range.scala (repo: volkc/REScala, license: apache-2.0) ====
package com.amarjanica.discourse.models
import com.amarjanica.discourse.util.{HasParameters, HasQueryParameters}
case class SSOContext(
payload: String,
signature: String,
secret: String
)
case class SSOUserResult(payload: String, signature: String)
/**
* Contains params for generating user signing tokens for Discourse
* @param email must be a verified email address.
* @param require_activation set to true if the email address has not been verified.
* @param external_id Your unique user id. The suggested value is your database's 'id' row number.
* @param username will become the username on Discourse if the user is new or SiteSetting.sso_overrides_username is set.
* @param name will become the full name on Discourse if the user is new or SiteSetting.sso_overrides_name is set.
* @param admin boolean
* @param moderator boolean
* @param suppress_welcome_message boolean
*/
case class SSOUserRequest(
email: String,
require_activation: Boolean,
external_id: String,
username: String,
name: String,
avatar_url: String,
admin: Boolean,
moderator: Boolean,
avatar_force_update: Boolean,
suppress_welcome_message: Boolean
) extends HasParameters with HasQueryParameters {
require(Option(email).isDefined, "email can't be null!")
require(Option(external_id).isDefined, "external_id can't be null!")
require(Option(username).isDefined, "username can't be null!")
require(Option(name).isDefined, "name can't be null!")
}
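// A minimal construction sketch (all field values below are illustrative):
//   val req = SSOUserRequest(
//     email = "user@example.com",
//     require_activation = false,
//     external_id = "42",
//     username = "jdoe",
//     name = "Jane Doe",
//     avatar_url = "https://example.com/avatar.png",
//     admin = false,
//     moderator = false,
//     avatar_force_update = false,
//     suppress_welcome_message = false
//   )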
// ==== end of file: src/main/scala/com/amarjanica/discourse/models/SSO.scala (repo: amarjanica/discourse-scala-client, license: mit) ====
package org.biosys.pubmed.models
case class PubmedRecord(pmid: String, record: String, rtype: String, htag: String) extends PubmedAbstractRecord {
}
// ==== end of file: pubmed_common/src/main/scala/org/biosys/pubmed/models/PubmedRecord.scala (repo: sdor/biosys, license: gpl-2.0) ====
import sbt._
object Version {
val spark = "1.0.0"
val hadoop = "2.4.0"
val slf4j = "1.7.6"
val logback = "1.1.1"
val scalaTest = "2.1.7"
val mockito = "1.9.5"
val akka = "2.3.3"
}
object Library {
// workaround until a Scala 2.11 build of Spark Streaming is available
val sparkStreaming = "org.apache.spark" %% "spark-streaming" % Version.spark
val akkaActor = "com.typesafe.akka" %% "akka-actor" % Version.akka
val akkaTestKit = "com.typesafe.akka" %% "akka-testkit" % Version.akka
val hadoopClient = "org.apache.hadoop" % "hadoop-client" % Version.hadoop
val slf4jApi = "org.slf4j" % "slf4j-api" % Version.slf4j
val logbackClassic = "ch.qos.logback" % "logback-classic" % Version.logback
val scalaTest = "org.scalatest" %% "scalatest" % Version.scalaTest
val mockitoAll = "org.mockito" % "mockito-all" % Version.mockito
}
object Dependencies {
import Library._
val sparkAkkaHadoop = Seq(
sparkStreaming,
akkaActor,
akkaTestKit,
hadoopClient,
logbackClassic % "test",
scalaTest % "test",
mockitoAll % "test"
)
}
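// Usage sketch (assumed wiring in this project's build definition):
//   // in build.sbt or project/Build.scala
//   libraryDependencies ++= Dependencies.sparkAkkaHadoop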
// ==== end of file: project/Dependencies.scala (repo: charlesxucheng/spark-streaming-scala, license: apache-2.0) ====
// Copyright: 2010 - 2018 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import java.io.{ ByteArrayOutputStream, PrintStream }
import java.nio.charset.StandardCharsets
import org.ensime.api.DeclaredAs
import org.ensime.indexer._
import scala.tools.scalap.scalax.rules.scalasig._
import scala.collection.breakOut
import scala.util.control.NonFatal
trait ScalapSymbolToFqn {
import ScalaSigApi._
private def withScalaSigPrinter(code: ScalaSigPrinter => Any): String = {
val baos = new ByteArrayOutputStream()
val ps = new PrintStream(baos)
val printer = new ScalaSigPrinter(ps, true)
try {
code(printer)
new String(baos.toByteArray, StandardCharsets.UTF_8)
} catch {
case NonFatal(e) => ""
}
}
private def getAccess(sym: Symbol): Access =
if (sym.isPrivate) Private
else if (sym.isProtected) Protected
else Public
def rawType(s: AliasSymbol, parentPrefix: String): RawType = {
val parentName = className(s.symbolInfo.owner)
val isParentModule = parentName.fqnString.endsWith("$")
val javaName = ClassName(
parentName.pack,
parentName.name + (if (isParentModule) "" else "$") + s.name
)
val scalaName = parentPrefix + (if (isParentModule) "." else "#") + s.name
val access = getAccess(s)
val typeSignature = withScalaSigPrinter { printer =>
printer.printType(s.infoType, " = ")(printer.TypeFlags(true))
}
RawType(parentName, javaName, scalaName, access, typeSignature)
}
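// Illustrative expectation (hypothetical input, not taken from this repo): for
// `object O { type Alias = Int }` the owner's ClassName already ends in "$", so
// the alias maps to javaName "O$Alias" and scalaName "O.Alias", with a
// scalap-rendered signature along the lines of " = Int".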
def rawScalaClass(sym: ClassSymbol): RawScalapClass = {
val javaName = className(sym)
val aPackage = sym.enclosingPackage
val ownerChain = sym.ownerChain
val name =
ownerChain.init
.map(s => s.name + (if (s.isModule) "." else "#"))
.mkString + ownerChain.last.name
val access = getAccess(sym)
val declaredAs =
if (sym.isTrait) DeclaredAs.Trait
else if (sym.isModule) DeclaredAs.Object
else DeclaredAs.Class
val typeSignature = withScalaSigPrinter { printer =>
printer.printType(sym.infoType)(printer.TypeFlags(true))
}
val scalaName = aPackage + "." + name
val parentPrefix = if (sym.isModule) scalaName + "." else scalaName + "#"
val fields: Map[String, RawScalapField] = sym.children.collect {
case ms: MethodSymbol if !ms.isMethod && ms.isLocal =>
val field = rawScalaField(ms, parentPrefix)
field.javaName.fqnString -> field
}(breakOut)
val methods: Map[String, IndexedSeq[RawScalapMethod]] =
sym.children.collect {
case ms: MethodSymbol if ms.isMethod =>
rawScalaMethod(ms, parentPrefix)
}(collection.breakOut).groupBy(_.simpleName)
val aliases: Map[String, RawType] = sym.children.collect {
case as: AliasSymbol =>
val alias = rawType(as, scalaName)
alias.javaName.fqnString -> alias
}(breakOut)
RawScalapClass(
javaName,
scalaName,
typeSignature,
access,
declaredAs,
fields,
methods,
aliases
)
}
private def className(sym: Symbol): ClassName = {
val nested = sym.ownerChain
val pkg = PackageName(sym.enclosingPackage.split("\\.").toList)
val name = nested.map(_.name).mkString("$")
val postfix = if (nested.last.isModule) "$" else ""
ClassName(pkg, name + postfix)
}
private def rawScalaField(ms: MethodSymbol,
parentPrefix: String): RawScalapField = {
val aClass = className(ms.symbolInfo.owner)
val name = ms.name.trim
val javaName = FieldName(aClass, name)
val scalaName = parentPrefix + name
val access = getAccess(ms)
val typeInfo = withScalaSigPrinter { printer =>
printer.printType(ms.infoType)(printer.TypeFlags(true))
}
RawScalapField(javaName, scalaName, typeInfo, access)
}
private def rawScalaMethod(ms: MethodSymbol,
parentPrefix: String): RawScalapMethod = {
val scalaName = parentPrefix + ms.name
val access = getAccess(ms)
val signature = withScalaSigPrinter { printer =>
printer.printMethodType(ms.infoType, printResult = true)(
printer.TypeFlags(true)
)
}
RawScalapMethod(ms.name, scalaName, signature, access)
}
}
// ==== end of file: core/src/main/scala/org/ensime/core/ScalapSymbolToFqn.scala (repo: yyadavalli/ensime-server, license: gpl-3.0) ====
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util
import java.util.Collections
import java.util.Map.Entry
import java.util.concurrent.TimeUnit.{MILLISECONDS, NANOSECONDS}
import java.util.concurrent.{CompletableFuture, ExecutionException}
import kafka.network.RequestChannel
import kafka.raft.RaftManager
import kafka.server.QuotaFactory.QuotaManagers
import kafka.utils.Logging
import org.apache.kafka.clients.admin.AlterConfigOp
import org.apache.kafka.common.Uuid.ZERO_UUID
import org.apache.kafka.common.acl.AclOperation.{ALTER, ALTER_CONFIGS, CLUSTER_ACTION, CREATE, DELETE, DESCRIBE}
import org.apache.kafka.common.config.ConfigResource
import org.apache.kafka.common.errors.{ApiException, ClusterAuthorizationException, InvalidRequestException, TopicDeletionDisabledException}
import org.apache.kafka.common.internals.FatalExitError
import org.apache.kafka.common.message.AlterConfigsResponseData.{AlterConfigsResourceResponse => OldAlterConfigsResourceResponse}
import org.apache.kafka.common.message.CreatePartitionsRequestData.CreatePartitionsTopic
import org.apache.kafka.common.message.CreatePartitionsResponseData.CreatePartitionsTopicResult
import org.apache.kafka.common.message.CreateTopicsResponseData.CreatableTopicResult
import org.apache.kafka.common.message.DeleteTopicsResponseData.{DeletableTopicResult, DeletableTopicResultCollection}
import org.apache.kafka.common.message.IncrementalAlterConfigsResponseData.AlterConfigsResourceResponse
import org.apache.kafka.common.message.{CreateTopicsRequestData, _}
import org.apache.kafka.common.protocol.Errors._
import org.apache.kafka.common.protocol.{ApiKeys, ApiMessage, Errors}
import org.apache.kafka.common.requests._
import org.apache.kafka.common.resource.Resource.CLUSTER_NAME
import org.apache.kafka.common.resource.ResourceType.{CLUSTER, TOPIC}
import org.apache.kafka.common.utils.Time
import org.apache.kafka.common.{Node, Uuid}
import org.apache.kafka.controller.Controller
import org.apache.kafka.metadata.{BrokerHeartbeatReply, BrokerRegistrationReply, VersionRange}
import org.apache.kafka.server.authorizer.Authorizer
import org.apache.kafka.server.common.ApiMessageAndVersion
import scala.jdk.CollectionConverters._
/**
* Request handler for Controller APIs
*/
class ControllerApis(val requestChannel: RequestChannel,
val authorizer: Option[Authorizer],
val quotas: QuotaManagers,
val time: Time,
val supportedFeatures: Map[String, VersionRange],
val controller: Controller,
val raftManager: RaftManager[ApiMessageAndVersion],
val config: KafkaConfig,
val metaProperties: MetaProperties,
val controllerNodes: Seq[Node],
val apiVersionManager: ApiVersionManager) extends ApiRequestHandler with Logging {
val authHelper = new AuthHelper(authorizer)
val requestHelper = new RequestHandlerHelper(requestChannel, quotas, time)
private val aclApis = new AclApis(authHelper, authorizer, requestHelper, "controller", config)
def isClosed: Boolean = aclApis.isClosed
def close(): Unit = aclApis.close()
override def handle(request: RequestChannel.Request, requestLocal: RequestLocal): Unit = {
try {
request.header.apiKey match {
case ApiKeys.FETCH => handleFetch(request)
case ApiKeys.FETCH_SNAPSHOT => handleFetchSnapshot(request)
case ApiKeys.CREATE_TOPICS => handleCreateTopics(request)
case ApiKeys.DELETE_TOPICS => handleDeleteTopics(request)
case ApiKeys.API_VERSIONS => handleApiVersionsRequest(request)
case ApiKeys.ALTER_CONFIGS => handleLegacyAlterConfigs(request)
case ApiKeys.VOTE => handleVote(request)
case ApiKeys.BEGIN_QUORUM_EPOCH => handleBeginQuorumEpoch(request)
case ApiKeys.END_QUORUM_EPOCH => handleEndQuorumEpoch(request)
case ApiKeys.DESCRIBE_QUORUM => handleDescribeQuorum(request)
case ApiKeys.ALTER_ISR => handleAlterIsrRequest(request)
case ApiKeys.BROKER_REGISTRATION => handleBrokerRegistration(request)
case ApiKeys.BROKER_HEARTBEAT => handleBrokerHeartBeatRequest(request)
case ApiKeys.UNREGISTER_BROKER => handleUnregisterBroker(request)
case ApiKeys.ALTER_CLIENT_QUOTAS => handleAlterClientQuotas(request)
case ApiKeys.INCREMENTAL_ALTER_CONFIGS => handleIncrementalAlterConfigs(request)
case ApiKeys.ALTER_PARTITION_REASSIGNMENTS => handleAlterPartitionReassignments(request)
case ApiKeys.LIST_PARTITION_REASSIGNMENTS => handleListPartitionReassignments(request)
case ApiKeys.ENVELOPE => handleEnvelopeRequest(request, requestLocal)
case ApiKeys.SASL_HANDSHAKE => handleSaslHandshakeRequest(request)
case ApiKeys.SASL_AUTHENTICATE => handleSaslAuthenticateRequest(request)
case ApiKeys.ALLOCATE_PRODUCER_IDS => handleAllocateProducerIdsRequest(request)
case ApiKeys.CREATE_PARTITIONS => handleCreatePartitions(request)
case ApiKeys.DESCRIBE_ACLS => aclApis.handleDescribeAcls(request)
case ApiKeys.CREATE_ACLS => aclApis.handleCreateAcls(request)
case ApiKeys.DELETE_ACLS => aclApis.handleDeleteAcls(request)
case ApiKeys.ELECT_LEADERS => handleElectLeaders(request)
case _ => throw new ApiException(s"Unsupported ApiKey ${request.context.header.apiKey}")
}
} catch {
case e: FatalExitError => throw e
case e: Throwable => {
val t = if (e.isInstanceOf[ExecutionException]) e.getCause() else e
error(s"Unexpected error handling request ${request.requestDesc(true)} " +
s"with context ${request.context}", t)
requestHelper.handleError(request, t)
}
}
}
def handleEnvelopeRequest(request: RequestChannel.Request, requestLocal: RequestLocal): Unit = {
if (!authHelper.authorize(request.context, CLUSTER_ACTION, CLUSTER, CLUSTER_NAME)) {
requestHelper.sendErrorResponseMaybeThrottle(request, new ClusterAuthorizationException(
s"Principal ${request.context.principal} does not have required CLUSTER_ACTION for envelope"))
} else {
EnvelopeUtils.handleEnvelopeRequest(request, requestChannel.metrics, handle(_, requestLocal))
}
}
def handleSaslHandshakeRequest(request: RequestChannel.Request): Unit = {
val responseData = new SaslHandshakeResponseData().setErrorCode(ILLEGAL_SASL_STATE.code)
requestHelper.sendResponseMaybeThrottle(request, _ => new SaslHandshakeResponse(responseData))
}
def handleSaslAuthenticateRequest(request: RequestChannel.Request): Unit = {
val responseData = new SaslAuthenticateResponseData()
.setErrorCode(ILLEGAL_SASL_STATE.code)
.setErrorMessage("SaslAuthenticate request received after successful authentication")
requestHelper.sendResponseMaybeThrottle(request, _ => new SaslAuthenticateResponse(responseData))
}
def handleFetch(request: RequestChannel.Request): Unit = {
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
handleRaftRequest(request, response => new FetchResponse(response.asInstanceOf[FetchResponseData]))
}
def handleFetchSnapshot(request: RequestChannel.Request): Unit = {
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
handleRaftRequest(request, response => new FetchSnapshotResponse(response.asInstanceOf[FetchSnapshotResponseData]))
}
def handleDeleteTopics(request: RequestChannel.Request): Unit = {
val deleteTopicsRequest = request.body[DeleteTopicsRequest]
val future = deleteTopics(deleteTopicsRequest.data,
request.context.apiVersion,
authHelper.authorize(request.context, DELETE, CLUSTER, CLUSTER_NAME, logIfDenied = false),
names => authHelper.filterByAuthorized(request.context, DESCRIBE, TOPIC, names)(n => n),
names => authHelper.filterByAuthorized(request.context, DELETE, TOPIC, names)(n => n))
future.whenComplete { (results, exception) =>
requestHelper.sendResponseMaybeThrottle(request, throttleTimeMs => {
if (exception != null) {
deleteTopicsRequest.getErrorResponse(throttleTimeMs, exception)
} else {
val responseData = new DeleteTopicsResponseData().
setResponses(new DeletableTopicResultCollection(results.iterator)).
setThrottleTimeMs(throttleTimeMs)
new DeleteTopicsResponse(responseData)
}
})
}
}
def deleteTopics(request: DeleteTopicsRequestData,
apiVersion: Int,
hasClusterAuth: Boolean,
getDescribableTopics: Iterable[String] => Set[String],
getDeletableTopics: Iterable[String] => Set[String])
: CompletableFuture[util.List[DeletableTopicResult]] = {
// Check if topic deletion is enabled at all.
if (!config.deleteTopicEnable) {
if (apiVersion < 3) {
throw new InvalidRequestException("Topic deletion is disabled.")
} else {
throw new TopicDeletionDisabledException()
}
}
val deadlineNs = time.nanoseconds() + NANOSECONDS.convert(request.timeoutMs, MILLISECONDS)
// The first step is to load up the names and IDs that have been provided by the
// request. This is a bit messy because we support multiple ways of referring to
// topics (both by name and by id) and because we need to check for duplicates or
// other invalid inputs.
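// Illustrative inputs for the bookkeeping below (hypothetical request): naming
// topic "foo" twice yields a single INVALID_REQUEST "Duplicate topic name."
// response, while an entry carrying both a name and a topic id is rejected
// with "You may not specify both topic name and topic id."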
val responses = new util.ArrayList[DeletableTopicResult]
def appendResponse(name: String, id: Uuid, error: ApiError): Unit = {
responses.add(new DeletableTopicResult().
setName(name).
setTopicId(id).
setErrorCode(error.error.code).
setErrorMessage(error.message))
}
val providedNames = new util.HashSet[String]
val duplicateProvidedNames = new util.HashSet[String]
val providedIds = new util.HashSet[Uuid]
val duplicateProvidedIds = new util.HashSet[Uuid]
def addProvidedName(name: String): Unit = {
if (duplicateProvidedNames.contains(name) || !providedNames.add(name)) {
duplicateProvidedNames.add(name)
providedNames.remove(name)
}
}
request.topicNames.forEach(addProvidedName)
request.topics.forEach {
topic => if (topic.name == null) {
if (topic.topicId.equals(ZERO_UUID)) {
appendResponse(null, ZERO_UUID, new ApiError(INVALID_REQUEST,
"Neither topic name nor id were specified."))
} else if (duplicateProvidedIds.contains(topic.topicId) || !providedIds.add(topic.topicId)) {
duplicateProvidedIds.add(topic.topicId)
providedIds.remove(topic.topicId)
}
} else {
if (topic.topicId.equals(ZERO_UUID)) {
addProvidedName(topic.name)
} else {
appendResponse(topic.name, topic.topicId, new ApiError(INVALID_REQUEST,
"You may not specify both topic name and topic id."))
}
}
}
// Create error responses for duplicates.
duplicateProvidedNames.forEach(name => appendResponse(name, ZERO_UUID,
new ApiError(INVALID_REQUEST, "Duplicate topic name.")))
duplicateProvidedIds.forEach(id => appendResponse(null, id,
new ApiError(INVALID_REQUEST, "Duplicate topic id.")))
// At this point we have all the valid names and IDs that have been provided.
// However, the Authorizer needs topic names as inputs, not topic IDs. So
// we need to resolve all IDs to names.
val toAuthenticate = new util.HashSet[String]
toAuthenticate.addAll(providedNames)
val idToName = new util.HashMap[Uuid, String]
controller.findTopicNames(deadlineNs, providedIds).thenCompose { topicNames =>
topicNames.forEach { (id, nameOrError) =>
if (nameOrError.isError) {
appendResponse(null, id, nameOrError.error())
} else {
toAuthenticate.add(nameOrError.result())
idToName.put(id, nameOrError.result())
}
}
// Get the list of deletable topics (those we can delete) and the list of describeable
// topics.
val topicsToAuthenticate = toAuthenticate.asScala
val (describeable, deletable) = if (hasClusterAuth) {
(topicsToAuthenticate.toSet, topicsToAuthenticate.toSet)
} else {
(getDescribableTopics(topicsToAuthenticate), getDeletableTopics(topicsToAuthenticate))
}
// For each topic that was provided by ID, check if authorization failed.
// If so, remove it from the idToName map and create an error response for it.
val iterator = idToName.entrySet().iterator()
while (iterator.hasNext) {
val entry = iterator.next()
val id = entry.getKey
val name = entry.getValue
if (!deletable.contains(name)) {
if (describeable.contains(name)) {
appendResponse(name, id, new ApiError(TOPIC_AUTHORIZATION_FAILED))
} else {
appendResponse(null, id, new ApiError(TOPIC_AUTHORIZATION_FAILED))
}
iterator.remove()
}
}
// For each topic that was provided by name, check if authorization failed.
// If so, create an error response for it. Otherwise, add it to the idToName map.
controller.findTopicIds(deadlineNs, providedNames).thenCompose { topicIds =>
topicIds.forEach { (name, idOrError) =>
if (!describeable.contains(name)) {
appendResponse(name, ZERO_UUID, new ApiError(TOPIC_AUTHORIZATION_FAILED))
} else if (idOrError.isError) {
appendResponse(name, ZERO_UUID, idOrError.error)
} else if (deletable.contains(name)) {
val id = idOrError.result()
if (duplicateProvidedIds.contains(id) || idToName.put(id, name) != null) {
// This is kind of a weird case: what if we supply topic ID X and also a name
// that maps to ID X? In that case, _if authorization succeeds_, we end up
// here. If authorization doesn't succeed, we refrain from commenting on the
// situation since it would reveal topic ID mappings.
duplicateProvidedIds.add(id)
idToName.remove(id)
appendResponse(name, id, new ApiError(INVALID_REQUEST,
"The provided topic name maps to an ID that was already supplied."))
}
} else {
appendResponse(name, ZERO_UUID, new ApiError(TOPIC_AUTHORIZATION_FAILED))
}
}
// Finally, the idToName map contains all the topics that we are authorized to delete.
// Perform the deletion and create responses for each one.
controller.deleteTopics(deadlineNs, idToName.keySet).thenApply { idToError =>
idToError.forEach { (id, error) =>
appendResponse(idToName.get(id), id, error)
}
// Shuffle the responses so that users cannot use patterns in their positions to
// distinguish between absent topics and topics we are not permitted to see.
Collections.shuffle(responses)
responses
}
}
}
}
def handleCreateTopics(request: RequestChannel.Request): Unit = {
val createTopicsRequest = request.body[CreateTopicsRequest]
val future = createTopics(createTopicsRequest.data(),
authHelper.authorize(request.context, CREATE, CLUSTER, CLUSTER_NAME, logIfDenied = false),
names => authHelper.filterByAuthorized(request.context, CREATE, TOPIC, names)(identity))
future.whenComplete { (result, exception) =>
requestHelper.sendResponseMaybeThrottle(request, throttleTimeMs => {
if (exception != null) {
createTopicsRequest.getErrorResponse(throttleTimeMs, exception)
} else {
result.setThrottleTimeMs(throttleTimeMs)
new CreateTopicsResponse(result)
}
})
}
}
def createTopics(request: CreateTopicsRequestData,
hasClusterAuth: Boolean,
getCreatableTopics: Iterable[String] => Set[String])
: CompletableFuture[CreateTopicsResponseData] = {
val topicNames = new util.HashSet[String]()
val duplicateTopicNames = new util.HashSet[String]()
request.topics().forEach { topicData =>
if (!duplicateTopicNames.contains(topicData.name())) {
if (!topicNames.add(topicData.name())) {
topicNames.remove(topicData.name())
duplicateTopicNames.add(topicData.name())
}
}
}
val authorizedTopicNames = if (hasClusterAuth) {
topicNames.asScala
} else {
getCreatableTopics.apply(topicNames.asScala)
}
val effectiveRequest = request.duplicate()
val iterator = effectiveRequest.topics().iterator()
while (iterator.hasNext) {
val creatableTopic = iterator.next()
if (duplicateTopicNames.contains(creatableTopic.name()) ||
!authorizedTopicNames.contains(creatableTopic.name())) {
iterator.remove()
}
}
controller.createTopics(effectiveRequest).thenApply { response =>
duplicateTopicNames.forEach { name =>
response.topics().add(new CreatableTopicResult().
setName(name).
setErrorCode(INVALID_REQUEST.code).
setErrorMessage("Duplicate topic name."))
}
topicNames.forEach { name =>
if (!authorizedTopicNames.contains(name)) {
response.topics().add(new CreatableTopicResult().
setName(name).
setErrorCode(TOPIC_AUTHORIZATION_FAILED.code))
}
}
response
}
}
def handleApiVersionsRequest(request: RequestChannel.Request): Unit = {
// Note that the broker returns its full list of supported ApiKeys and versions regardless of the
// current authentication state (e.g., before SASL authentication on a SASL listener; note that no
// Kafka protocol requests may take place on an SSL listener before the SSL handshake is finished).
// If this is considered to leak information about the broker version, a workaround is to use SSL
// with client authentication, which is performed at an earlier stage of the connection where the
// ApiVersionsRequest is not available.
def createResponseCallback(requestThrottleMs: Int): ApiVersionsResponse = {
val apiVersionRequest = request.body[ApiVersionsRequest]
if (apiVersionRequest.hasUnsupportedRequestVersion) {
apiVersionRequest.getErrorResponse(requestThrottleMs, UNSUPPORTED_VERSION.exception)
} else if (!apiVersionRequest.isValid) {
apiVersionRequest.getErrorResponse(requestThrottleMs, INVALID_REQUEST.exception)
} else {
apiVersionManager.apiVersionResponse(requestThrottleMs)
}
}
requestHelper.sendResponseMaybeThrottle(request, createResponseCallback)
}
def authorizeAlterResource(requestContext: RequestContext,
resource: ConfigResource): ApiError = {
resource.`type` match {
case ConfigResource.Type.BROKER =>
if (authHelper.authorize(requestContext, ALTER_CONFIGS, CLUSTER, CLUSTER_NAME)) {
new ApiError(NONE)
} else {
new ApiError(CLUSTER_AUTHORIZATION_FAILED)
}
case ConfigResource.Type.TOPIC =>
if (authHelper.authorize(requestContext, ALTER_CONFIGS, TOPIC, resource.name)) {
new ApiError(NONE)
} else {
new ApiError(TOPIC_AUTHORIZATION_FAILED)
}
case rt => new ApiError(INVALID_REQUEST, s"Unexpected resource type $rt.")
}
}
def handleLegacyAlterConfigs(request: RequestChannel.Request): Unit = {
val response = new AlterConfigsResponseData()
val alterConfigsRequest = request.body[AlterConfigsRequest]
val duplicateResources = new util.HashSet[ConfigResource]
val configChanges = new util.HashMap[ConfigResource, util.Map[String, String]]()
alterConfigsRequest.data.resources.forEach { resource =>
val configResource = new ConfigResource(
ConfigResource.Type.forId(resource.resourceType), resource.resourceName())
if (configResource.`type`().equals(ConfigResource.Type.UNKNOWN)) {
response.responses().add(new OldAlterConfigsResourceResponse().
setErrorCode(UNSUPPORTED_VERSION.code()).
setErrorMessage("Unknown resource type " + resource.resourceType() + ".").
setResourceName(resource.resourceName()).
setResourceType(resource.resourceType()))
} else if (!duplicateResources.contains(configResource)) {
val configs = new util.HashMap[String, String]()
resource.configs().forEach(config => configs.put(config.name(), config.value()))
if (configChanges.put(configResource, configs) != null) {
duplicateResources.add(configResource)
configChanges.remove(configResource)
response.responses().add(new OldAlterConfigsResourceResponse().
setErrorCode(INVALID_REQUEST.code()).
setErrorMessage("Duplicate resource.").
setResourceName(resource.resourceName()).
setResourceType(resource.resourceType()))
}
}
}
val iterator = configChanges.keySet().iterator()
while (iterator.hasNext) {
val resource = iterator.next()
val apiError = authorizeAlterResource(request.context, resource)
if (apiError.isFailure) {
response.responses().add(new OldAlterConfigsResourceResponse().
setErrorCode(apiError.error().code()).
setErrorMessage(apiError.message()).
setResourceName(resource.name()).
setResourceType(resource.`type`().id()))
iterator.remove()
}
}
controller.legacyAlterConfigs(configChanges, alterConfigsRequest.data.validateOnly)
.whenComplete { (controllerResults, exception) =>
if (exception != null) {
requestHelper.handleError(request, exception)
} else {
controllerResults.entrySet().forEach(entry => response.responses().add(
new OldAlterConfigsResourceResponse().
setErrorCode(entry.getValue.error().code()).
setErrorMessage(entry.getValue.message()).
setResourceName(entry.getKey.name()).
setResourceType(entry.getKey.`type`().id())))
requestHelper.sendResponseMaybeThrottle(request, throttleMs =>
new AlterConfigsResponse(response.setThrottleTimeMs(throttleMs)))
}
}
}
def handleVote(request: RequestChannel.Request): Unit = {
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
handleRaftRequest(request, response => new VoteResponse(response.asInstanceOf[VoteResponseData]))
}
def handleBeginQuorumEpoch(request: RequestChannel.Request): Unit = {
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
handleRaftRequest(request, response => new BeginQuorumEpochResponse(response.asInstanceOf[BeginQuorumEpochResponseData]))
}
def handleEndQuorumEpoch(request: RequestChannel.Request): Unit = {
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
handleRaftRequest(request, response => new EndQuorumEpochResponse(response.asInstanceOf[EndQuorumEpochResponseData]))
}
def handleDescribeQuorum(request: RequestChannel.Request): Unit = {
authHelper.authorizeClusterOperation(request, DESCRIBE)
handleRaftRequest(request, response => new DescribeQuorumResponse(response.asInstanceOf[DescribeQuorumResponseData]))
}
def handleElectLeaders(request: RequestChannel.Request): Unit = {
authHelper.authorizeClusterOperation(request, ALTER)
val electLeadersRequest = request.body[ElectLeadersRequest]
val future = controller.electLeaders(electLeadersRequest.data)
future.whenComplete { (responseData, exception) =>
if (exception != null) {
requestHelper.sendResponseMaybeThrottle(request, throttleMs => {
electLeadersRequest.getErrorResponse(throttleMs, exception)
})
} else {
requestHelper.sendResponseMaybeThrottle(request, throttleMs => {
new ElectLeadersResponse(responseData.setThrottleTimeMs(throttleMs))
})
}
}
}
def handleAlterIsrRequest(request: RequestChannel.Request): Unit = {
val alterIsrRequest = request.body[AlterIsrRequest]
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
val future = controller.alterIsr(alterIsrRequest.data)
future.whenComplete { (result, exception) =>
val response = if (exception != null) {
alterIsrRequest.getErrorResponse(exception)
} else {
new AlterIsrResponse(result)
}
requestHelper.sendResponseExemptThrottle(request, response)
}
}
def handleBrokerHeartBeatRequest(request: RequestChannel.Request): Unit = {
val heartbeatRequest = request.body[BrokerHeartbeatRequest]
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
controller.processBrokerHeartbeat(heartbeatRequest.data).handle[Unit] { (reply, e) =>
def createResponseCallback(requestThrottleMs: Int,
reply: BrokerHeartbeatReply,
e: Throwable): BrokerHeartbeatResponse = {
if (e != null) {
new BrokerHeartbeatResponse(new BrokerHeartbeatResponseData().
setThrottleTimeMs(requestThrottleMs).
setErrorCode(Errors.forException(e).code))
} else {
new BrokerHeartbeatResponse(new BrokerHeartbeatResponseData().
setThrottleTimeMs(requestThrottleMs).
setErrorCode(NONE.code).
setIsCaughtUp(reply.isCaughtUp).
setIsFenced(reply.isFenced).
setShouldShutDown(reply.shouldShutDown))
}
}
requestHelper.sendResponseMaybeThrottle(request,
requestThrottleMs => createResponseCallback(requestThrottleMs, reply, e))
}
}
def handleUnregisterBroker(request: RequestChannel.Request): Unit = {
val decommissionRequest = request.body[UnregisterBrokerRequest]
authHelper.authorizeClusterOperation(request, ALTER)
controller.unregisterBroker(decommissionRequest.data().brokerId()).handle[Unit] { (_, e) =>
def createResponseCallback(requestThrottleMs: Int,
e: Throwable): UnregisterBrokerResponse = {
if (e != null) {
new UnregisterBrokerResponse(new UnregisterBrokerResponseData().
setThrottleTimeMs(requestThrottleMs).
setErrorCode(Errors.forException(e).code))
} else {
new UnregisterBrokerResponse(new UnregisterBrokerResponseData().
setThrottleTimeMs(requestThrottleMs))
}
}
requestHelper.sendResponseMaybeThrottle(request,
requestThrottleMs => createResponseCallback(requestThrottleMs, e))
}
}
def handleBrokerRegistration(request: RequestChannel.Request): Unit = {
val registrationRequest = request.body[BrokerRegistrationRequest]
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
controller.registerBroker(registrationRequest.data).handle[Unit] { (reply, e) =>
def createResponseCallback(requestThrottleMs: Int,
reply: BrokerRegistrationReply,
e: Throwable): BrokerRegistrationResponse = {
if (e != null) {
new BrokerRegistrationResponse(new BrokerRegistrationResponseData().
setThrottleTimeMs(requestThrottleMs).
setErrorCode(Errors.forException(e).code))
} else {
new BrokerRegistrationResponse(new BrokerRegistrationResponseData().
setThrottleTimeMs(requestThrottleMs).
setErrorCode(NONE.code).
setBrokerEpoch(reply.epoch))
}
}
requestHelper.sendResponseMaybeThrottle(request,
requestThrottleMs => createResponseCallback(requestThrottleMs, reply, e))
}
}
private def handleRaftRequest(request: RequestChannel.Request,
buildResponse: ApiMessage => AbstractResponse): Unit = {
val requestBody = request.body[AbstractRequest]
val future = raftManager.handleRequest(request.header, requestBody.data, time.milliseconds())
future.whenComplete { (responseData, exception) =>
val response = if (exception != null) {
requestBody.getErrorResponse(exception)
} else {
buildResponse(responseData)
}
requestHelper.sendResponseExemptThrottle(request, response)
}
}
def handleAlterClientQuotas(request: RequestChannel.Request): Unit = {
val quotaRequest = request.body[AlterClientQuotasRequest]
authHelper.authorizeClusterOperation(request, ALTER_CONFIGS)
controller.alterClientQuotas(quotaRequest.entries, quotaRequest.validateOnly)
.whenComplete { (results, exception) =>
if (exception != null) {
requestHelper.handleError(request, exception)
} else {
requestHelper.sendResponseMaybeThrottle(request, requestThrottleMs =>
AlterClientQuotasResponse.fromQuotaEntities(results, requestThrottleMs))
}
}
}
def handleIncrementalAlterConfigs(request: RequestChannel.Request): Unit = {
val response = new IncrementalAlterConfigsResponseData()
val alterConfigsRequest = request.body[IncrementalAlterConfigsRequest]
val duplicateResources = new util.HashSet[ConfigResource]
val configChanges = new util.HashMap[ConfigResource,
util.Map[String, Entry[AlterConfigOp.OpType, String]]]()
alterConfigsRequest.data.resources.forEach { resource =>
val configResource = new ConfigResource(
ConfigResource.Type.forId(resource.resourceType), resource.resourceName())
if (configResource.`type`().equals(ConfigResource.Type.UNKNOWN)) {
response.responses().add(new AlterConfigsResourceResponse().
setErrorCode(UNSUPPORTED_VERSION.code()).
setErrorMessage("Unknown resource type " + resource.resourceType() + ".").
setResourceName(resource.resourceName()).
setResourceType(resource.resourceType()))
} else if (!duplicateResources.contains(configResource)) {
val altersByName = new util.HashMap[String, Entry[AlterConfigOp.OpType, String]]()
resource.configs.forEach { config =>
altersByName.put(config.name, new util.AbstractMap.SimpleEntry[AlterConfigOp.OpType, String](
AlterConfigOp.OpType.forId(config.configOperation), config.value))
}
if (configChanges.put(configResource, altersByName) != null) {
duplicateResources.add(configResource)
configChanges.remove(configResource)
response.responses().add(new AlterConfigsResourceResponse().
setErrorCode(INVALID_REQUEST.code()).
setErrorMessage("Duplicate resource.").
setResourceName(resource.resourceName()).
setResourceType(resource.resourceType()))
}
}
}
val iterator = configChanges.keySet().iterator()
while (iterator.hasNext) {
val resource = iterator.next()
val apiError = authorizeAlterResource(request.context, resource)
if (apiError.isFailure) {
response.responses().add(new AlterConfigsResourceResponse().
setErrorCode(apiError.error().code()).
setErrorMessage(apiError.message()).
setResourceName(resource.name()).
setResourceType(resource.`type`().id()))
iterator.remove()
}
}
controller.incrementalAlterConfigs(configChanges, alterConfigsRequest.data.validateOnly)
.whenComplete { (controllerResults, exception) =>
if (exception != null) {
requestHelper.handleError(request, exception)
} else {
controllerResults.entrySet().forEach(entry => response.responses().add(
new AlterConfigsResourceResponse().
setErrorCode(entry.getValue.error().code()).
setErrorMessage(entry.getValue.message()).
setResourceName(entry.getKey.name()).
setResourceType(entry.getKey.`type`().id())))
requestHelper.sendResponseMaybeThrottle(request, throttleMs =>
new IncrementalAlterConfigsResponse(response.setThrottleTimeMs(throttleMs)))
}
}
}
def handleCreatePartitions(request: RequestChannel.Request): Unit = {
def filterAlterAuthorizedTopics(topics: Iterable[String]): Set[String] = {
authHelper.filterByAuthorized(request.context, ALTER, TOPIC, topics)(n => n)
}
val future = createPartitions(
request.body[CreatePartitionsRequest].data,
filterAlterAuthorizedTopics
)
future.whenComplete { (responses, exception) =>
if (exception != null) {
requestHelper.handleError(request, exception)
} else {
requestHelper.sendResponseMaybeThrottle(request, requestThrottleMs => {
val responseData = new CreatePartitionsResponseData().
setResults(responses).
setThrottleTimeMs(requestThrottleMs)
new CreatePartitionsResponse(responseData)
})
}
}
}
def createPartitions(
request: CreatePartitionsRequestData,
getAlterAuthorizedTopics: Iterable[String] => Set[String]
): CompletableFuture[util.List[CreatePartitionsTopicResult]] = {
val deadlineNs = time.nanoseconds() + NANOSECONDS.convert(request.timeoutMs, MILLISECONDS)
val responses = new util.ArrayList[CreatePartitionsTopicResult]()
val duplicateTopicNames = new util.HashSet[String]()
val topicNames = new util.HashSet[String]()
request.topics().forEach {
topic =>
if (!topicNames.add(topic.name())) {
duplicateTopicNames.add(topic.name())
}
}
duplicateTopicNames.forEach { topicName =>
responses.add(new CreatePartitionsTopicResult().
setName(topicName).
setErrorCode(INVALID_REQUEST.code).
setErrorMessage("Duplicate topic name."))
topicNames.remove(topicName)
}
val authorizedTopicNames = getAlterAuthorizedTopics(topicNames.asScala)
val topics = new util.ArrayList[CreatePartitionsTopic]
topicNames.forEach { topicName =>
if (authorizedTopicNames.contains(topicName)) {
topics.add(request.topics().find(topicName))
} else {
responses.add(new CreatePartitionsTopicResult().
setName(topicName).
setErrorCode(TOPIC_AUTHORIZATION_FAILED.code))
}
}
controller.createPartitions(deadlineNs, topics).thenApply { results =>
results.forEach(response => responses.add(response))
responses
}
}
def handleAlterPartitionReassignments(request: RequestChannel.Request): Unit = {
val alterRequest = request.body[AlterPartitionReassignmentsRequest]
authHelper.authorizeClusterOperation(request, ALTER)
val response = controller.alterPartitionReassignments(alterRequest.data()).get()
requestHelper.sendResponseMaybeThrottle(request, requestThrottleMs =>
new AlterPartitionReassignmentsResponse(response.setThrottleTimeMs(requestThrottleMs)))
}
def handleListPartitionReassignments(request: RequestChannel.Request): Unit = {
val listRequest = request.body[ListPartitionReassignmentsRequest]
authHelper.authorizeClusterOperation(request, DESCRIBE)
val response = controller.listPartitionReassignments(listRequest.data()).get()
requestHelper.sendResponseMaybeThrottle(request, requestThrottleMs =>
new ListPartitionReassignmentsResponse(response.setThrottleTimeMs(requestThrottleMs)))
}
def handleAllocateProducerIdsRequest(request: RequestChannel.Request): Unit = {
val allocatedProducerIdsRequest = request.body[AllocateProducerIdsRequest]
authHelper.authorizeClusterOperation(request, CLUSTER_ACTION)
controller.allocateProducerIds(allocatedProducerIdsRequest.data)
.whenComplete((results, exception) => {
if (exception != null) {
requestHelper.handleError(request, exception)
} else {
requestHelper.sendResponseMaybeThrottle(request, requestThrottleMs => {
results.setThrottleTimeMs(requestThrottleMs)
new AllocateProducerIdsResponse(results)
})
}
})
}
}
// ==== end of file: core/src/main/scala/kafka/server/ControllerApis.scala (repo: TiVo/kafka, license: apache-2.0) ====
package AST
object Priority {
val binary = Map("lambda" -> 1,
"or" -> 2,
"and" -> 3,
"is" -> 8, "<" -> 8, ">" -> 8, ">=" -> 8, "<=" -> 8, "==" -> 8, "!=" -> 8,
"+" -> 9, "-" -> 9,
"*" -> 10, "/" -> 10, "%" -> 10,
"**" -> 12)
val unary = Map("not" -> 4, "+" -> 12, "-" -> 12)
}
// sealed trait Node would also be OK
sealed abstract class Node {
def toStr = "error: toStr not implemented"
val indent = " " * 4
}
// simple classes without overriding equals
case class IntNum(value: Integer) extends Node {
override def toStr = value.toString
}
case class FloatNum(value: Double) extends Node {
override def toStr = value.toString
}
case class StringConst(value: String) extends Node {
override def toStr = value
}
case class TrueConst() extends Node {
override def toStr = "True"
}
case class FalseConst() extends Node {
override def toStr = "False"
}
case class Variable(name: String) extends Node {
override def toStr = name
}
// classes that override equals
case class Unary(op: String, expr: Node) extends Node {
override def toStr = {
var str = expr.toStr
expr match {
case e@BinExpr(_,_,_) => if(Priority.binary(e.op)<=Priority.unary(op)) { str = "(" + str + ")" }
case e@Unary(_,_) => if(Priority.unary(e.op)<=Priority.unary(op)) { str = "(" + str + ")" }
case _ =>
}
op + " " + str
}
override def equals(that: Any): Boolean = that match {
case Unary(thatOp, thatExpr) if thatOp == op && thatExpr == expr
=> true
case _ => false
}
}
case class BinExpr(op: String, left: Node, right: Node) extends Node {
override def toStr = {
var leftStr = left.toStr
var rightStr = right.toStr
left match {
case l@(_:BinExpr) => if(Priority.binary(l.op)<Priority.binary(op)) { leftStr = "(" + leftStr + ")" }
case l@(_:Unary) => if(Priority.unary(l.op)<Priority.binary(op)) { leftStr = "(" + leftStr + ")" }
case _ =>
}
right match {
case r@BinExpr(_,_,_) => if(Priority.binary(r.op)<Priority.binary(op)) { rightStr = "(" + rightStr + ")" }
case r@Unary(_,_) => if(Priority.unary(r.op)<Priority.binary(op)) { rightStr = "(" + rightStr + ")" }
case _ =>
}
leftStr + " " + op + " " + rightStr
}
override def equals(that: Any) = that match {
case BinExpr("+", l, r) if l == right && r == left && op == "+" => true
case BinExpr("*", l, r) if l == right && r == left && op == "*" => true
case BinExpr("and", l, r) if l == right && r == left && op == "and" => true
case BinExpr("or", l, r) if l == right && r == left && op == "or" => true
case BinExpr(o, l, r) if l == left && r == right && o == op => true
case _ => false
}
}
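// Illustrative behavior (hypothetical nodes): the priority tables drive
// parenthesization, e.g.
//   BinExpr("*", BinExpr("+", Variable("a"), Variable("b")), Variable("c")).toStr
// yields "(a + b) * c", while the overridden equals treats commutative
// operators symmetrically, so
//   BinExpr("+", Variable("a"), Variable("b")) == BinExpr("+", Variable("b"), Variable("a"))
// holds.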
case class IfElseExpr(cond: Node, left: Node, right: Node) extends Node {
override def toStr = left.toStr + " if " + cond.toStr + " else " + right.toStr
}
case class Assignment(left: Node, right: Node) extends Node {
override def toStr = left.toStr + " = " + right.toStr
}
case class Subscription(expr: Node, sub: Node) extends Node {
override def toStr = expr.toStr + "[" + sub.toStr + "]"
}
case class KeyDatum(key: Node, value: Node) extends Node {
override def toStr = key.toStr + ": " + value.toStr
}
case class GetAttr(expr:Node, attr: String) extends Node {
override def toStr = expr.toStr + "." + attr
}
case class IfInstr(cond: Node, left: Node) extends Node {
override def toStr = {
var str = "if " + cond.toStr + ":\\n"
str += left.toStr.replaceAll("(?m)^", indent)
str
}
}
case class IfElseInstr(cond: Node, left: Node, right: Node) extends Node {
override def toStr = {
var str = "if " + cond.toStr + ":\\n"
str += left.toStr.replaceAll("(?m)^", indent)
str += "\\nelse:\\n"
str += right.toStr.replaceAll("(?m)^", indent)
str
}
}
case class WhileInstr(cond: Node, body: Node) extends Node {
override def toStr = {
"while " + cond.toStr + ":\\n" + body.toStr.replaceAll("(?m)^", indent)
}
}
case class InputInstr() extends Node {
override def toStr = "input()"
}
case class ReturnInstr(expr: Node) extends Node {
override def toStr = "return " + expr.toStr
}
case class PrintInstr(expr: Node) extends Node {
override def toStr = "print " + expr.toStr
}
case class FunCall(name: Node, args_list: Node) extends Node {
override def toStr = {
args_list match {
case NodeList(list) => name.toStr + "(" + list.map(_.toStr).mkString("", ",", "") + ")"
case _ => name.toStr + "(" + args_list.toStr + ")"
}
}
}
case class FunDef(name: String, formal_args: Node, body: Node) extends Node {
override def toStr = {
var str = "\\ndef " + name + "(" + formal_args.toStr + "):\\n"
str += body.toStr.replaceAll("(?m)^", indent) + "\\n"
str
}
}
case class LambdaDef(formal_args: Node, body: Node) extends Node {
override def toStr = "lambda " + formal_args.toStr + ": " + body.toStr
}
case class ClassDef(name: String, inherit_list: Node, suite: Node) extends Node {
override def toStr = {
val str = "\\nclass " + name
var inheritStr = ""
    val suiteStr = ":\n" + suite.toStr.replaceAll("(?m)^", indent)
inherit_list match {
case NodeList(x) => if(x.length>0) inheritStr = "(" + x.map(_.toStr).mkString("", ",", "") + ")"
case _ =>
}
str + inheritStr + suiteStr
}
}
case class NodeList(list: List[Node]) extends Node {
override def toStr = {
    list.map(_.toStr).mkString("", "\n", "")
}
}
case class KeyDatumList(list: List[KeyDatum]) extends Node {
override def toStr = list.map(_.toStr).mkString("{", ",", "}")
}
case class IdList(list: List[Variable]) extends Node {
override def toStr = list.map(_.toStr).mkString("", ",", "")
}
case class ElemList(list: List[Node]) extends Node {
override def toStr = list.map(_.toStr).mkString("[", ",", "]")
}
case class Tuple(list: List[Node]) extends Node {
override def toStr = if(list.length==0) "()"
else if(list.length==1) "(" + list(0).toStr + ",)"
else list.map(_.toStr).mkString("(", ",", ")")
}
case class EmptyInstr() extends Node {
override def toStr = ""
} | tmachows/kompilatory | lab05-optimizer/simplifier_combinator/src/main/scala/AST/AST.scala | Scala | gpl-2.0 | 6,687 |
package sttp.client3.armeria.cats
import cats.effect.kernel.{Async, Resource, Sync}
import com.linecorp.armeria.client.WebClient
import com.linecorp.armeria.common.HttpData
import com.linecorp.armeria.common.stream.StreamMessage
import org.reactivestreams.Publisher
import sttp.client3.armeria.AbstractArmeriaBackend.newClient
import sttp.client3.armeria.{AbstractArmeriaBackend, BodyFromStreamMessage}
import sttp.client3.impl.cats.CatsMonadAsyncError
import sttp.client3.internal.NoStreams
import sttp.client3.{FollowRedirectsBackend, SttpBackend, SttpBackendOptions}
import sttp.monad.MonadAsyncError
private final class ArmeriaCatsBackend[F[_]: Async](client: WebClient, closeFactory: Boolean)
extends AbstractArmeriaBackend[F, Nothing](client, closeFactory, new CatsMonadAsyncError) {
override val streams: NoStreams = NoStreams
override protected def bodyFromStreamMessage: BodyFromStreamMessage[F, Nothing] =
new BodyFromStreamMessage[F, Nothing] {
override val streams: NoStreams = NoStreams
override implicit val monad: MonadAsyncError[F] = new CatsMonadAsyncError
override def publisherToStream(streamMessage: StreamMessage[HttpData]): Nothing =
throw new UnsupportedOperationException("This backend does not support streaming")
}
override protected def streamToPublisher(stream: Nothing): Publisher[HttpData] =
throw new UnsupportedOperationException("This backend does not support streaming")
}
object ArmeriaCatsBackend {
  /** Creates a new Armeria backend, using the given or default `SttpBackendOptions`. Because the
    * options are customised, the resulting client will manage its own connection pool. If you'd
    * like to reuse the default Armeria [[https://armeria.dev/docs/client-factory ClientFactory]],
    * use `.usingDefaultClient`.
*/
def apply[F[_]: Async](options: SttpBackendOptions = SttpBackendOptions.Default): SttpBackend[F, Any] =
apply(newClient(options), closeFactory = true)
def resource[F[_]: Async](
options: SttpBackendOptions = SttpBackendOptions.Default
): Resource[F, SttpBackend[F, Any]] = {
Resource.make(Sync[F].delay(apply(newClient(options), closeFactory = true)))(_.close())
}
def usingDefaultClient[F[_]: Async](): SttpBackend[F, Any] =
apply(newClient(), closeFactory = false)
def usingClient[F[_]: Async](client: WebClient): SttpBackend[F, Any] =
apply(client, closeFactory = false)
private def apply[F[_]: Async](
client: WebClient,
closeFactory: Boolean
): SttpBackend[F, Any] =
new FollowRedirectsBackend(new ArmeriaCatsBackend(client, closeFactory))
}
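// A minimal usage sketch (not part of the original sources; assumes cats-effect 3
// and sttp core on the classpath; the URI is illustrative):
//
//   import cats.effect.{IO, IOApp}
//   import sttp.client3._
//
//   object Example extends IOApp.Simple {
//     def run: IO[Unit] =
//       ArmeriaCatsBackend.resource[IO]().use { backend =>
//         basicRequest
//           .get(uri"https://example.org")
//           .send(backend)
//           .map(response => println(response.code))
//       }
//   }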
| softwaremill/sttp | armeria-backend/cats/src/main/scala/sttp/client3/armeria/cats/ArmeriaCatsBackend.scala | Scala | apache-2.0 | 2,608 |
import awscala._
import com.amazonaws.services.simpledb.model.ListDomainsRequest
import com.amazonaws.services.{simpledb => aws}
import scala.collection.JavaConverters._
object SimpleDB {
def apply(credentials: Credentials)(implicit region: Region): SimpleDB = new SimpleDBClient(BasicCredentialsProvider(credentials.getAWSAccessKeyId, credentials.getAWSSecretKey)).at(region)
def apply(credentialsProvider: CredentialsProvider = CredentialsLoader.load())(implicit region: Region = Region.default()): SimpleDB = new SimpleDBClient(credentialsProvider).at(region)
def apply(accessKeyId: String, secretAccessKey: String)(implicit region: Region): SimpleDB = apply(BasicCredentialsProvider(accessKeyId, secretAccessKey)).at(region)
def at(region: Region): SimpleDB = apply()(region)
}
/**
* Amazon SimpleDB Java client wrapper
* @see [[http://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/]]
*/
trait SimpleDB extends aws.AmazonSimpleDB {
def at(region: Region): SimpleDB = {
this.setRegion(region)
this
}
// ------------------------------------------
// Domains
// ------------------------------------------
def domains: Seq[Domain] = {
import aws.model.ListDomainsResult
object domainsSequencer extends Sequencer[Domain, ListDomainsResult, String] {
val baseRequest = new ListDomainsRequest()
def getInitial = listDomains(baseRequest)
def getMarker(r: ListDomainsResult) = r.getNextToken()
def getFromMarker(marker: String) = listDomains(baseRequest.withNextToken(marker))
def getList(r: ListDomainsResult) = (r.getDomainNames().asScala.toList map { x => Domain(x) }).asJava
}
domainsSequencer.sequence
}
def domain(name: String): Option[Domain] = domains.find(_.name == name)
def domainMetadata(domain: Domain): DomainMetadata = {
DomainMetadata(domainMetadata(new aws.model.DomainMetadataRequest().withDomainName(domain.name)))
}
def createDomain(name: String): Domain = {
createDomain(new aws.model.CreateDomainRequest().withDomainName(name))
Domain(name)
}
def deleteDomain(domain: Domain): Unit = deleteDomain(new aws.model.DeleteDomainRequest().withDomainName(domain.name))
// ------------------------------------------
// Items/Attributes
// ------------------------------------------
def select(domain: Domain, expression: String, consistentRead: Boolean = false): Seq[Item] = {
import aws.model.SelectResult
object selectSequencer extends Sequencer[Item, SelectResult, String] {
val baseRequest = new aws.model.SelectRequest().withSelectExpression(expression).withConsistentRead(consistentRead)
def getInitial = select(baseRequest)
def getMarker(r: SelectResult) = r.getNextToken()
def getFromMarker(marker: String) = select(baseRequest.withNextToken(marker))
def getList(r: SelectResult) = (r.getItems().asScala.toList map { x => Item(domain, x) }).asJava
}
selectSequencer.sequence
}
def attributes(item: Item): Seq[Attribute] = {
getAttributes(
new aws.model.GetAttributesRequest().withDomainName(item.domain.name).withItemName(item.name)
)
.getAttributes.asScala.map(as => Attribute(item, as)).toSeq
}
def replaceAttributesIfExists(item: Item, attributes: (String, String)*): Unit = {
putAttributes(new aws.model.PutAttributesRequest()
.withDomainName(item.domain.name)
.withItemName(item.name)
.withAttributes(attributes.map {
case (k, v) =>
new aws.model.ReplaceableAttribute().withName(k).withValue(v).withReplace(true)
}.asJava))
}
def putAttributes(item: Item, attributes: (String, String)*): Unit = {
putAttributes(new aws.model.PutAttributesRequest()
.withDomainName(item.domain.name)
.withItemName(item.name)
.withAttributes(attributes.map {
case (k, v) =>
new aws.model.ReplaceableAttribute().withName(k).withValue(v).withReplace(false)
}.asJava))
}
def deleteItems(items: Seq[Item]): Unit = {
items.headOption.foreach { item =>
batchDeleteAttributes(new aws.model.BatchDeleteAttributesRequest()
.withDomainName(item.domain.name)
.withItems(items.map(i => new aws.model.DeletableItem().withName(i.name)).asJava))
}
}
def deleteAttributes(attributes: Seq[Attribute]): Unit = {
attributes.headOption.foreach { attr =>
deleteAttributes(new aws.model.DeleteAttributesRequest()
.withItemName(attr.item.name)
.withAttributes(attributes.map(_.asInstanceOf[aws.model.Attribute]).asJava))
}
}
}
/**
* Default Implementation
*
* @param credentialsProvider credentialsProvider
*/
class SimpleDBClient(credentialsProvider: CredentialsProvider = CredentialsLoader.load())
extends aws.AmazonSimpleDBClient(credentialsProvider)
with SimpleDB
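// A minimal usage sketch (not part of the original sources; the domain name is
// illustrative and credentials are assumed to be resolvable via CredentialsLoader):
//
//   implicit val region = Region.default()
//   val sdb = SimpleDB()
//   val users = sdb.createDomain("users")
//   val items = sdb.select(users, "select * from `users`")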
| hirokikonishi/awscala | aws/simpledb/src/main/scala/SimpleDB.scala | Scala | apache-2.0 | 4,831 |
// TODO (ptr): Replace MultiMap with MultiDict
package uppsat.approximation.components
import scala.collection.mutable.{ArrayBuffer, ArrayStack, HashMap,
MultiMap, Queue, Set}
import uppsat.globalOptions._
import uppsat.ModelEvaluator
import uppsat.ModelEvaluator.Model
import uppsat.Timer
import uppsat.approximation._
import uppsat.approximation.toolbox.Toolbox
import uppsat.ast._
import uppsat.precision.{PrecisionMap, PrecisionOrdering}
import uppsat.precision.PrecisionMap.Path
import uppsat.solver.Z3Solver
import uppsat.theory.Theory
import uppsat.theory.FloatingPointTheory
import uppsat.theory.FloatingPointTheory._
import uppsat.theory.FloatingPointTheory.FPSortFactory.FPSort
import uppsat.theory.BooleanTheory
import uppsat.theory.BooleanTheory._
import uppsat.solver.Z3OnlineSolver
// TODO (ptr): Add more details on fixpoint reconstruction
/** Fixpoint Reconstruction
*
*
*/
trait FixpointReconstruction extends ModelReconstruction {
class FixpointException(msg : String)
extends Exception("FixpointException: " + msg)
  /** Returns the assignment of one implied variable in ast (w.r.t.
* candidateModel)
*
* Assuming that ast w.r.t. candidateModel has exactly one undefined
* variable, getImplication isolates this variable and by calling the
* backend solver retrieves a value and returns Some(variable, value) for
* this pair. If no value is found None is returned (thus the candidateModel
* is *not* a model for ast).
*
* @param candidateModel Assignments of some values in ast.
* @param ast The AST with exactly one undefined variable.
*
* @return An assignment to the undefined variable in ast s.t. it is
* compatible with candidateModel. None if none can be found.
*
*/
def getImplication(candidateModel : Model, ast : AST) : Option[(AST, AST)] = {
val vars = ast.toList.filter(_.isVariable)
val unknown =
vars.filterNot(candidateModel.contains(_)).map(x => (x.symbol, x)).toMap
unknown.toList match {
case List((unkSymbol, unkAST)) => {
verbose("Implication of " + unkSymbol + "\\n\\t" + ast.simpleString())
val assertions =
for ( v <- vars if(candidateModel.contains(v))) yield
(v.symbol, candidateModel(v))
ModelEvaluator.evalAST(ast, unkSymbol, assertions, inputTheory) match {
case Some(res) => Some ((unkAST, res))
case None => None
}
}
case _ => {
        val msg =
          "getImplication assumes at most one unknown: " + unknown.mkString(", ")
throw new FixpointException(msg)
}
}
}
/** The number of variables in ast that are not defined in candidateModel
*
* @param candidateModel Model (possibly) containing variable definitions
* @param ast AST (possibly) containing variables
*/
def numUndefValues(candidateModel : Model, ast : AST) =
ast.toList.filter((x:AST) =>
x.isVariable && !candidateModel.contains(x)).map(_.symbol).distinct.length
/** True if ast is a definition
*
  * An ast is a definition if it is an equation where at least one side is a
  * variable.
*
* @param ast Formula to check
*
*/
// TODO (ptr): If we have a meta-equality we don't need to check for each
// different theory.
def isDefinition(ast : AST) : Boolean = {
ast.symbol match {
case pred : FloatingPointPredicateSymbol
if (pred.getFactory == FPEqualityFactory) =>
ast.children(0).isVariable || ast.children(1).isVariable
case BoolEquality | RoundingModeEquality =>
ast.children(0).isVariable || ast.children(1).isVariable
case _ => false
}
}
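  // For example, under the check above `x == f(y)` and `f(y) == x` both count
  // as definitions (one side is a plain variable), while `f(x) == g(y)` does not.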
/** A Boolean symbol defined by {@code ast} if exists
*
* A symbol is defined if it is a single variable on one side of an equality.
*
* @param ast Formula to be checked for definition
  * @param polarity Should be true if the formula is under an even number of
  * negations.
* @return Some variable which is defined by {@code ast} if exists otherwise
* None.
*
*/
def getBoolDefinitions(ast : AST, polarity : Boolean) : Option[(AST,AST)] = {
(ast.symbol, polarity) match {
case (BoolEquality, true) |
(RoundingModeEquality, true) |
(FPEqualityFactory(_), true) => {
(ast.children(0).isVariable, ast.children(1).isVariable) match {
case (true, _) => Some((ast.children(0), ast))
case (_, true) => Some((ast.children(1), ast))
case _ => None
}
}
case _ => None
}
}
/** All top-level subformulas which must be true for the formula to be true */
def topLvlConjuncts(ast : AST) : Iterator[AST]= {
ast.symbol match {
case BoolConjunction | _ : NaryConjunction =>
for (c <- ast.children.iterator;
d <- topLvlConjuncts(c)) yield d
case _ => Iterator(ast)
}
}
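  // For example, for `a && (b && c)` this yields the conjuncts a, b and c,
  // whereas a disjunction such as `a || b` is returned as a single conjunct.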
// TODO (Aleks): What is the difference between this and
// retrieveCriticalAtoms?
/** All critical atoms in {@code ast}.
*/
def extractCriticalAtoms(ast : AST, decodedModel : Model) = {
val (definitionAtoms, conjuncts) =
topLvlConjuncts(ast).toList.partition(isDefinition(_))
var definitions =
for (a <- definitionAtoms; b <- getBoolDefinitions(a, true)) yield b
val critical = new ArrayBuffer[AST]
var todo = new Queue[AST]
todo ++= conjuncts
while (!todo.isEmpty) {
for (c <- todo) {
critical ++= Toolbox.retrieveCriticalAtoms(decodedModel)(c)
}
todo.clear()
val vars = (for (c <- critical.iterator;
v <- c.iterator.filter(_.isVariable)) yield
v.symbol).toSet
val (toBeAdded, toKeep) =
definitions.partition((p) => vars.contains(p._1.symbol))
todo ++= toBeAdded.map(_._2)
definitions = toKeep
}
(definitions, critical, conjuncts) // TODO: Inspect the correct returns
}
/** Reconstruct formula {@code ast} using {@code decodedModel}
*/
def reconstruct(ast : AST, decodedModel : Model) : Model = {
val candidateModel = new Model()
verbose("Starting fixpoint reconstruction")
val (definitions, critical, conjuncts) =
extractCriticalAtoms(ast, decodedModel)
verbose("Critical " + critical.mkString("\\n\\t"))
verbose("Definitons : " + definitions.mkString("\\n"))
//TODO: Remove duplicate definitions
//TODO: Cycle-breaking
val varsToCritical =
new HashMap[ConcreteFunctionSymbol, Set[AST]]
with MultiMap[ConcreteFunctionSymbol, AST]
(for (c <- critical.iterator; v <- c.iterator.filter(_.isVariable))
yield (v.symbol, c)).foldLeft(varsToCritical){
(acc, pair) => acc.addBinding(pair._1, pair._2)
}
//Fix-point computation
var done = false
var changed = false
var iteration = 0
val allVars =
varsToCritical.keys.toList.sortWith((x,y) =>
varsToCritical(x).size < varsToCritical(y).size)
// Boolean variables can just be copied over
for (v <- allVars if v.theory == BooleanTheory)
copyFromDecodedModelIfNotSet(decodedModel, candidateModel, AST(v))
var varDependency =
new HashMap[ConcreteFunctionSymbol, Set[ConcreteFunctionSymbol]]
with MultiMap[ConcreteFunctionSymbol, ConcreteFunctionSymbol]
for (c <- critical.toList if isDefinition(c)) {
val lhs = c.children(0)
if (lhs.isVariable && lhs.symbol.theory != BooleanTheory) {
for (v <- c.children(1).iterator.filter(_.isVariable))
varDependency.addBinding(lhs.symbol, v.symbol)
}
      val rhs = c.children(1)
if (rhs.isVariable && rhs.symbol.theory != BooleanTheory) {
for (v <- c.children(0).iterator.filter(_.isVariable))
varDependency.addBinding(rhs.symbol, v.symbol)
}
}
val independentVars = Toolbox.topologicalSort(varDependency)
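    // Handle variables in topological order of the definition dependencies, so
    // that (as far as possible) a variable is assigned only after the variables
    // its definition refers to.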
// TODO: This is theory specific...
// First migrate special values
verbose("Migrating special values")
for (v <- independentVars if v.sort.isInstanceOf[FPSort]) {
val value = decodedModel(AST(v))
value.symbol.asInstanceOf[IndexedFunctionSymbol].getFactory match {
case FPSpecialValuesFactory(_) => {
verbose("Migrating special value " + value)
candidateModel.set(AST(v), value)
}
case _ => ()
}
}
val vars =
independentVars.filterNot(candidateModel.variableValuation.contains(_))
verbose("Sorted variables :\\n\\t" + vars.mkString("\\n\\t"))
while (!done) {
iteration += 1
verbose("=============================\\nPatching iteration " + iteration)
val implications =
// TODO: reconsider the first condition
critical.filter {x => x.children.length > 0 &&
isDefinition(x) &&
numUndefValues(candidateModel, x) == 1 }
.sortBy(_.iterator.size)
verbose("Implications(" + implications.length + "):")
      verbose(implications.map(_.simpleString()).mkString("\n\t"))
verbose("**************************************************")
changed = false
// TODO: (Aleks) Some implications might become fully set? x = y*y?
for (i <- implications if numUndefValues(candidateModel, i) == 1 ) {
val imp = getImplication(candidateModel, i)
imp match {
case Some((node, value)) => {
verbose("Inserting " + node.getSMT() + " -> " + value.getSMT())
candidateModel.set(node, value)
for ( crit <- varsToCritical(node.symbol)
if !candidateModel.contains(crit)
&& numUndefValues(candidateModel, crit) == 0) {
// We will set the values only of the literals that
// have not been evaluated yet and have no unknowns
// Consider cascading expressions, do we need to watch all of them
//evaluateNode(decodedModel, candidateModel, crit)
// TODO: When does this fail?
AST.postVisit(crit, candidateModel, candidateModel, evaluateNode)
if (crit.symbol.sort == BooleanSort &&
candidateModel(crit) != decodedModel(crit)) {
              val str =
                s"""Reconstruction fails for
                    ${node.symbol} -> $value
                    Implied by: ${i.simpleString()}
                    on literal
                    ${crit.simpleString()}
                    DecodedModel ===================== ${decodedModel(crit)}
                    ${decodedModel.variableAssignments(crit).mkString("\n\t")}
                    CandidateModel ===================== ${candidateModel(crit)}
                    ${candidateModel.variableAssignments(crit).mkString("\n\t")}"""
println(str)
}
}
changed = true
}
case None => () // TODO: Model failed to be reconstructed
}
}
// order
if (!changed) {
verbose("No implications ... ")
val undefVars =
vars.filterNot(candidateModel.containsVariable(_)).toList
if (undefVars.isEmpty) {
verbose("No undefined variables \\n Done supporting critical atoms.")
// TODO: (Aleks) Do we actually care if the decoded model has extra
// values?
for ((variable, value) <- decodedModel.variableValuation
if (!candidateModel.contains(AST(variable)))) {
}
done = true
} else {
val chosen = undefVars.head // TODO: Call the method
val node = AST(chosen)
verbose("Copying from decoded model " + chosen + " -> " +
decodedModel(node).getSMT())
var attempts = 0
var done = false
var value = decodedModel(node)
var violated : List[AST] = List()
while (!done && attempts < 3) { // TODO: Tactic parameter
attempts += 1
candidateModel.overwrite(node, value)
//TODO: Construct one huge mega query to find this value?
violated = List()
for ( crit <- varsToCritical(node.symbol)
if !candidateModel.contains(crit)
&& numUndefValues(candidateModel, crit) == 0) {
// We will set the values only of the literals that have not
// been evaluated yet and have no unknowns Consider cascading
// expressions, do we need to watch all of them
//evaluateNode(decodedModel, candidateModel, crit)
AST.postVisit(crit,
candidateModel,
candidateModel,
evaluateNode)
if (crit.symbol.sort == BooleanSort &&
candidateModel(crit) != decodedModel(crit)) {
val str = """Migration violates:
${node.symbol} -> ${decodedModel(node)}
on literal
${crit.simpleString()}
DecodedModel
===================== ${decodedModel(crit)}
${decodedModel.variableAssignments(crit).mkString("\\n\\t")}
CandidateModel
===================== ${candidateModel(crit)}
${candidateModel.variableAssignments(crit).mkString("\\n\\t")}"""
println(str)
violated = crit :: violated
}
if(!violated.isEmpty) {
violatedConstraint(decodedModel,
node,
candidateModel,
violated) match {
case Some(v) => {
println("###")
value = v
}
// TODO: Flag conflict for analysis? Widen the interval?
case None => done = true
}
} else {
done = true
}
}
}
}
}
}
// TODO: Is this actually a partial model we are completing
verbose("Completing the model")
AST.postVisit(ast,
candidateModel,
decodedModel,
copyFromDecodedModelIfNotSet)
candidateModel
}
// TODO: (Aleks) Better name
  /** Searches for a new value for node that lies within one ULP of its
   *  decoded value and is consistent with the given violated critical atoms.
   */
def violatedConstraint(decodedModel : Model,
node : AST,
candidateModel : Model,
violated : List[AST]) = {
val eps = uppsat.ast.Leaf(
FloatingPointTheory.getULP(
decodedModel(node).symbol.asInstanceOf[FloatingPointLiteral]))
val lessThan = node <= (decodedModel(node) + eps)
val greaterThan = node >= (decodedModel(node) - eps)
val newConjuncts = lessThan :: greaterThan :: violated
val combinedConstraint = AST(NaryConjunction(newConjuncts.length),
List(),
newConjuncts)
// TODO: Eclipse says that v != unknown will *almost* never happen.
//
// val vars = ast.iterator.toList.filter(_.isVariable)
//
// val assertions : List[(ConcreteFunctionSymbol, AST)] =
// for (v <- vars if( v != unknown &&
// candidateModel.contains(v))) yield {
// (v.symbol, candidateModel(v))
// }
val assertions = List()
ModelEvaluator.evalAST(combinedConstraint,
node.symbol, assertions, inputTheory)
}
// TODO: (Aleks) Should this only update the candidateModel if ast has
// children?
  /** Evaluates ast bottom-up, taking the values of subtrees from the
   *  candidate model (falling back to the decoded model) and recording the
   *  resulting value of ast in the candidate model.
   */
def evaluateNode(decodedModel : Model,
candidateModel : Model,
ast : AST) : Model = {
ast match {
case AST(symbol, label, List()) => ()
case AST(symbol, label, children) if !candidateModel.contains(ast) => {
val newChildren = children.map{
getCurrentValue(_, decodedModel, candidateModel)
}
val newAST = AST(symbol, label, newChildren.toList)
val newValue = ModelEvaluator.evalAST(newAST, inputTheory)
candidateModel.set(ast, newValue)
}
case _ => {
throw new Exception("Value occurring twice in model...")
}
}
candidateModel
}
  /** Returns the value of ast in the candidate model, first copying it over
   *  from the decoded model if it is not yet set.
   */
def getCurrentValue(ast : AST,
decodedModel : Model,
candidateModel : Model) : AST = {
if (! candidateModel.contains(ast)) {
candidateModel.set(ast, decodedModel(ast))
}
candidateModel(ast)
}
  /** Copies the value of ast from the decoded model into the candidate model,
   *  unless the candidate model already defines it.
   */
def copyFromDecodedModelIfNotSet (decodedModel : Model,
candidateModel : Model,
ast : AST) = {
if (! candidateModel.contains(ast)) {
candidateModel.set(ast, decodedModel(ast))
}
candidateModel
}
}
| uuverifiers/uppsat | src/main/scala/uppsat/approximation/components/FixpointReconstruction.scala | Scala | gpl-3.0 | 16,900 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
object Root
| wsargent/playframework | framework/src/sbt-plugin/src/sbt-test/play-sbt-plugin/multiproject/app/Root.scala | Scala | apache-2.0 | 89 |
/*
* Copyright © 2014 TU Berlin ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.emmalanguage
package lib.ml.clustering
import api._
import lib.linalg._
import lib.ml._
import test.util._
import scala.io.Source
class KMeansSpec extends lib.BaseLibSpec {
val delimiter = "\\t"
val runs = 2
val iterations = 2
val overlap = .75
val path = "/ml/clustering/kmeans"
val temp = tempPath(path)
override def tempPaths: Seq[String] = Seq(path)
override def resources = for {
file <- Seq("points.tsv", "clusters.tsv")
} yield () => {
materializeResource(s"$path/$file"): Unit
}
"kMeans" should "cluster points around the corners of a hypercube" in {
val exp = clusters(for {
line <- Source.fromFile(s"$temp/clusters.tsv").getLines().toSet[String]
} yield {
val Seq(pointID, clusterID) = line.split(delimiter).map(_.toLong).toSeq
LDPoint(pointID, dense(Array(0.0)), DPoint(clusterID, dense(Array(0.0))))
})
val act = clusters(for {
s <- run(exp.size, s"$temp/points.tsv")
} yield s)
val correctClusters = for {
act <- act
exp <- exp
if (act & exp).size / exp.size.toDouble >= overlap
} yield ()
correctClusters.size shouldBe exp.size
}
def clusters(solution: Set[kMeans.Solution[Long]]): Iterable[Set[Long]] =
solution.groupBy(_.label.id).mapValues(_.map(_.id).toSet).values
def run(k: Int, input: String): Set[kMeans.Solution[Long]] = {
// read the input
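    // expected layout per line (tab-separated): <point id> <coord 1> ... <coord d>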
val points = for (line <- DataBag.readText(input)) yield {
val record = line.split("\\t")
DPoint(record.head.toLong, dense(record.tail.map(_.toDouble)))
}
// do the clustering
val result = kMeans(2, k, runs, iterations)(points)
// return the solution as a local set
result.collect().toSet[kMeans.Solution[Long]]
}
}
| emmalanguage/emma | emma-lib/src/test/scala/org/emmalanguage/lib/ml/clustering/KMeansSpec.scala | Scala | apache-2.0 | 2,381 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Mon Sep 7 15:05:06 EDT 2009
* @see LICENSE (MIT style license file).
*/
package scalation.util
import collection.mutable.ResizableArray
import math.random
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PQItem` trait should be mixed in for items going on a PQueue.
*/
trait PQItem extends Identifiable
{
/** The activation time for the item in the time-ordered priority queue
*/
var actTime: Double = 0.0
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compare two items (PQItems) based on their actTime.
* @param other the other item to compare with this item
*/
def compare (other: PQItem) = { actTime compare other.actTime }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Convert the item (PQItem) to a string.
*/
override def toString = "PQItem (" + me + ", " + actTime + ")"
} // PQItem
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PQueue` class provides a simple linear implementation for priority queues.
 *  Once a bug in Scala 2.8 is fixed, we may wish to switch to the logarithmic implementation
* in scala.collection.mutable.PriorityQueue.
*/
class PQueue [T <: PQItem]
extends ResizableArray [T] with Serializable
{
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add an item to the priority queue ordered by actTime.
* @param item the item to add
*/
def += (item: T)
{
var i = size0 - 1
size0 = i + 2
ensureSize (size0)
while (i >= 0 && item.actTime < this(i).actTime) {
this(i + 1) = this(i)
i -= 1
} // while
this(i + 1) = item
} // +=
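    // Note: the backing array is kept sorted by actTime, so insertion is O(n);
    // dequeue (below) removes the minimum from the front, shifting the rest.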
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Remove the specific item from the priority queue.
* @param item the item to remove
*/
def -= (item: T): Boolean =
{
var found = false
var i = 0
while (i < size0 && ! found) {
if (this(i).id == item.id) found = true
i += 1
} // while
if (found) {
// println ("item to remove found at = " + (i - 1) + ", queue size = " + size0)
while (i < size0) { this(i - 1) = this(i); i += 1 }
// this (size0 - 1) = null
size0 -= 1
} // if
found
} // -=
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Remove and return the first item (least actTime) from the priority queue.
*/
def dequeue (): T =
{
val item = this(0)
for (i <- 0 until size0 - 1) this(i) = this(i + 1)
// this (size0 - 1) = null
size0 -= 1
item
} // dequeue
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the size (number of contained items) of the priority queue.
*/
//def size = size0
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Indicate whether the priority queue is empty.
*/
override def isEmpty: Boolean = size0 == 0
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Show the contents of the priority queue.
*/
override def toString: String =
{
var s = "PQueue ( "
foreach (s += _ + " ")
s + ")"
} // toString
} // PQueue class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `PQueueTest` object is used to test the `PQueue` class.
*/
object PQueueTest extends App
{
class Element (time: Double) extends PQItem
{
{
actTime = time // must set activation time
} // primary constructor
} // Element class
val pq = new PQueue [Element]
for (i <- 1 to 10) pq += new Element (10.0 * random)
while ( ! pq.isEmpty) println (pq.dequeue ())
val e1 = new Element (10.0)
val e2 = new Element (30.0)
val e3 = new Element (20.0)
pq += e1
pq += e2
pq += e3
println ("pq = " + pq)
println ("found = " + (pq -= e3) + ", queue size = " + pq.size)
println ("pq = " + pq)
} // PQueueTest object
| mvnural/scalation | src/main/scala/scalation/util/PQueue.scala | Scala | mit | 4,479 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui.storage
import scala.collection.mutable
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.ui._
import org.apache.spark.scheduler._
import org.apache.spark.storage._
/** Web UI showing storage status of all RDDs in the given SparkContext. */
private[ui] class StorageTab(parent: SparkUI) extends SparkUITab(parent, "storage") {
val listener = parent.storageListener
attachPage(new StoragePage(this))
attachPage(new RDDPage(this))
}
/**
* :: DeveloperApi ::
* A SparkListener that prepares information to be displayed on the BlockManagerUI.
*
* This class is thread-safe (unlike JobProgressListener)
*/
@DeveloperApi
class StorageListener(storageStatusListener: StorageStatusListener) extends BlockStatusListener {
private[ui] val _rddInfoMap = mutable.Map[Int, RDDInfo]() // exposed for testing
def storageStatusList: Seq[StorageStatus] = storageStatusListener.storageStatusList
/** Filter RDD info to include only those with cached partitions */
def rddInfoList: Seq[RDDInfo] = synchronized {
_rddInfoMap.values.filter(_.numCachedPartitions > 0).toSeq
}
/** Update the storage info of the RDDs whose blocks are among the given updated blocks */
private def updateRDDInfo(updatedBlocks: Seq[(BlockId, BlockStatus)]): Unit = {
val rddIdsToUpdate = updatedBlocks.flatMap { case (bid, _) => bid.asRDDId.map(_.rddId) }.toSet
val rddInfosToUpdate = _rddInfoMap.values.toSeq.filter { s => rddIdsToUpdate.contains(s.id) }
StorageUtils.updateRddInfo(rddInfosToUpdate, storageStatusList)
}
/**
* Assumes the storage status list is fully up-to-date. This implies the corresponding
* StorageStatusSparkListener must process the SparkListenerTaskEnd event before this listener.
*/
override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = synchronized {
val metrics = taskEnd.taskMetrics
if (metrics != null && metrics.updatedBlocks.isDefined) {
updateRDDInfo(metrics.updatedBlocks.get)
}
}
override def onStageSubmitted(stageSubmitted: SparkListenerStageSubmitted): Unit = synchronized {
val rddInfos = stageSubmitted.stageInfo.rddInfos
rddInfos.foreach { info => _rddInfoMap.getOrElseUpdate(info.id, info) }
}
override def onStageCompleted(stageCompleted: SparkListenerStageCompleted): Unit = synchronized {
// Remove all partitions that are no longer cached in current completed stage
val completedRddIds = stageCompleted.stageInfo.rddInfos.map(r => r.id).toSet
_rddInfoMap.retain { case (id, info) =>
!completedRddIds.contains(id) || info.numCachedPartitions > 0
}
}
override def onUnpersistRDD(unpersistRDD: SparkListenerUnpersistRDD): Unit = synchronized {
_rddInfoMap.remove(unpersistRDD.rddId)
}
}
| ArvinDevel/onlineAggregationOnSparkV2 | core/src/main/scala/org/apache/spark/ui/storage/StorageTab.scala | Scala | apache-2.0 | 3,585 |
/* __ *\
** ________ ___ / / ___ __ ____ Scala.js tools **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013-2014, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
** /____/\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\* */
package org.scalajs.core.tools.optimizer
import scala.language.implicitConversions
import scala.annotation.{switch, tailrec}
import scala.collection.mutable
import scala.util.control.{NonFatal, ControlThrowable, TailCalls}
import scala.util.control.TailCalls.{done => _, _} // "done" is too generic a term
import org.scalajs.core.ir._
import Definitions.{ObjectClass, isConstructorName, isReflProxyName}
import Trees._
import Types._
import org.scalajs.core.tools.sem.{CheckedBehavior, Semantics}
import org.scalajs.core.tools.javascript.{LongImpl, OutputMode}
import org.scalajs.core.tools.logging._
/** Optimizer core.
 *  Designed to be "mixed in" to [[IncOptimizer#MethodImpl#Optimizer]].
* This is the core of the optimizer. It contains all the smart things the
* optimizer does. To perform inlining, it relies on abstract protected
* methods to identify the target of calls.
*/
private[optimizer] abstract class OptimizerCore(
semantics: Semantics, outputMode: OutputMode) {
import OptimizerCore._
type MethodID <: AbstractMethodID
val myself: MethodID
/** Returns the body of a method. */
protected def getMethodBody(method: MethodID): MethodDef
/** Returns the list of possible targets for a dynamically linked call. */
protected def dynamicCall(intfName: String,
methodName: String): List[MethodID]
/** Returns the target of a static call. */
protected def staticCall(className: String,
methodName: String): Option[MethodID]
/** Returns the target of a call to a static method. */
protected def callStatic(className: String,
methodName: String): Option[MethodID]
/** Returns the list of ancestors of a class or interface. */
protected def getAncestorsOf(encodedName: String): List[String]
/** Tests whether the given module class has an elidable accessor.
* In other words, whether it is safe to discard a LoadModule of that
* module class which is not used.
*/
protected def hasElidableModuleAccessor(moduleClassName: String): Boolean
/** Tests whether the given class is inlineable.
* @return None if the class is not inlineable, Some(value) if it is, where
* value is a RecordValue with the initial value of its fields.
*/
protected def tryNewInlineableClass(className: String): Option[RecordValue]
/** Used local names and whether they are mutable */
private val usedLocalNames = mutable.Map.empty[String, Boolean]
private val usedLabelNames = mutable.Set.empty[String]
/** A list of the States that have been allocated so far, and must be saved.
*
* This list only ever grows, even though, in theory, it will keep
* references to states that are not used anymore.
* This creates a "temporary memory leak", but the list is discarded when
* `optimize` terminates anyway because the whole OptimizerCore is discarded.
* It also means that RollbackException will save more state than strictly
* necessary, but it is not incorrect to do so.
*
* Manual "memory management" of this list has caused issues such as #1515
* and #1843 in the past. So now we just let it grow in a "region-allocated"
* style of memory management.
*/
private var statesInUse: List[State] = Nil
private var disableOptimisticOptimizations: Boolean = false
private var rollbacksCount: Int = 0
private val attemptedInlining = mutable.ListBuffer.empty[MethodID]
private var curTrampolineId = 0
def optimize(thisType: Type, originalDef: MethodDef): LinkedMember[MethodDef] = {
try {
val MethodDef(static, name, params, resultType, body) = originalDef
val (newParams, newBody) = try {
transformIsolatedBody(Some(myself), thisType, params, resultType, body)
} catch {
case _: TooManyRollbacksException =>
usedLocalNames.clear()
usedLabelNames.clear()
statesInUse = Nil
disableOptimisticOptimizations = true
transformIsolatedBody(Some(myself), thisType, params, resultType, body)
}
val m = MethodDef(static, name, newParams, resultType,
newBody)(originalDef.optimizerHints, None)(originalDef.pos)
val info = Infos.generateMethodInfo(m)
new LinkedMember(info, m, None)
} catch {
case NonFatal(cause) =>
throw new OptimizeException(myself, attemptedInlining.distinct.toList, cause)
case e: Throwable =>
// This is a fatal exception. Don't wrap, just output debug info error
Console.err.println(exceptionMsg(
myself, attemptedInlining.distinct.toList, e))
throw e
}
}
private def newSimpleState[A](initialValue: A): SimpleState[A] = {
val state = new SimpleState[A](initialValue)
statesInUse ::= state
state
}
private def freshLocalName(base: String, mutable: Boolean): String = {
val result = freshNameGeneric(usedLocalNames.contains, base)
usedLocalNames += result -> mutable
result
}
private def freshLabelName(base: String): String = {
val result = freshNameGeneric(usedLabelNames, base)
usedLabelNames += result
result
}
private val isReserved = isKeyword ++ Seq("arguments", "eval", "ScalaJS")
private def freshNameGeneric(nameUsed: String => Boolean,
base: String): String = {
if (!nameUsed(base) && !isReserved(base)) {
base
} else {
var i = 1
while (nameUsed(base + "$" + i))
i += 1
base + "$" + i
}
}
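  // For example, if "x" is already used, successive requests for base "x"
  // yield "x$1", "x$2", ...; reserved identifiers such as "eval" are renamed
  // the same way.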
// Just a helper to make the callsites more understandable
private def localIsMutable(name: String): Boolean = usedLocalNames(name)
private def tryOrRollback(body: CancelFun => TailRec[Tree])(
fallbackFun: () => TailRec[Tree]): TailRec[Tree] = {
if (disableOptimisticOptimizations) {
fallbackFun()
} else {
val trampolineId = curTrampolineId
val savedUsedLocalNames = usedLocalNames.toMap
val savedUsedLabelNames = usedLabelNames.toSet
val stateBackups = statesInUse.map(_.makeBackup())
body { () =>
throw new RollbackException(trampolineId, savedUsedLocalNames,
savedUsedLabelNames, stateBackups, fallbackFun)
}
}
}
private def isSubclass(lhs: String, rhs: String): Boolean =
getAncestorsOf(lhs).contains(rhs)
private val isSubclassFun = isSubclass _
private def isSubtype(lhs: Type, rhs: Type): Boolean =
Types.isSubtype(lhs, rhs)(isSubclassFun)
/** Transforms a statement.
*
* For valid expression trees, it is always the case that
* {{{
* transformStat(tree)
* ===
* pretransformExpr(tree)(finishTransformStat)
* }}}
*/
private def transformStat(tree: Tree)(implicit scope: Scope): Tree =
transform(tree, isStat = true)
/** Transforms an expression.
*
* It is always the case that
* {{{
* transformExpr(tree)
* ===
* pretransformExpr(tree)(finishTransformExpr)
* }}}
*/
private def transformExpr(tree: Tree)(implicit scope: Scope): Tree =
transform(tree, isStat = false)
/** Transforms an expression or a JSSpread. */
private def transformExprOrSpread(tree: Tree)(implicit scope: Scope): Tree = {
tree match {
case JSSpread(items) =>
JSSpread(transformExpr(items))(tree.pos)
case _ =>
transformExpr(tree)
}
}
/** Transforms a tree. */
private def transform(tree: Tree, isStat: Boolean)(
implicit scope: Scope): Tree = {
@inline implicit def pos = tree.pos
val result = tree match {
// Definitions
case VarDef(_, _, _, rhs) =>
/* A local var that is last (or alone) in its block is not terribly
* useful. Get rid of it.
* (Non-last VarDefs in blocks are handled in transformBlock.)
*/
transformStat(rhs)
// Control flow constructs
case tree: Block =>
transformBlock(tree, isStat)
case Labeled(ident @ Ident(label, _), tpe, body) =>
trampoline {
returnable(label, if (isStat) NoType else tpe, body, isStat,
usePreTransform = false)(finishTransform(isStat))
}
case Assign(lhs, rhs) =>
val cont = { (preTransLhs: PreTransform) =>
resolveLocalDef(preTransLhs) match {
case PreTransRecordTree(lhsTree, lhsOrigType, lhsCancelFun) =>
val recordType = lhsTree.tpe.asInstanceOf[RecordType]
pretransformNoLocalDef(rhs) {
case PreTransRecordTree(rhsTree, rhsOrigType, rhsCancelFun) =>
if (rhsTree.tpe != recordType || rhsOrigType != lhsOrigType)
lhsCancelFun()
TailCalls.done(Assign(lhsTree, rhsTree))
case _ =>
lhsCancelFun()
}
case PreTransTree(lhsTree, _) =>
TailCalls.done(Assign(lhsTree, transformExpr(rhs)))
}
}
trampoline {
lhs match {
case lhs: Select =>
pretransformSelectCommon(lhs, isLhsOfAssign = true)(cont)
case _ =>
pretransformExpr(lhs)(cont)
}
}
case Return(expr, optLabel) =>
val optInfo = optLabel match {
case Some(Ident(label, _)) =>
Some(scope.env.labelInfos(label))
case None =>
scope.env.labelInfos.get("")
}
optInfo.fold[Tree] {
Return(transformExpr(expr), None)
} { info =>
val newOptLabel = Some(Ident(info.newName, None))
if (!info.acceptRecords) {
val newExpr = transformExpr(expr)
info.returnedTypes.value ::= (newExpr.tpe, RefinedType(newExpr.tpe))
Return(newExpr, newOptLabel)
} else trampoline {
pretransformNoLocalDef(expr) { texpr =>
texpr match {
case PreTransRecordTree(newExpr, origType, cancelFun) =>
info.returnedTypes.value ::= (newExpr.tpe, origType)
TailCalls.done(Return(newExpr, newOptLabel))
case PreTransTree(newExpr, tpe) =>
info.returnedTypes.value ::= (newExpr.tpe, tpe)
TailCalls.done(Return(newExpr, newOptLabel))
}
}
}
}
case If(cond, thenp, elsep) =>
val newCond = transformExpr(cond)
newCond match {
case BooleanLiteral(condValue) =>
if (condValue) transform(thenp, isStat)
else transform(elsep, isStat)
case _ =>
val newThenp = transform(thenp, isStat)
val newElsep = transform(elsep, isStat)
val refinedType =
constrainedLub(newThenp.tpe, newElsep.tpe, tree.tpe)
foldIf(newCond, newThenp, newElsep)(refinedType)
}
case While(cond, body, optLabel) =>
val newCond = transformExpr(cond)
newCond match {
case BooleanLiteral(false) => Skip()
case _ =>
optLabel match {
case None =>
While(newCond, transformStat(body), None)
case Some(labelIdent @ Ident(label, _)) =>
val newLabel = freshLabelName(label)
val info = new LabelInfo(newLabel, acceptRecords = false,
returnedTypes = newSimpleState(Nil))
While(newCond, {
val bodyScope = scope.withEnv(
scope.env.withLabelInfo(label, info))
transformStat(body)(bodyScope)
}, Some(Ident(newLabel, None)(labelIdent.pos)))
}
}
case DoWhile(body, cond, None) =>
val newBody = transformStat(body)
val newCond = transformExpr(cond)
newCond match {
case BooleanLiteral(false) => newBody
case _ => DoWhile(newBody, newCond, None)
}
case Try(block, errVar, EmptyTree, finalizer) =>
val newBlock = transform(block, isStat)
val newFinalizer = transformStat(finalizer)
Try(newBlock, errVar, EmptyTree, newFinalizer)(newBlock.tpe)
case Try(block, errVar @ Ident(name, originalName), handler, finalizer) =>
val newBlock = transform(block, isStat)
val newName = freshLocalName(name, false)
val newOriginalName = originalName.orElse(Some(name))
val localDef = LocalDef(RefinedType(AnyType), true,
ReplaceWithVarRef(newName, newOriginalName, newSimpleState(true), None))
val newHandler = {
val handlerScope = scope.withEnv(scope.env.withLocalDef(name, localDef))
transform(handler, isStat)(handlerScope)
}
val newFinalizer = transformStat(finalizer)
val refinedType = constrainedLub(newBlock.tpe, newHandler.tpe, tree.tpe)
Try(newBlock, Ident(newName, newOriginalName)(errVar.pos),
newHandler, newFinalizer)(refinedType)
case Throw(expr) =>
Throw(transformExpr(expr))
case Continue(optLabel) =>
val newOptLabel = optLabel map { label =>
Ident(scope.env.labelInfos(label.name).newName, None)(label.pos)
}
Continue(newOptLabel)
case Match(selector, cases, default) =>
val newSelector = transformExpr(selector)
newSelector match {
case newSelector: Literal =>
val body = cases collectFirst {
case (alts, body) if alts.exists(literal_===(_, newSelector)) => body
} getOrElse default
transform(body, isStat)
case _ =>
Match(newSelector,
cases map (c => (c._1, transform(c._2, isStat))),
transform(default, isStat))(tree.tpe)
}
// Scala expressions
case New(cls, ctor, args) =>
New(cls, ctor, args map transformExpr)
case StoreModule(cls, value) =>
StoreModule(cls, transformExpr(value))
case tree: Select =>
trampoline {
pretransformSelectCommon(tree, isLhsOfAssign = false)(
finishTransform(isStat = false))
}
case tree: Apply =>
trampoline {
pretransformApply(tree, isStat, usePreTransform = false)(
finishTransform(isStat))
}
case tree: ApplyStatically =>
trampoline {
pretransformStaticApply(tree, isStat, usePreTransform = false)(
finishTransform(isStat))
}
case tree: ApplyStatic =>
trampoline {
pretransformApplyStatic(tree, isStat, usePreTransform = false)(
finishTransform(isStat))
}
case tree @ UnaryOp(_, arg) =>
if (isStat) transformStat(arg)
else transformUnaryOp(tree)
case tree @ BinaryOp(op, lhs, rhs) =>
if (isStat) Block(transformStat(lhs), transformStat(rhs))
else transformBinaryOp(tree)
case NewArray(tpe, lengths) =>
NewArray(tpe, lengths map transformExpr)
case ArrayValue(tpe, elems) =>
ArrayValue(tpe, elems map transformExpr)
case ArrayLength(array) =>
ArrayLength(transformExpr(array))
case ArraySelect(array, index) =>
ArraySelect(transformExpr(array), transformExpr(index))(tree.tpe)
case RecordValue(tpe, elems) =>
RecordValue(tpe, elems map transformExpr)
case IsInstanceOf(expr, ClassType(ObjectClass)) =>
transformExpr(BinaryOp(BinaryOp.!==, expr, Null()))
case IsInstanceOf(expr, tpe) =>
trampoline {
pretransformExpr(expr) { texpr =>
val result = {
if (isSubtype(texpr.tpe.base, tpe)) {
if (texpr.tpe.isNullable)
BinaryOp(BinaryOp.!==, finishTransformExpr(texpr), Null())
else
Block(finishTransformStat(texpr), BooleanLiteral(true))
} else {
if (texpr.tpe.isExact)
Block(finishTransformStat(texpr), BooleanLiteral(false))
else
IsInstanceOf(finishTransformExpr(texpr), tpe)
}
}
TailCalls.done(result)
}
}
case AsInstanceOf(expr, ClassType(ObjectClass)) =>
transformExpr(expr)
case AsInstanceOf(expr, cls) =>
trampoline {
pretransformExpr(tree)(finishTransform(isStat))
}
case Unbox(arg, charCode) =>
trampoline {
pretransformExpr(arg) { targ =>
foldUnbox(targ, charCode)(finishTransform(isStat))
}
}
case GetClass(expr) =>
trampoline {
pretransformExpr(expr) { texpr =>
texpr.tpe match {
case RefinedType(base: ReferenceType, true, false) =>
TailCalls.done(Block(
finishTransformStat(texpr),
ClassOf(base)))
case _ =>
TailCalls.done(GetClass(finishTransformExpr(texpr)))
}
}
}
// JavaScript expressions
case JSNew(ctor, args) =>
JSNew(transformExpr(ctor), args map transformExprOrSpread)
case JSDotSelect(qualifier, item) =>
JSDotSelect(transformExpr(qualifier), item)
case JSBracketSelect(qualifier, item) =>
foldJSBracketSelect(transformExpr(qualifier), transformExpr(item))
case tree: JSFunctionApply =>
trampoline {
pretransformJSFunctionApply(tree, isStat, usePreTransform = false)(
finishTransform(isStat))
}
case JSDotMethodApply(receiver, method, args) =>
JSDotMethodApply(transformExpr(receiver), method,
args map transformExprOrSpread)
case JSBracketMethodApply(receiver, method, args) =>
JSBracketMethodApply(transformExpr(receiver), transformExpr(method),
args map transformExprOrSpread)
case JSSuperBracketSelect(cls, qualifier, item) =>
JSSuperBracketSelect(cls, transformExpr(qualifier), transformExpr(item))
case JSSuperBracketCall(cls, receiver, method, args) =>
JSSuperBracketCall(cls, transformExpr(receiver), transformExpr(method),
args map transformExprOrSpread)
case JSSuperConstructorCall(args) =>
JSSuperConstructorCall(args map transformExprOrSpread)
case JSDelete(JSDotSelect(obj, prop)) =>
JSDelete(JSDotSelect(transformExpr(obj), prop))
case JSDelete(JSBracketSelect(obj, prop)) =>
JSDelete(JSBracketSelect(transformExpr(obj), transformExpr(prop)))
case JSUnaryOp(op, lhs) =>
JSUnaryOp(op, transformExpr(lhs))
case JSBinaryOp(op, lhs, rhs) =>
JSBinaryOp(op, transformExpr(lhs), transformExpr(rhs))
case JSArrayConstr(items) =>
JSArrayConstr(items map transformExprOrSpread)
case JSObjectConstr(fields) =>
JSObjectConstr(fields map {
case (name, value) => (name, transformExpr(value))
})
// Atomic expressions
case _:VarRef | _:This =>
trampoline {
pretransformExpr(tree)(finishTransform(isStat))
}
case Closure(captureParams, params, body, captureValues) =>
transformClosureCommon(captureParams, params, body,
captureValues.map(transformExpr))
// Trees that need not be transformed
case _:Skip | _:Debugger | _:LoadModule | _:LoadJSConstructor |
_:LoadJSModule | _:JSEnvInfo | _:JSLinkingInfo | _:Literal |
EmptyTree =>
tree
case _ =>
sys.error(s"Invalid tree in transform of class ${tree.getClass.getName}: $tree")
}
if (isStat) keepOnlySideEffects(result)
else result
}
private def transformClosureCommon(captureParams: List[ParamDef],
params: List[ParamDef], body: Tree, newCaptureValues: List[Tree])(
implicit pos: Position): Closure = {
val (allNewParams, newBody) =
transformIsolatedBody(None, AnyType, captureParams ++ params, AnyType, body)
val (newCaptureParams, newParams) =
allNewParams.splitAt(captureParams.size)
Closure(newCaptureParams, newParams, newBody, newCaptureValues)
}
private def transformBlock(tree: Block, isStat: Boolean)(
implicit scope: Scope): Tree = {
def transformList(stats: List[Tree])(
implicit scope: Scope): Tree = stats match {
case last :: Nil =>
transform(last, isStat)
case (VarDef(Ident(name, originalName), vtpe, mutable, rhs)) :: rest =>
trampoline {
pretransformExpr(rhs) { trhs =>
withBinding(Binding(name, originalName, vtpe, mutable, trhs)) {
(restScope, cont1) =>
val newRest = transformList(rest)(restScope)
cont1(PreTransTree(newRest, RefinedType(newRest.tpe)))
} (finishTransform(isStat))
}
}
case stat :: rest =>
val transformedStat = transformStat(stat)
if (transformedStat.tpe == NothingType) transformedStat
else Block(transformedStat, transformList(rest))(stat.pos)
case Nil => // silence the exhaustivity warning in a sensible way
Skip()(tree.pos)
}
transformList(tree.stats)(scope)
}
/** Pretransforms a list of trees as a list of [[PreTransform]]s.
* This is a convenience method to use pretransformExpr on a list.
*/
private def pretransformExprs(trees: List[Tree])(
cont: List[PreTransform] => TailRec[Tree])(
implicit scope: Scope): TailRec[Tree] = {
trees match {
case first :: rest =>
pretransformExpr(first) { tfirst =>
pretransformExprs(rest) { trest =>
cont(tfirst :: trest)
}
}
case Nil =>
cont(Nil)
}
}
/** Pretransforms two trees as a pair of [[PreTransform]]s.
* This is a convenience method to use pretransformExpr on two trees.
*/
private def pretransformExprs(tree1: Tree, tree2: Tree)(
cont: (PreTransform, PreTransform) => TailRec[Tree])(
implicit scope: Scope): TailRec[Tree] = {
pretransformExpr(tree1) { ttree1 =>
pretransformExpr(tree2) { ttree2 =>
cont(ttree1, ttree2)
}
}
}
/** Pretransforms a tree and a list of trees as [[PreTransform]]s.
* This is a convenience method to use pretransformExpr.
*/
private def pretransformExprs(first: Tree, rest: List[Tree])(
cont: (PreTransform, List[PreTransform]) => TailRec[Tree])(
implicit scope: Scope): TailRec[Tree] = {
pretransformExpr(first) { tfirst =>
pretransformExprs(rest) { trest =>
cont(tfirst, trest)
}
}
}
/** Pretransforms a tree to get a refined type while avoiding to force
* things we might be able to optimize by folding and aliasing.
*/
private def pretransformExpr(tree: Tree)(cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = tailcall {
@inline implicit def pos = tree.pos
tree match {
case tree: Block =>
pretransformBlock(tree)(cont)
case VarRef(Ident(name, _)) =>
val localDef = scope.env.localDefs.getOrElse(name,
sys.error(s"Cannot find local def '$name' at $pos\n" +
s"While optimizing $myself\n" +
s"Env is ${scope.env}\nInlining ${scope.implsBeingInlined}"))
cont(PreTransLocalDef(localDef))
case This() =>
val localDef = scope.env.localDefs.getOrElse("this",
sys.error(s"Found invalid 'this' at $pos\n" +
s"While optimizing $myself\n" +
s"Env is ${scope.env}\nInlining ${scope.implsBeingInlined}"))
cont(PreTransLocalDef(localDef))
case If(cond, thenp, elsep) =>
val newCond = transformExpr(cond)
newCond match {
case BooleanLiteral(condValue) =>
if (condValue) pretransformExpr(thenp)(cont)
else pretransformExpr(elsep)(cont)
case _ =>
tryOrRollback { cancelFun =>
pretransformNoLocalDef(thenp) { tthenp =>
pretransformNoLocalDef(elsep) { telsep =>
(tthenp, telsep) match {
case (PreTransRecordTree(thenTree, thenOrigType, thenCancelFun),
PreTransRecordTree(elseTree, elseOrigType, elseCancelFun)) =>
val commonType =
if (thenTree.tpe == elseTree.tpe &&
thenOrigType == elseOrigType) thenTree.tpe
else cancelFun()
val refinedOrigType =
constrainedLub(thenOrigType, elseOrigType, tree.tpe)
cont(PreTransRecordTree(
If(newCond, thenTree, elseTree)(commonType),
refinedOrigType,
cancelFun))
case (PreTransRecordTree(thenTree, thenOrigType, thenCancelFun), _)
if telsep.tpe.isNothingType =>
cont(PreTransRecordTree(
If(newCond, thenTree, finishTransformExpr(telsep))(thenTree.tpe),
thenOrigType,
thenCancelFun))
case (_, PreTransRecordTree(elseTree, elseOrigType, elseCancelFun))
if tthenp.tpe.isNothingType =>
cont(PreTransRecordTree(
If(newCond, finishTransformExpr(tthenp), elseTree)(elseTree.tpe),
elseOrigType,
elseCancelFun))
case _ =>
val newThenp = finishTransformExpr(tthenp)
val newElsep = finishTransformExpr(telsep)
val refinedType =
constrainedLub(newThenp.tpe, newElsep.tpe, tree.tpe)
cont(PreTransTree(
foldIf(newCond, newThenp, newElsep)(refinedType)))
}
}
}
} { () =>
val newThenp = transformExpr(thenp)
val newElsep = transformExpr(elsep)
val refinedType =
constrainedLub(newThenp.tpe, newElsep.tpe, tree.tpe)
cont(PreTransTree(
foldIf(newCond, newThenp, newElsep)(refinedType)))
}
}
case Match(selector, cases, default) =>
val newSelector = transformExpr(selector)
newSelector match {
case newSelector: Literal =>
val body = cases collectFirst {
case (alts, body) if alts.exists(literal_===(_, newSelector)) => body
} getOrElse default
pretransformExpr(body)(cont)
case _ =>
cont(PreTransTree(Match(newSelector,
cases map (c => (c._1, transformExpr(c._2))),
transformExpr(default))(tree.tpe)))
}
case Labeled(ident @ Ident(label, _), tpe, body) =>
returnable(label, tpe, body, isStat = false, usePreTransform = true)(cont)
case New(cls, ctor, args) =>
pretransformExprs(args) { targs =>
pretransformNew(tree, cls, ctor, targs)(cont)
}
case tree: Select =>
pretransformSelectCommon(tree, isLhsOfAssign = false)(cont)
case tree: Apply =>
pretransformApply(tree, isStat = false,
usePreTransform = true)(cont)
case tree: ApplyStatically =>
pretransformStaticApply(tree, isStat = false,
usePreTransform = true)(cont)
case tree: ApplyStatic =>
pretransformApplyStatic(tree, isStat = false,
usePreTransform = true)(cont)
case tree: JSFunctionApply =>
pretransformJSFunctionApply(tree, isStat = false,
usePreTransform = true)(cont)
case AsInstanceOf(expr, tpe) =>
pretransformExpr(expr) { texpr =>
tpe match {
case ClassType(ObjectClass) =>
cont(texpr)
case _ =>
if (isSubtype(texpr.tpe.base, tpe)) {
cont(texpr)
} else {
cont(PreTransTree(
AsInstanceOf(finishTransformExpr(texpr), tpe)))
}
}
}
case Closure(captureParams, params, body, captureValues) =>
pretransformExprs(captureValues) { tcaptureValues =>
tryOrRollback { cancelFun =>
val captureBindings = for {
(ParamDef(Ident(name, origName), tpe, mutable, rest), value) <-
captureParams zip tcaptureValues
} yield {
assert(!rest, s"Found a rest capture parameter at $pos")
Binding(name, origName, tpe, mutable, value)
}
withNewLocalDefs(captureBindings) { (captureLocalDefs, cont1) =>
val replacement = TentativeClosureReplacement(
captureParams, params, body, captureLocalDefs,
alreadyUsed = newSimpleState(false), cancelFun)
val localDef = LocalDef(
RefinedType(AnyType, isExact = false, isNullable = false),
mutable = false,
replacement)
cont1(PreTransLocalDef(localDef))
} (cont)
} { () =>
val newClosure = transformClosureCommon(captureParams, params, body,
tcaptureValues.map(finishTransformExpr))
cont(PreTransTree(
newClosure,
RefinedType(AnyType, isExact = false, isNullable = false)))
}
}
case _ =>
val result = transformExpr(tree)
cont(PreTransTree(result))
}
}
private def pretransformBlock(tree: Block)(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
def pretransformList(stats: List[Tree])(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = stats match {
case last :: Nil =>
pretransformExpr(last)(cont)
case (VarDef(Ident(name, originalName), vtpe, mutable, rhs)) :: rest =>
pretransformExpr(rhs) { trhs =>
withBinding(Binding(name, originalName, vtpe, mutable, trhs)) {
(restScope, cont1) =>
pretransformList(rest)(cont1)(restScope)
} (cont)
}
case stat :: rest =>
implicit val pos = tree.pos
val transformedStat = transformStat(stat)
transformedStat match {
case Skip() =>
pretransformList(rest)(cont)
case _ =>
if (transformedStat.tpe == NothingType)
cont(PreTransTree(transformedStat, RefinedType.Nothing))
else {
pretransformList(rest) { trest =>
cont(PreTransBlock(transformedStat :: Nil, trest))
}
}
}
case Nil => // silence the exhaustivity warning in a sensible way
TailCalls.done(Skip()(tree.pos))
}
pretransformList(tree.stats)(cont)(scope)
}
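
  /* Example (a sketch): pretransforming the block
   *   { val x = 1; x + 2 }
   * pretransforms the right-hand side `1`, registers the binding `x -> 1`
   * through `withBinding`, and then pretransforms `x + 2` in the extended
   * scope, where constant folding can reduce the whole block to `3`.
   */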
private def pretransformSelectCommon(tree: Select, isLhsOfAssign: Boolean)(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
val Select(qualifier, item) = tree
pretransformExpr(qualifier) { preTransQual =>
pretransformSelectCommon(tree.tpe, preTransQual, item,
isLhsOfAssign)(cont)(scope, tree.pos)
}
}
private def pretransformSelectCommon(expectedType: Type,
preTransQual: PreTransform, item: Ident, isLhsOfAssign: Boolean)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
preTransQual match {
case PreTransLocalDef(LocalDef(_, _,
InlineClassBeingConstructedReplacement(fieldLocalDefs, cancelFun))) =>
val fieldLocalDef = fieldLocalDefs(item.name)
if (!isLhsOfAssign || fieldLocalDef.mutable) {
cont(PreTransLocalDef(fieldLocalDef))
} else {
          /* This is an assignment to an immutable field of an inlineable class
* being constructed, but that does not appear at the "top-level" of
* one of its constructors. We cannot handle those, so we cancel.
* (Assignments at the top-level are normal initializations of these
* fields, and are transformed as vals in inlineClassConstructor.)
*/
cancelFun()
}
case PreTransLocalDef(LocalDef(_, _,
InlineClassInstanceReplacement(_, fieldLocalDefs, cancelFun))) =>
val fieldLocalDef = fieldLocalDefs(item.name)
if (!isLhsOfAssign || fieldLocalDef.mutable) {
cont(PreTransLocalDef(fieldLocalDef))
} else {
/* In an ideal world, this should not happen (assigning to an
* immutable field of an already constructed object). However, since
* we cannot IR-check that this does not happen (see #1021), this is
* effectively allowed by the IR spec. We are therefore not allowed
* to crash. We cancel instead. This will become an actual field
* (rather than an optimized local val) which is not considered pure
* (for that same reason).
*/
cancelFun()
}
case _ =>
resolveLocalDef(preTransQual) match {
case PreTransRecordTree(newQual, origType, cancelFun) =>
val recordType = newQual.tpe.asInstanceOf[RecordType]
val field = recordType.findField(item.name)
val sel = Select(newQual, item)(field.tpe)
sel.tpe match {
case _: RecordType =>
cont(PreTransRecordTree(sel, RefinedType(expectedType), cancelFun))
case _ =>
cont(PreTransTree(sel, RefinedType(sel.tpe)))
}
case PreTransTree(newQual, _) =>
cont(PreTransTree(Select(newQual, item)(expectedType),
RefinedType(expectedType)))
}
}
}
private def pretransformNew(tree: Tree, cls: ClassType, ctor: Ident,
targs: List[PreTransform])(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
tryNewInlineableClass(cls.className) match {
case Some(initialValue) =>
tryOrRollback { cancelFun =>
inlineClassConstructor(
new AllocationSite(tree),
cls, initialValue, ctor, targs, cancelFun)(cont)
} { () =>
cont(PreTransTree(
New(cls, ctor, targs.map(finishTransformExpr)),
RefinedType(cls, isExact = true, isNullable = false)))
}
case None =>
cont(PreTransTree(
New(cls, ctor, targs.map(finishTransformExpr)),
RefinedType(cls, isExact = true, isNullable = false)))
}
}
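
  /* Illustrative behaviour: for an inlineable class, `new C(args)` is first
   * attempted as an inline allocation whose fields become LocalDefs (see
   * inlineClassConstructor); if the object later needs to materialize,
   * cancelFun rolls the attempt back to an actual `New` node, which is also
   * the direct path taken for non-inlineable classes.
   */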
/** Resolves any LocalDef in a [[PreTransform]]. */
private def resolveLocalDef(preTrans: PreTransform): PreTransGenTree = {
implicit val pos = preTrans.pos
preTrans match {
case PreTransBlock(stats, result) =>
resolveLocalDef(result) match {
case PreTransRecordTree(tree, tpe, cancelFun) =>
PreTransRecordTree(Block(stats :+ tree), tpe, cancelFun)
case PreTransTree(tree, tpe) =>
PreTransTree(Block(stats :+ tree), tpe)
}
case PreTransLocalDef(localDef @ LocalDef(tpe, _, replacement)) =>
replacement match {
case ReplaceWithRecordVarRef(name, originalName,
recordType, used, cancelFun) =>
used.value = true
PreTransRecordTree(
VarRef(Ident(name, originalName))(recordType), tpe, cancelFun)
case InlineClassInstanceReplacement(recordType, fieldLocalDefs, cancelFun) =>
if (!isImmutableType(recordType))
cancelFun()
PreTransRecordTree(
RecordValue(recordType, recordType.fields.map(
f => fieldLocalDefs(f.name).newReplacement)),
tpe, cancelFun)
case _ =>
PreTransTree(localDef.newReplacement, localDef.tpe)
}
case preTrans: PreTransGenTree =>
preTrans
}
}
/** Combines pretransformExpr and resolveLocalDef in one convenience method. */
private def pretransformNoLocalDef(tree: Tree)(
cont: PreTransGenTree => TailRec[Tree])(
implicit scope: Scope): TailRec[Tree] = {
pretransformExpr(tree) { ttree =>
cont(resolveLocalDef(ttree))
}
}
/** Finishes a pretransform, either a statement or an expression. */
private def finishTransform(isStat: Boolean): PreTransCont = { preTrans =>
TailCalls.done {
if (isStat) finishTransformStat(preTrans)
else finishTransformExpr(preTrans)
}
}
  /** Finishes an expression pretransform to get a normal [[Tree]].
   *  This method (together with finishTransformStat) must not be called more
   *  than once per pretransform and per translation.
   *  By "per translation", we mean once per alternative path through
   *  `tryOrRollback`: it may still be called several times overall, as long
   *  as it is called at most once in the 'try' part and at most once in the
   *  'fallback' part.
   */
private def finishTransformExpr(preTrans: PreTransform): Tree = {
implicit val pos = preTrans.pos
preTrans match {
case PreTransBlock(stats, result) =>
Block(stats :+ finishTransformExpr(result))
case PreTransLocalDef(localDef) =>
localDef.newReplacement
case PreTransRecordTree(_, _, cancelFun) =>
cancelFun()
case PreTransTree(tree, _) =>
tree
}
}
  /** Finishes a statement pretransform to get a normal [[Tree]].
   *  This method (together with finishTransformExpr) must not be called more
   *  than once per pretransform and per translation.
   *  By "per translation", we mean once per alternative path through
   *  `tryOrRollback`: it may still be called several times overall, as long
   *  as it is called at most once in the 'try' part and at most once in the
   *  'fallback' part.
   */
private def finishTransformStat(stat: PreTransform): Tree = stat match {
case PreTransBlock(stats, result) =>
Block(stats :+ finishTransformStat(result))(stat.pos)
case PreTransLocalDef(_) =>
Skip()(stat.pos)
case PreTransRecordTree(tree, _, _) =>
keepOnlySideEffects(tree)
case PreTransTree(tree, _) =>
keepOnlySideEffects(tree)
}
/** Keeps only the side effects of a Tree (overapproximation). */
private def keepOnlySideEffects(stat: Tree): Tree = stat match {
    case _: VarRef | _: This | _: Literal =>
Skip()(stat.pos)
case Block(init :+ last) =>
Block(init :+ keepOnlySideEffects(last))(stat.pos)
case LoadModule(ClassType(moduleClassName)) =>
if (hasElidableModuleAccessor(moduleClassName)) Skip()(stat.pos)
else stat
case NewArray(_, lengths) =>
Block(lengths.map(keepOnlySideEffects))(stat.pos)
case Select(qualifier, _) =>
keepOnlySideEffects(qualifier)
case Closure(_, _, _, captureValues) =>
Block(captureValues.map(keepOnlySideEffects))(stat.pos)
case UnaryOp(_, arg) =>
keepOnlySideEffects(arg)
case If(cond, thenp, elsep) =>
(keepOnlySideEffects(thenp), keepOnlySideEffects(elsep)) match {
case (Skip(), Skip()) => keepOnlySideEffects(cond)
case (newThenp, newElsep) => If(cond, newThenp, newElsep)(NoType)(stat.pos)
}
case BinaryOp(_, lhs, rhs) =>
Block(keepOnlySideEffects(lhs), keepOnlySideEffects(rhs))(stat.pos)
case RecordValue(_, elems) =>
Block(elems.map(keepOnlySideEffects))(stat.pos)
case _ =>
stat
}
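
  /* For example, both branches of If(cond, IntLiteral(1), IntLiteral(2))
   * reduce to Skip(), so only the side effects of `cond` are kept; likewise
   * a field selection Select(qualifier, item) keeps only the side effects of
   * evaluating `qualifier`.
   */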
private def pretransformApply(tree: Apply, isStat: Boolean,
usePreTransform: Boolean)(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
val Apply(receiver, methodIdent @ Ident(methodName, _), args) = tree
implicit val pos = tree.pos
pretransformExprs(receiver, args) { (treceiver, targs) =>
def treeNotInlined =
cont(PreTransTree(Apply(finishTransformExpr(treceiver), methodIdent,
targs.map(finishTransformExpr))(tree.tpe), RefinedType(tree.tpe)))
treceiver.tpe.base match {
case NothingType =>
cont(treceiver)
case NullType =>
cont(PreTransTree(Block(
finishTransformStat(treceiver),
CallHelper("throwNullPointerException")(NothingType))))
case _ =>
if (isReflProxyName(methodName)) {
// Never inline reflective proxies
treeNotInlined
} else {
val cls = boxedClassForType(treceiver.tpe.base)
val impls =
if (treceiver.tpe.isExact) staticCall(cls, methodName).toList
else dynamicCall(cls, methodName)
val allocationSites =
(treceiver :: targs).map(_.tpe.allocationSite)
if (impls.isEmpty || impls.exists(impl =>
scope.implsBeingInlined((allocationSites, impl)))) {
              // impls.isEmpty can happen; leave the call as is so that it
              // still triggers the expected TypeError at run time
treeNotInlined
} else if (impls.size == 1) {
val target = impls.head
val intrinsicCode = getIntrinsicCode(target)
if (intrinsicCode >= 0) {
callIntrinsic(intrinsicCode, Some(treceiver), targs,
isStat, usePreTransform)(cont)
} else if (target.inlineable && (
target.shouldInline ||
shouldInlineBecauseOfArgs(target, treceiver :: targs))) {
inline(allocationSites, Some(treceiver), targs, target,
isStat, usePreTransform)(cont)
} else {
treeNotInlined
}
} else {
if (impls.forall(_.isForwarder)) {
val reference = impls.head
val areAllTheSame = getMethodBody(reference).body match {
// Trait impl forwarder
case ApplyStatic(ClassType(staticCls), Ident(methodName, _), _) =>
impls.tail.forall(getMethodBody(_).body match {
case ApplyStatic(ClassType(`staticCls`),
Ident(`methodName`, _), _) => true
case _ => false
})
// Bridge method
case MaybeBox(Apply(This(), Ident(methodName, _), referenceArgs), boxID) =>
impls.tail.forall(getMethodBody(_).body match {
case MaybeBox(Apply(This(), Ident(`methodName`, _), implArgs), `boxID`) =>
referenceArgs.zip(implArgs) forall {
case (MaybeUnbox(_, unboxID1), MaybeUnbox(_, unboxID2)) =>
unboxID1 == unboxID2
}
case _ => false
})
case body =>
throw new AssertionError("Invalid forwarder shape: " + body)
}
if (!areAllTheSame) {
// Not all doing the same thing
treeNotInlined
} else {
inline(allocationSites, Some(treceiver), targs, reference,
isStat, usePreTransform)(cont)
}
} else {
              // TODO? Inline multiple non-trait-impl forwarders with the exact same body?
treeNotInlined
}
}
}
}
}
}
private def boxedClassForType(tpe: Type): String = (tpe: @unchecked) match {
case ClassType(cls) => cls
case AnyType => Definitions.ObjectClass
case UndefType => Definitions.BoxedUnitClass
case BooleanType => Definitions.BoxedBooleanClass
case IntType => Definitions.BoxedIntegerClass
case LongType => Definitions.BoxedLongClass
case FloatType => Definitions.BoxedFloatClass
case DoubleType => Definitions.BoxedDoubleClass
case StringType => Definitions.StringClass
case ArrayType(_, _) => Definitions.ObjectClass
}
private def pretransformStaticApply(tree: ApplyStatically, isStat: Boolean,
usePreTransform: Boolean)(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
val ApplyStatically(receiver, clsType @ ClassType(cls),
methodIdent @ Ident(methodName, _), args) = tree
implicit val pos = tree.pos
def treeNotInlined0(transformedReceiver: Tree, transformedArgs: List[Tree]) =
cont(PreTransTree(ApplyStatically(transformedReceiver, clsType,
methodIdent, transformedArgs)(tree.tpe), RefinedType(tree.tpe)))
def treeNotInlined =
treeNotInlined0(transformExpr(receiver), args.map(transformExpr))
if (isReflProxyName(methodName)) {
// Never inline reflective proxies
treeNotInlined
} else {
val optTarget = staticCall(cls, methodName)
if (optTarget.isEmpty) {
// just in case
treeNotInlined
} else {
val target = optTarget.get
pretransformExprs(receiver, args) { (treceiver, targs) =>
val intrinsicCode = getIntrinsicCode(target)
if (intrinsicCode >= 0) {
callIntrinsic(intrinsicCode, Some(treceiver), targs,
isStat, usePreTransform)(cont)
} else {
val shouldInline = target.inlineable && (
target.shouldInline ||
shouldInlineBecauseOfArgs(target, treceiver :: targs))
val allocationSites =
(treceiver :: targs).map(_.tpe.allocationSite)
val beingInlined =
scope.implsBeingInlined((allocationSites, target))
if (shouldInline && !beingInlined) {
inline(allocationSites, Some(treceiver), targs, target,
isStat, usePreTransform)(cont)
} else {
treeNotInlined0(finishTransformExpr(treceiver),
targs.map(finishTransformExpr))
}
}
}
}
}
}
private def pretransformApplyStatic(tree: ApplyStatic, isStat: Boolean,
usePreTransform: Boolean)(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
val ApplyStatic(classType @ ClassType(cls),
methodIdent @ Ident(methodName, _), args) = tree
implicit val pos = tree.pos
def treeNotInlined0(transformedArgs: List[Tree]) =
cont(PreTransTree(ApplyStatic(classType, methodIdent,
transformedArgs)(tree.tpe), RefinedType(tree.tpe)))
def treeNotInlined = treeNotInlined0(args.map(transformExpr))
val optTarget = callStatic(cls, methodName)
if (optTarget.isEmpty) {
// just in case
treeNotInlined
} else {
val target = optTarget.get
pretransformExprs(args) { targs =>
val intrinsicCode = getIntrinsicCode(target)
if (intrinsicCode >= 0) {
callIntrinsic(intrinsicCode, None, targs,
isStat, usePreTransform)(cont)
} else {
val shouldInline = target.inlineable && (
target.shouldInline || shouldInlineBecauseOfArgs(target, targs))
val allocationSites = targs.map(_.tpe.allocationSite)
val beingInlined =
scope.implsBeingInlined((allocationSites, target))
if (shouldInline && !beingInlined) {
inline(allocationSites, None, targs, target,
isStat, usePreTransform)(cont)
} else {
treeNotInlined0(targs.map(finishTransformExpr))
}
}
}
}
}
private def pretransformJSFunctionApply(tree: JSFunctionApply,
isStat: Boolean, usePreTransform: Boolean)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
val JSFunctionApply(fun, args) = tree
if (args.exists(_.isInstanceOf[JSSpread])) {
cont(PreTransTree(
JSFunctionApply(transformExpr(fun), args.map(transformExprOrSpread))))
} else {
pretransformExpr(fun) { tfun =>
tfun match {
case PreTransLocalDef(LocalDef(_, false,
closure @ TentativeClosureReplacement(
captureParams, params, body, captureLocalDefs,
alreadyUsed, cancelFun))) if !alreadyUsed.value =>
alreadyUsed.value = true
pretransformExprs(args) { targs =>
inlineBody(
Some(PreTransTree(Undefined())), // `this` is `undefined`
captureParams ++ params, AnyType, body,
captureLocalDefs.map(PreTransLocalDef(_)) ++ targs, isStat,
usePreTransform)(cont)
}
case _ =>
cont(PreTransTree(
JSFunctionApply(finishTransformExpr(tfun), args.map(transformExpr))))
}
}
}
}
private val ClassNamesThatShouldBeInlined = Set(
"s_Predef$$less$colon$less",
"s_Predef$$eq$colon$eq",
"s_reflect_ManifestFactory$ByteManifest$",
"s_reflect_ManifestFactory$ShortManifest$",
"s_reflect_ManifestFactory$CharManifest$",
"s_reflect_ManifestFactory$IntManifest$",
"s_reflect_ManifestFactory$LongManifest$",
"s_reflect_ManifestFactory$FloatManifest$",
"s_reflect_ManifestFactory$DoubleManifest$",
"s_reflect_ManifestFactory$BooleanManifest$",
"s_reflect_ManifestFactory$UnitManifest$",
"s_reflect_ManifestFactory$AnyManifest$",
"s_reflect_ManifestFactory$ObjectManifest$",
"s_reflect_ManifestFactory$AnyValManifest$",
"s_reflect_ManifestFactory$NullManifest$",
"s_reflect_ManifestFactory$NothingManifest$"
)
private def shouldInlineBecauseOfArgs(target: MethodID,
receiverAndArgs: List[PreTransform]): Boolean = {
def isTypeLikelyOptimizable(tpe: RefinedType): Boolean = tpe.base match {
case ClassType(className) =>
ClassNamesThatShouldBeInlined.contains(className)
case _ =>
false
}
def isLikelyOptimizable(arg: PreTransform): Boolean = arg match {
case PreTransBlock(_, result) =>
isLikelyOptimizable(result)
case PreTransLocalDef(localDef) =>
(localDef.replacement match {
case TentativeClosureReplacement(_, _, _, _, _, _) => true
case ReplaceWithRecordVarRef(_, _, _, _, _) => true
case InlineClassBeingConstructedReplacement(_, _) => true
case InlineClassInstanceReplacement(_, _, _) => true
case _ =>
isTypeLikelyOptimizable(localDef.tpe)
}) && {
/* java.lang.Character is @inline so that *local* box/unbox pairs
* can be eliminated. But we don't want that to force inlining of
* a method only because we pass it a boxed Char.
*/
localDef.tpe.base match {
case ClassType(Definitions.BoxedCharacterClass) => false
case _ => true
}
}
case PreTransRecordTree(_, _, _) =>
true
case _ =>
isTypeLikelyOptimizable(arg.tpe)
}
receiverAndArgs.exists(isLikelyOptimizable) || {
target.toString == "s_reflect_ClassTag$.apply__jl_Class__s_reflect_ClassTag" &&
(receiverAndArgs.tail.head match {
case PreTransTree(ClassOf(_), _) => true
case _ => false
})
}
}
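
  /* Sketch of the intent: a call such as `foo(new Tuple2(a, b))` (a
   * hypothetical example), whose argument is a tentative inline allocation
   * (an InlineClassInstanceReplacement), is worth inlining because the callee
   * can then read the fields directly and the allocation may disappear
   * entirely; receiving a boxed Char is deliberately excluded, as explained
   * in the comment above.
   */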
private def inline(allocationSites: List[Option[AllocationSite]],
optReceiver: Option[PreTransform],
args: List[PreTransform], target: MethodID, isStat: Boolean,
usePreTransform: Boolean)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
require(target.inlineable)
attemptedInlining += target
val MethodDef(static, _, formals, resultType, body) = getMethodBody(target)
    assert(static == optReceiver.isEmpty,
        "There must be a receiver if and only if the method is not static")
body match {
case Skip() =>
assert(isStat, "Found Skip() in expression position")
cont(PreTransTree(
Block((optReceiver ++: args).map(finishTransformStat)),
RefinedType.NoRefinedType))
case _: Literal =>
cont(PreTransTree(
Block((optReceiver ++: args).map(finishTransformStat) :+ body),
RefinedType(body.tpe)))
case This() if args.isEmpty =>
        assert(optReceiver.isDefined,
            "There was a This(), so there should be a receiver")
cont(optReceiver.get)
case Select(This(), field) if formals.isEmpty =>
        assert(optReceiver.isDefined,
            "There was a This(), so there should be a receiver")
pretransformSelectCommon(body.tpe, optReceiver.get, field,
isLhsOfAssign = false)(cont)
case Assign(lhs @ Select(This(), field), VarRef(Ident(rhsName, _)))
if formals.size == 1 && formals.head.name.name == rhsName =>
assert(isStat, "Found Assign in expression position")
        assert(optReceiver.isDefined,
            "There was a This(), so there should be a receiver")
pretransformSelectCommon(lhs.tpe, optReceiver.get, field,
isLhsOfAssign = true) { preTransLhs =>
// TODO Support assignment of record
cont(PreTransTree(
Assign(finishTransformExpr(preTransLhs),
finishTransformExpr(args.head)),
RefinedType.NoRefinedType))
}
case _ =>
val targetID = (allocationSites, target)
inlineBody(optReceiver, formals, resultType, body, args, isStat,
usePreTransform)(cont)(scope.inlining(targetID), pos)
}
}
private def inlineBody(optReceiver: Option[PreTransform],
formals: List[ParamDef], resultType: Type, body: Tree,
args: List[PreTransform], isStat: Boolean,
usePreTransform: Boolean)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = tailcall {
val optReceiverBinding = optReceiver map { receiver =>
Binding("this", None, receiver.tpe.base, false, receiver)
}
val argsBindings = for {
(ParamDef(Ident(name, originalName), tpe, mutable, rest), arg) <- formals zip args
} yield {
assert(!rest, s"Trying to inline a body with a rest parameter at $pos")
Binding(name, originalName, tpe, mutable, arg)
}
withBindings(optReceiverBinding ++: argsBindings) { (bodyScope, cont1) =>
returnable("", resultType, body, isStat, usePreTransform)(
cont1)(bodyScope, pos)
} (cont) (scope.withEnv(OptEnv.Empty))
}
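
  /* Sketch of the set-up performed above: inlining a hypothetical method
   * `def plus(x: Int): Int = this.v + x` on receiver `r` with argument `a`
   * binds "this" -> r and "x" -> a in a fresh environment (OptEnv.Empty), and
   * pretransforms the body inside returnable("") so that returns from the
   * inlined body stay well-scoped.
   */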
private def callIntrinsic(code: Int, optTReceiver: Option[PreTransform],
targs: List[PreTransform], isStat: Boolean, usePreTransform: Boolean)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
import Intrinsics._
implicit def string2ident(s: String): Ident = Ident(s, None)
lazy val newReceiver = finishTransformExpr(optTReceiver.get)
lazy val newArgs = targs.map(finishTransformExpr)
@inline def contTree(result: Tree) = cont(PreTransTree(result))
@inline def StringClassType = ClassType(Definitions.StringClass)
def defaultApply(methodName: String, resultType: Type): TailRec[Tree] =
contTree(Apply(newReceiver, Ident(methodName), newArgs)(resultType))
def cursoryArrayElemType(tpe: ArrayType): Type = {
if (tpe.dimensions != 1) AnyType
else (tpe.baseClassName match {
case "Z" => BooleanType
case "B" | "C" | "S" | "I" => IntType
case "F" => FloatType
case "D" => DoubleType
case _ => AnyType
})
}
def asRTLong(arg: Tree): Tree =
AsInstanceOf(arg, ClassType(LongImpl.RuntimeLongClass))
def firstArgAsRTLong: Tree =
asRTLong(newArgs.head)
(code: @switch) match {
// java.lang.System
case ArrayCopy =>
assert(isStat, "System.arraycopy must be used in statement position")
contTree(CallHelper("systemArraycopy", newArgs)(NoType))
case IdentityHashCode =>
contTree(CallHelper("systemIdentityHashCode", newArgs)(IntType))
// scala.runtime.ScalaRunTime object
case ArrayApply =>
val List(array, index) = newArgs
array.tpe match {
case arrayTpe @ ArrayType(base, depth) =>
val elemType = cursoryArrayElemType(arrayTpe)
val select = ArraySelect(array, index)(elemType)
if (base == "C")
boxChar(select)(cont)
else
contTree(select)
case _ =>
defaultApply("array$undapply__O__I__O", AnyType)
}
case ArrayUpdate =>
val List(tarray, tindex, tvalue) = targs
tarray.tpe.base match {
case arrayTpe @ ArrayType(base, depth) =>
val array = finishTransformExpr(tarray)
val index = finishTransformExpr(tindex)
val elemType = cursoryArrayElemType(arrayTpe)
val select = ArraySelect(array, index)(elemType)
val cont1: PreTransCont = { tunboxedValue =>
contTree(Assign(select, finishTransformExpr(tunboxedValue)))
}
base match {
case "Z" | "B" | "S" | "I" | "L" | "F" | "D" if depth == 1 =>
foldUnbox(tvalue, base.charAt(0))(cont1)
case "C" if depth == 1 =>
unboxChar(tvalue)(cont1)
case _ =>
cont1(tvalue)
}
case _ =>
defaultApply("array$undupdate__O__I__O__V", AnyType)
}
case ArrayLength =>
targs.head.tpe.base match {
case _: ArrayType =>
contTree(Trees.ArrayLength(newArgs.head))
case _ =>
defaultApply("array$undlength__O__I", IntType)
}
// scala.scalajs.runtime package object
case PropertiesOf =>
contTree(CallHelper("propertiesOf", newArgs)(AnyType))
// java.lang.Integer
case IntegerNLZ =>
contTree(newArgs.head match {
case IntLiteral(value) => IntLiteral(Integer.numberOfLeadingZeros(value))
case newArg => CallHelper("clz32", newArg)(IntType)
})
// java.lang.Long
case LongToString =>
contTree(Apply(firstArgAsRTLong, "toString__T", Nil)(StringClassType))
case LongCompare =>
contTree(Apply(firstArgAsRTLong, "compareTo__sjsr_RuntimeLong__I",
List(asRTLong(newArgs(1))))(IntType))
case LongDivideUnsigned =>
contTree(Apply(firstArgAsRTLong, LongImpl.divideUnsigned,
List(asRTLong(newArgs(1))))(LongType))
case LongRemainderUnsigned =>
contTree(Apply(firstArgAsRTLong, LongImpl.remainderUnsigned,
List(asRTLong(newArgs(1))))(LongType))
case LongBitCount =>
contTree(Apply(firstArgAsRTLong, LongImpl.bitCount, Nil)(IntType))
case LongSignum =>
contTree(Apply(firstArgAsRTLong, LongImpl.signum, Nil)(LongType))
case LongLeading0s =>
contTree(Apply(firstArgAsRTLong, LongImpl.numberOfLeadingZeros, Nil)(IntType))
case LongTrailing0s =>
contTree(Apply(firstArgAsRTLong, LongImpl.numberOfTrailingZeros, Nil)(IntType))
case LongToBinStr =>
contTree(Apply(firstArgAsRTLong, LongImpl.toBinaryString, Nil)(StringClassType))
case LongToHexStr =>
contTree(Apply(firstArgAsRTLong, LongImpl.toHexString, Nil)(StringClassType))
case LongToOctalStr =>
contTree(Apply(firstArgAsRTLong, LongImpl.toOctalString, Nil)(StringClassType))
// scala.collection.mutable.ArrayBuilder
case GenericArrayBuilderResult =>
val List(runtimeClass, array) = newArgs
val (resultType, isExact) = runtimeClass match {
case ClassOf(elemType) => (ArrayType(elemType), true)
case _ => (AnyType, false)
}
cont(PreTransTree(CallHelper("makeNativeArrayWrapper",
CallHelper("arrayDataOf",
CallHelper("classDataOf", runtimeClass)(AnyType))(AnyType),
array)(resultType),
RefinedType(resultType, isExact = isExact, isNullable = false)))
case ArrayBuilderZeroOf =>
contTree(finishTransformExpr(targs.head) match {
case ClassOf(ClassType(cls)) =>
cls match {
case "B" | "S" | "C" | "I" | "D" => IntLiteral(0)
case "L" => LongLiteral(0L)
case "F" => FloatLiteral(0.0f)
case "Z" => BooleanLiteral(false)
case "V" => Undefined()
case _ => Null()
}
case ClassOf(_) =>
Null()
case runtimeClass =>
CallHelper("zeroOf", runtimeClass)(AnyType)
})
// java.lang.Class
case ClassGetComponentType =>
newReceiver match {
case ClassOf(ArrayType(base, depth)) =>
contTree(ClassOf(
if (depth == 1) ClassType(base)
else ArrayType(base, depth - 1)))
case ClassOf(ClassType(_)) =>
contTree(Null())
case receiver =>
defaultApply("getComponentType__jl_Class",
ClassType(Definitions.ClassClass))
}
// java.lang.reflect.Array
case ArrayNewInstance =>
newArgs.head match {
case ClassOf(elementTpe) =>
contTree(NewArray(ArrayType(elementTpe), List(newArgs.tail.head)))
case _ =>
defaultApply("newInstance__jl_Class__I__O", AnyType)
}
// TypedArray conversions
case ByteArrayToInt8Array =>
contTree(CallHelper("byteArray2TypedArray", newArgs)(AnyType))
case ShortArrayToInt16Array =>
contTree(CallHelper("shortArray2TypedArray", newArgs)(AnyType))
case CharArrayToUint16Array =>
contTree(CallHelper("charArray2TypedArray", newArgs)(AnyType))
case IntArrayToInt32Array =>
contTree(CallHelper("intArray2TypedArray", newArgs)(AnyType))
case FloatArrayToFloat32Array =>
contTree(CallHelper("floatArray2TypedArray", newArgs)(AnyType))
case DoubleArrayToFloat64Array =>
contTree(CallHelper("doubleArray2TypedArray", newArgs)(AnyType))
case Int8ArrayToByteArray =>
contTree(CallHelper("typedArray2ByteArray", newArgs)(AnyType))
case Int16ArrayToShortArray =>
contTree(CallHelper("typedArray2ShortArray", newArgs)(AnyType))
case Uint16ArrayToCharArray =>
contTree(CallHelper("typedArray2CharArray", newArgs)(AnyType))
case Int32ArrayToIntArray =>
contTree(CallHelper("typedArray2IntArray", newArgs)(AnyType))
case Float32ArrayToFloatArray =>
contTree(CallHelper("typedArray2FloatArray", newArgs)(AnyType))
case Float64ArrayToDoubleArray =>
contTree(CallHelper("typedArray2DoubleArray", newArgs)(AnyType))
}
}
private def boxChar(value: Tree)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
pretransformNew(value, ClassType(Definitions.BoxedCharacterClass),
Ident("init___C"), List(PreTransTree(value)))(cont)
}
private def unboxChar(tvalue: PreTransform)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
val BoxesRunTimeModuleClassName = "sr_BoxesRunTime$"
val treceiver = PreTransTree(LoadModule(
ClassType(BoxesRunTimeModuleClassName)))
val target = staticCall(BoxesRunTimeModuleClassName, "unboxToChar__O__C").getOrElse {
throw new AssertionError("Cannot find method sr_BoxesRunTime$.unboxToChar__O__C")
}
val allocationSites = List(treceiver, tvalue).map(_.tpe.allocationSite)
inline(allocationSites, Some(treceiver), List(tvalue), target,
isStat = false, usePreTransform = true)(cont)
}
private def inlineClassConstructor(allocationSite: AllocationSite,
cls: ClassType, initialValue: RecordValue,
ctor: Ident, args: List[PreTransform], cancelFun: CancelFun)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = {
val RecordValue(recordType, initialFieldValues) = initialValue
pretransformExprs(initialFieldValues) { tinitialFieldValues =>
val initialFieldBindings = for {
(RecordType.Field(name, originalName, tpe, mutable), value) <-
recordType.fields zip tinitialFieldValues
} yield {
Binding(name, originalName, tpe, mutable, value)
}
withNewLocalDefs(initialFieldBindings) { (initialFieldLocalDefList, cont1) =>
val fieldNames = initialValue.tpe.fields.map(_.name)
val initialFieldLocalDefs =
Map(fieldNames zip initialFieldLocalDefList: _*)
inlineClassConstructorBody(allocationSite, initialFieldLocalDefs,
cls, cls, ctor, args, cancelFun) { (finalFieldLocalDefs, cont2) =>
cont2(PreTransLocalDef(LocalDef(
RefinedType(cls, isExact = true, isNullable = false,
allocationSite = Some(allocationSite)),
mutable = false,
InlineClassInstanceReplacement(recordType, finalFieldLocalDefs, cancelFun))))
} (cont1)
} (cont)
}
}
private def inlineClassConstructorBody(
allocationSite: AllocationSite,
inputFieldsLocalDefs: Map[String, LocalDef], cls: ClassType,
ctorClass: ClassType, ctor: Ident, args: List[PreTransform],
cancelFun: CancelFun)(
buildInner: (Map[String, LocalDef], PreTransCont) => TailRec[Tree])(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = tailcall {
val target = staticCall(ctorClass.className, ctor.name).getOrElse(cancelFun())
val targetID = (Some(allocationSite) :: args.map(_.tpe.allocationSite), target)
if (scope.implsBeingInlined.contains(targetID))
cancelFun()
val MethodDef(_, _, formals, _, BlockOrAlone(stats, This())) =
getMethodBody(target)
val argsBindings = for {
(ParamDef(Ident(name, originalName), tpe, mutable, _), arg) <- formals zip args
} yield {
Binding(name, originalName, tpe, mutable, arg)
}
withBindings(argsBindings) { (bodyScope, cont1) =>
val thisLocalDef = LocalDef(
RefinedType(cls, isExact = true, isNullable = false), false,
InlineClassBeingConstructedReplacement(inputFieldsLocalDefs, cancelFun))
val statsScope = bodyScope.inlining(targetID).withEnv(
bodyScope.env.withLocalDef("this", thisLocalDef))
inlineClassConstructorBodyList(allocationSite, thisLocalDef,
inputFieldsLocalDefs, cls, stats, cancelFun)(
buildInner)(cont1)(statsScope)
} (cont) (scope.withEnv(OptEnv.Empty))
}
private def inlineClassConstructorBodyList(
allocationSite: AllocationSite,
thisLocalDef: LocalDef, inputFieldsLocalDefs: Map[String, LocalDef],
cls: ClassType, stats: List[Tree], cancelFun: CancelFun)(
buildInner: (Map[String, LocalDef], PreTransCont) => TailRec[Tree])(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
stats match {
case This() :: rest =>
inlineClassConstructorBodyList(allocationSite, thisLocalDef,
inputFieldsLocalDefs, cls, rest, cancelFun)(buildInner)(cont)
case Assign(s @ Select(ths: This,
Ident(fieldName, fieldOrigName)), value) :: rest
if !inputFieldsLocalDefs(fieldName).mutable =>
pretransformExpr(value) { tvalue =>
withNewLocalDef(Binding(fieldName, fieldOrigName, s.tpe, false,
tvalue)) { (localDef, cont1) =>
if (localDef.contains(thisLocalDef)) {
/* Uh oh, there is a `val x = ...this...`. We can't keep it,
* because this field will not be updated with `newThisLocalDef`.
*/
cancelFun()
}
val newFieldsLocalDefs =
inputFieldsLocalDefs.updated(fieldName, localDef)
val newThisLocalDef = LocalDef(
RefinedType(cls, isExact = true, isNullable = false), false,
InlineClassBeingConstructedReplacement(newFieldsLocalDefs, cancelFun))
val restScope = scope.withEnv(scope.env.withLocalDef(
"this", newThisLocalDef))
inlineClassConstructorBodyList(allocationSite,
newThisLocalDef, newFieldsLocalDefs, cls, rest, cancelFun)(
buildInner)(cont1)(restScope)
} (cont)
}
/* if (cond)
* throw e
* else
* this.outer = value
*
* becomes
*
* this.outer =
* if (cond) throw e
* else value
*
* Typical shape of initialization of outer pointer of inner classes.
*/
case If(cond, th: Throw, Assign(Select(This(), _), value)) :: rest =>
        // work around a compiler bug (these should be @-bindings)
val stat = stats.head.asInstanceOf[If]
val ass = stat.elsep.asInstanceOf[Assign]
val lhs = ass.lhs
inlineClassConstructorBodyList(allocationSite, thisLocalDef,
inputFieldsLocalDefs, cls,
Assign(lhs, If(cond, th, value)(lhs.tpe)(stat.pos))(ass.pos) :: rest,
cancelFun)(buildInner)(cont)
case ApplyStatically(ths: This, superClass, superCtor, args) :: rest
if isConstructorName(superCtor.name) =>
pretransformExprs(args) { targs =>
inlineClassConstructorBody(allocationSite, inputFieldsLocalDefs,
cls, superClass, superCtor, targs,
cancelFun) { (outputFieldsLocalDefs, cont1) =>
val newThisLocalDef = LocalDef(
RefinedType(cls, isExact = true, isNullable = false), false,
InlineClassBeingConstructedReplacement(outputFieldsLocalDefs, cancelFun))
val restScope = scope.withEnv(scope.env.withLocalDef(
"this", newThisLocalDef))
inlineClassConstructorBodyList(allocationSite,
newThisLocalDef, outputFieldsLocalDefs,
cls, rest, cancelFun)(buildInner)(cont1)(restScope)
} (cont)
}
case VarDef(Ident(name, originalName), tpe, mutable, rhs) :: rest =>
pretransformExpr(rhs) { trhs =>
withBinding(Binding(name, originalName, tpe, mutable, trhs)) { (restScope, cont1) =>
inlineClassConstructorBodyList(allocationSite,
thisLocalDef, inputFieldsLocalDefs,
cls, rest, cancelFun)(buildInner)(cont1)(restScope)
} (cont)
}
case stat :: rest =>
val transformedStat = transformStat(stat)
transformedStat match {
case Skip() =>
inlineClassConstructorBodyList(allocationSite,
thisLocalDef, inputFieldsLocalDefs,
cls, rest, cancelFun)(buildInner)(cont)
case _ =>
if (transformedStat.tpe == NothingType)
cont(PreTransTree(transformedStat, RefinedType.Nothing))
else {
inlineClassConstructorBodyList(allocationSite,
thisLocalDef, inputFieldsLocalDefs,
cls, rest, cancelFun)(buildInner) { tinner =>
cont(PreTransBlock(transformedStat :: Nil, tinner))
}
}
}
case Nil =>
buildInner(inputFieldsLocalDefs, cont)
}
}
private def foldIf(cond: Tree, thenp: Tree, elsep: Tree)(tpe: Type)(
implicit pos: Position): Tree = {
import BinaryOp._
@inline def default = If(cond, thenp, elsep)(tpe)
cond match {
case BooleanLiteral(v) =>
if (v) thenp
else elsep
case _ =>
@inline def negCond = foldUnaryOp(UnaryOp.Boolean_!, cond)
if (thenp.tpe == BooleanType && elsep.tpe == BooleanType) {
(cond, thenp, elsep) match {
case (_, BooleanLiteral(t), BooleanLiteral(e)) =>
if (t == e) Block(keepOnlySideEffects(cond), thenp)
else if (t) cond
else negCond
case (_, BooleanLiteral(false), _) =>
foldIf(negCond, elsep, BooleanLiteral(false))(tpe) // canonical && form
case (_, _, BooleanLiteral(true)) =>
foldIf(negCond, BooleanLiteral(true), thenp)(tpe) // canonical || form
/* if (lhs === null) rhs === null else lhs === rhs
* -> lhs === rhs
* This is the typical shape of a lhs == rhs test where
* the equals() method has been inlined as a reference
* equality test.
*/
case (BinaryOp(BinaryOp.===, VarRef(lhsIdent), Null()),
BinaryOp(BinaryOp.===, VarRef(rhsIdent), Null()),
BinaryOp(BinaryOp.===, VarRef(lhsIdent2), VarRef(rhsIdent2)))
if lhsIdent2 == lhsIdent && rhsIdent2 == rhsIdent =>
elsep
// Example: (x > y) || (x == y) -> (x >= y)
case (BinaryOp(op1 @ (Num_== | Num_!= | Num_< | Num_<= | Num_> | Num_>=), l1, r1),
BooleanLiteral(true),
BinaryOp(op2 @ (Num_== | Num_!= | Num_< | Num_<= | Num_> | Num_>=), l2, r2))
if ((l1.isInstanceOf[Literal] || l1.isInstanceOf[VarRef]) &&
(r1.isInstanceOf[Literal] || r1.isInstanceOf[VarRef]) &&
(l1 == l2 && r1 == r2)) =>
val canBeEqual =
((op1 == Num_==) || (op1 == Num_<=) || (op1 == Num_>=)) ||
((op2 == Num_==) || (op2 == Num_<=) || (op2 == Num_>=))
val canBeLessThan =
((op1 == Num_!=) || (op1 == Num_<) || (op1 == Num_<=)) ||
((op2 == Num_!=) || (op2 == Num_<) || (op2 == Num_<=))
val canBeGreaterThan =
((op1 == Num_!=) || (op1 == Num_>) || (op1 == Num_>=)) ||
((op2 == Num_!=) || (op2 == Num_>) || (op2 == Num_>=))
fold3WayComparison(canBeEqual, canBeLessThan, canBeGreaterThan, l1, r1)
// Example: (x >= y) && (x <= y) -> (x == y)
case (BinaryOp(op1 @ (Num_== | Num_!= | Num_< | Num_<= | Num_> | Num_>=), l1, r1),
BinaryOp(op2 @ (Num_== | Num_!= | Num_< | Num_<= | Num_> | Num_>=), l2, r2),
BooleanLiteral(false))
if ((l1.isInstanceOf[Literal] || l1.isInstanceOf[VarRef]) &&
(r1.isInstanceOf[Literal] || r1.isInstanceOf[VarRef]) &&
(l1 == l2 && r1 == r2)) =>
val canBeEqual =
((op1 == Num_==) || (op1 == Num_<=) || (op1 == Num_>=)) &&
((op2 == Num_==) || (op2 == Num_<=) || (op2 == Num_>=))
val canBeLessThan =
((op1 == Num_!=) || (op1 == Num_<) || (op1 == Num_<=)) &&
((op2 == Num_!=) || (op2 == Num_<) || (op2 == Num_<=))
val canBeGreaterThan =
((op1 == Num_!=) || (op1 == Num_>) || (op1 == Num_>=)) &&
((op2 == Num_!=) || (op2 == Num_>) || (op2 == Num_>=))
fold3WayComparison(canBeEqual, canBeLessThan, canBeGreaterThan, l1, r1)
case _ => default
}
} else {
(thenp, elsep) match {
case (Skip(), Skip()) => keepOnlySideEffects(cond)
case (Skip(), _) => foldIf(negCond, elsep, thenp)(tpe)
case _ => default
}
}
}
}
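
  /* Worked instance of the comparison merging above (a sketch): for
   * `(x > y) || (x == y)`, i.e. If(x > y, true, x == y), we get
   *   canBeEqual       = false || true  = true
   *   canBeLessThan    = false || false = false
   *   canBeGreaterThan = true  || false = true
   * and fold3WayComparison(true, false, true, x, y) folds to `x >= y`.
   */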
private def transformUnaryOp(tree: UnaryOp)(implicit scope: Scope): Tree = {
import UnaryOp._
implicit val pos = tree.pos
val UnaryOp(op, arg) = tree
op match {
case LongToInt =>
trampoline {
          pretransformExpr(arg) { targ =>
TailCalls.done {
foldUnaryOp(op, finishTransformOptLongExpr(targ))
}
}
}
case _ =>
foldUnaryOp(op, transformExpr(arg))
}
}
private def transformBinaryOp(tree: BinaryOp)(implicit scope: Scope): Tree = {
import BinaryOp._
implicit val pos = tree.pos
val BinaryOp(op, lhs, rhs) = tree
(op: @switch) match {
case === | !== =>
trampoline {
pretransformExprs(lhs, rhs) { (tlhs, trhs) =>
TailCalls.done(foldReferenceEquality(tlhs, trhs, op == ===))
}
}
case Long_== | Long_!= | Long_< | Long_<= | Long_> | Long_>= =>
trampoline {
pretransformExprs(lhs, rhs) { (tlhs, trhs) =>
TailCalls.done {
if (isLiteralOrOptimizableLong(tlhs) &&
isLiteralOrOptimizableLong(trhs)) {
foldBinaryOp(op, finishTransformOptLongExpr(tlhs),
finishTransformOptLongExpr(trhs))
} else {
foldBinaryOp(op, finishTransformExpr(tlhs),
finishTransformExpr(trhs))
}
}
}
}
case _ =>
foldBinaryOp(op, transformExpr(lhs), transformExpr(rhs))
}
}
private def isLiteralOrOptimizableLong(texpr: PreTransform): Boolean = {
texpr match {
case PreTransTree(LongLiteral(_), _) =>
true
case PreTransLocalDef(LocalDef(_, _, replacement)) =>
replacement match {
case ReplaceWithVarRef(_, _, _, Some(_)) => true
case ReplaceWithConstant(LongLiteral(_)) => true
case _ => false
}
case _ =>
false
}
}
private def finishTransformOptLongExpr(targ: PreTransform): Tree = targ match {
case PreTransLocalDef(LocalDef(tpe, false,
ReplaceWithVarRef(_, _, _, Some(argValue)))) =>
argValue()
case _ =>
finishTransformExpr(targ)
}
private def foldUnaryOp(op: UnaryOp.Code, arg: Tree)(
implicit pos: Position): Tree = {
import UnaryOp._
@inline def default = UnaryOp(op, arg)
(op: @switch) match {
case Boolean_! =>
arg match {
case BooleanLiteral(v) => BooleanLiteral(!v)
case UnaryOp(Boolean_!, x) => x
case BinaryOp(innerOp, l, r) =>
val newOp = (innerOp: @switch) match {
case BinaryOp.=== => BinaryOp.!==
case BinaryOp.!== => BinaryOp.===
case BinaryOp.Num_== => BinaryOp.Num_!=
case BinaryOp.Num_!= => BinaryOp.Num_==
case BinaryOp.Num_< => BinaryOp.Num_>=
case BinaryOp.Num_<= => BinaryOp.Num_>
case BinaryOp.Num_> => BinaryOp.Num_<=
case BinaryOp.Num_>= => BinaryOp.Num_<
case BinaryOp.Long_== => BinaryOp.Long_!=
case BinaryOp.Long_!= => BinaryOp.Long_==
case BinaryOp.Long_< => BinaryOp.Long_>=
case BinaryOp.Long_<= => BinaryOp.Long_>
case BinaryOp.Long_> => BinaryOp.Long_<=
case BinaryOp.Long_>= => BinaryOp.Long_<
case BinaryOp.Boolean_== => BinaryOp.Boolean_!=
case BinaryOp.Boolean_!= => BinaryOp.Boolean_==
case _ => -1
}
if (newOp == -1) default
else BinaryOp(newOp, l, r)
case _ => default
}
case IntToLong =>
arg match {
case IntLiteral(v) => LongLiteral(v.toLong)
case _ => default
}
case LongToInt =>
arg match {
case LongLiteral(v) => IntLiteral(v.toInt)
case UnaryOp(IntToLong, x) => x
case BinaryOp(BinaryOp.Long_+, x, y) =>
foldBinaryOp(BinaryOp.Int_+,
foldUnaryOp(LongToInt, x),
foldUnaryOp(LongToInt, y))
case BinaryOp(BinaryOp.Long_-, x, y) =>
foldBinaryOp(BinaryOp.Int_-,
foldUnaryOp(LongToInt, x),
foldUnaryOp(LongToInt, y))
case _ => default
}
case LongToDouble =>
arg match {
case LongLiteral(v) => DoubleLiteral(v.toDouble)
case _ => default
}
case DoubleToInt =>
arg match {
case _ if arg.tpe == IntType => arg
case NumberLiteral(v) => IntLiteral(v.toInt)
case _ => default
}
case DoubleToFloat =>
arg match {
case _ if arg.tpe == FloatType => arg
case NumberLiteral(v) => FloatLiteral(v.toFloat)
case _ => default
}
case DoubleToLong =>
arg match {
case _ if arg.tpe == IntType => foldUnaryOp(IntToLong, arg)
case NumberLiteral(v) => LongLiteral(v.toLong)
case _ => default
}
case _ =>
default
}
}
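
  /* The LongToInt folding above is sound because truncation to 32 bits
   * distributes over wrap-around addition and subtraction:
   *   (x + y).toInt == x.toInt + y.toInt
   *   (x - y).toInt == x.toInt - y.toInt
   * for all Longs x and y, so the 64-bit operations can be replaced by their
   * cheaper Int counterparts.
   */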
/** Performs === for two literals.
* The result is always known statically.
*/
private def literal_===(lhs: Literal, rhs: Literal): Boolean = {
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => l == r
case (FloatLiteral(l), FloatLiteral(r)) => l == r
case (NumberLiteral(l), NumberLiteral(r)) => l == r
case (LongLiteral(l), LongLiteral(r)) => l == r
case (BooleanLiteral(l), BooleanLiteral(r)) => l == r
case (StringLiteral(l), StringLiteral(r)) => l == r
case (ClassOf(l), ClassOf(r)) => l == r
case (Undefined(), Undefined()) => true
case (Null(), Null()) => true
case _ => false
}
}
  /** Translates literals to their JavaScript string representation. */
  private def foldToStringForString_+(tree: Tree)(implicit pos: Position): Tree = tree match {
case FloatLiteral(value) =>
foldToStringForString_+(DoubleLiteral(value.toDouble))
case DoubleLiteral(value) =>
jsNumberToString(value).fold(tree)(StringLiteral(_))
case LongLiteral(value) => StringLiteral(value.toString)
case IntLiteral(value) => StringLiteral(value.toString)
case BooleanLiteral(value) => StringLiteral(value.toString)
case Null() => StringLiteral("null")
case Undefined() => StringLiteral("undefined")
case _ => tree
}
/* Following the ECMAScript 6 specification */
private def jsNumberToString(value: Double): Option[String] = {
if (1.0.toString == "1") {
// We are in a JS environment, so the host .toString() is the correct one.
Some(value.toString)
} else {
value match {
case _ if value.isNaN => Some("NaN")
case 0 => Some("0")
case _ if value < 0 => jsNumberToString(-value).map("-" + _)
case _ if value.isInfinity => Some("Infinity")
case _ if value.isValidInt => Some(value.toInt.toString)
case _ => None
}
}
}
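
  /* Example of the discrepancy handled here when the optimizer runs on the
   * JVM: 5.0.toString is "5.0" on the JVM but "5" in JavaScript, hence the
   * detour through .toInt for whole doubles. For values such as 1.5e300,
   * where reproducing the exact ES6 output is harder, we return None and
   * leave the conversion to run time.
   */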
private def foldBinaryOp(op: BinaryOp.Code, lhs: Tree, rhs: Tree)(
implicit pos: Position): Tree = {
import BinaryOp._
@inline def default = BinaryOp(op, lhs, rhs)
(op: @switch) match {
case === | !== =>
val positive = (op == ===)
(lhs, rhs) match {
case (lhs: Literal, rhs: Literal) =>
BooleanLiteral(literal_===(lhs, rhs) == positive)
case (_: Literal, _) => foldBinaryOp(op, rhs, lhs)
case _ => default
}
case String_+ =>
val lhs1 = foldToStringForString_+(lhs)
val rhs1 = foldToStringForString_+(rhs)
@inline def stringDefault = BinaryOp(String_+, lhs1, rhs1)
(lhs1, rhs1) match {
case (StringLiteral(s1), StringLiteral(s2)) =>
StringLiteral(s1 + s2)
case (_, StringLiteral("")) =>
foldBinaryOp(op, rhs1, lhs1)
case (StringLiteral(""), _) if rhs1.tpe == StringType =>
rhs1
case (_, BinaryOp(String_+, rl, rr)) =>
foldBinaryOp(String_+, BinaryOp(String_+, lhs1, rl), rr)
case (BinaryOp(String_+, ll, StringLiteral(lr)), StringLiteral(r)) =>
BinaryOp(String_+, ll, StringLiteral(lr + r))
case (BinaryOp(String_+, StringLiteral(""), lr), _) =>
BinaryOp(String_+, lr, rhs1)
case _ =>
stringDefault
}
case Int_+ =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => IntLiteral(l + r)
case (_, IntLiteral(_)) => foldBinaryOp(Int_+, rhs, lhs)
case (IntLiteral(0), _) => rhs
case (IntLiteral(x),
BinaryOp(innerOp @ (Int_+ | Int_-), IntLiteral(y), z)) =>
foldBinaryOp(innerOp, IntLiteral(x+y), z)
case _ => default
}
case Int_- =>
(lhs, rhs) match {
case (_, IntLiteral(r)) => foldBinaryOp(Int_+, lhs, IntLiteral(-r))
case (IntLiteral(x), BinaryOp(Int_+, IntLiteral(y), z)) =>
foldBinaryOp(Int_-, IntLiteral(x-y), z)
case (IntLiteral(x), BinaryOp(Int_-, IntLiteral(y), z)) =>
foldBinaryOp(Int_+, IntLiteral(x-y), z)
case (_, BinaryOp(Int_-, IntLiteral(0), x)) =>
foldBinaryOp(Int_+, lhs, x)
case _ => default
}
case Int_* =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => IntLiteral(l * r)
case (_, IntLiteral(_)) => foldBinaryOp(Int_*, rhs, lhs)
case (IntLiteral(1), _) => rhs
case (IntLiteral(-1), _) => foldBinaryOp(Int_-, IntLiteral(0), rhs)
case _ => default
}
case Int_/ =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) if r != 0 => IntLiteral(l / r)
case (_, IntLiteral(1)) => lhs
case (_, IntLiteral(-1)) => foldBinaryOp(Int_-, IntLiteral(0), lhs)
case _ => default
}
case Int_% =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) if r != 0 => IntLiteral(l % r)
case (_, IntLiteral(1 | -1)) =>
Block(keepOnlySideEffects(lhs), IntLiteral(0))
case _ => default
}
case Int_| =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => IntLiteral(l | r)
case (_, IntLiteral(_)) => foldBinaryOp(Int_|, rhs, lhs)
case (IntLiteral(0), _) => rhs
case (IntLiteral(x), BinaryOp(Int_|, IntLiteral(y), z)) =>
foldBinaryOp(Int_|, IntLiteral(x | y), z)
case _ => default
}
case Int_& =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => IntLiteral(l & r)
case (_, IntLiteral(_)) => foldBinaryOp(Int_&, rhs, lhs)
case (IntLiteral(-1), _) => rhs
case (IntLiteral(x), BinaryOp(Int_&, IntLiteral(y), z)) =>
foldBinaryOp(Int_&, IntLiteral(x & y), z)
case _ => default
}
case Int_^ =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => IntLiteral(l ^ r)
case (_, IntLiteral(_)) => foldBinaryOp(Int_^, rhs, lhs)
case (IntLiteral(0), _) => rhs
case (IntLiteral(x), BinaryOp(Int_^, IntLiteral(y), z)) =>
foldBinaryOp(Int_^, IntLiteral(x ^ y), z)
case _ => default
}
case Int_<< =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => IntLiteral(l << r)
case (_, IntLiteral(x)) if x % 32 == 0 => lhs
case _ => default
}
case Int_>>> =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => IntLiteral(l >>> r)
case (_, IntLiteral(x)) if x % 32 == 0 => lhs
case _ => default
}
case Int_>> =>
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) => IntLiteral(l >> r)
case (_, IntLiteral(x)) if x % 32 == 0 => lhs
case _ => default
}
case Long_+ =>
(lhs, rhs) match {
case (LongLiteral(l), LongLiteral(r)) => LongLiteral(l + r)
case (_, LongLiteral(_)) => foldBinaryOp(Long_+, rhs, lhs)
case (LongLiteral(0), _) => rhs
case (LongLiteral(x),
BinaryOp(innerOp @ (Long_+ | Long_-), LongLiteral(y), z)) =>
foldBinaryOp(innerOp, LongLiteral(x+y), z)
case _ => default
}
case Long_- =>
(lhs, rhs) match {
case (_, LongLiteral(r)) => foldBinaryOp(Long_+, LongLiteral(-r), lhs)
case (LongLiteral(x), BinaryOp(Long_+, LongLiteral(y), z)) =>
foldBinaryOp(Long_-, LongLiteral(x-y), z)
case (LongLiteral(x), BinaryOp(Long_-, LongLiteral(y), z)) =>
foldBinaryOp(Long_+, LongLiteral(x-y), z)
case (_, BinaryOp(BinaryOp.Long_-, LongLiteral(0L), x)) =>
foldBinaryOp(Long_+, lhs, x)
case _ => default
}
case Long_* =>
(lhs, rhs) match {
case (LongLiteral(l), LongLiteral(r)) => LongLiteral(l * r)
case (_, LongLiteral(_)) => foldBinaryOp(Long_*, rhs, lhs)
case (LongLiteral(1), _) => rhs
case (LongLiteral(-1), _) => foldBinaryOp(Long_-, LongLiteral(0), lhs)
case _ => default
}
case Long_/ =>
(lhs, rhs) match {
case (_, LongLiteral(0)) => default
case (LongLiteral(l), LongLiteral(r)) => LongLiteral(l / r)
case (_, LongLiteral(1)) => lhs
case (_, LongLiteral(-1)) => foldBinaryOp(Long_-, LongLiteral(0), lhs)
case (LongFromInt(x), LongFromInt(y: IntLiteral)) if y.value != -1 =>
LongFromInt(foldBinaryOp(Int_/, x, y))
case _ => default
}
case Long_% =>
(lhs, rhs) match {
case (_, LongLiteral(0)) => default
case (LongLiteral(l), LongLiteral(r)) => LongLiteral(l % r)
case (_, LongLiteral(1L | -1L)) =>
Block(keepOnlySideEffects(lhs), LongLiteral(0L))
case (LongFromInt(x), LongFromInt(y)) =>
LongFromInt(foldBinaryOp(Int_%, x, y))
case _ => default
}
case Long_| =>
(lhs, rhs) match {
case (LongLiteral(l), LongLiteral(r)) => LongLiteral(l | r)
case (_, LongLiteral(_)) => foldBinaryOp(Long_|, rhs, lhs)
case (LongLiteral(0), _) => rhs
case (LongLiteral(x), BinaryOp(Long_|, LongLiteral(y), z)) =>
foldBinaryOp(Long_|, LongLiteral(x | y), z)
case _ => default
}
case Long_& =>
(lhs, rhs) match {
case (LongLiteral(l), LongLiteral(r)) => LongLiteral(l & r)
case (_, LongLiteral(_)) => foldBinaryOp(Long_&, rhs, lhs)
case (LongLiteral(-1), _) => rhs
case (LongLiteral(x), BinaryOp(Long_&, LongLiteral(y), z)) =>
foldBinaryOp(Long_&, LongLiteral(x & y), z)
case _ => default
}
case Long_^ =>
(lhs, rhs) match {
case (LongLiteral(l), LongLiteral(r)) => LongLiteral(l ^ r)
case (_, LongLiteral(_)) => foldBinaryOp(Long_^, rhs, lhs)
case (LongLiteral(0), _) => rhs
case (LongLiteral(x), BinaryOp(Long_^, LongLiteral(y), z)) =>
foldBinaryOp(Long_^, LongLiteral(x ^ y), z)
case _ => default
}
case Long_<< =>
(lhs, rhs) match {
case (LongLiteral(l), IntLiteral(r)) => LongLiteral(l << r)
case (_, IntLiteral(x)) if x % 64 == 0 => lhs
case _ => default
}
case Long_>>> =>
(lhs, rhs) match {
case (LongLiteral(l), IntLiteral(r)) => LongLiteral(l >>> r)
case (_, IntLiteral(x)) if x % 64 == 0 => lhs
case _ => default
}
case Long_>> =>
(lhs, rhs) match {
case (LongLiteral(l), IntLiteral(r)) => LongLiteral(l >> r)
case (_, IntLiteral(x)) if x % 64 == 0 => lhs
case _ => default
}
case Long_== | Long_!= =>
val positive = (op == Long_==)
(lhs, rhs) match {
case (LongLiteral(l), LongLiteral(r)) =>
BooleanLiteral((l == r) == positive)
case (LongFromInt(x), LongFromInt(y)) =>
foldBinaryOp(if (positive) === else !==, x, y)
case (LongFromInt(x), LongLiteral(y)) =>
assert(y > Int.MaxValue || y < Int.MinValue)
Block(keepOnlySideEffects(x), BooleanLiteral(!positive))
case (BinaryOp(Long_+, LongLiteral(x), y), LongLiteral(z)) =>
foldBinaryOp(op, y, LongLiteral(z-x))
case (BinaryOp(Long_-, LongLiteral(x), y), LongLiteral(z)) =>
foldBinaryOp(op, y, LongLiteral(x-z))
case (LongLiteral(_), _) => foldBinaryOp(op, rhs, lhs)
case _ => default
}
case Long_< | Long_<= | Long_> | Long_>= =>
def flippedOp = (op: @switch) match {
case Long_< => Long_>
case Long_<= => Long_>=
case Long_> => Long_<
case Long_>= => Long_<=
}
def intOp = (op: @switch) match {
case Long_< => Num_<
case Long_<= => Num_<=
case Long_> => Num_>
case Long_>= => Num_>=
}
(lhs, rhs) match {
case (LongLiteral(l), LongLiteral(r)) =>
val result = (op: @switch) match {
case Long_< => l < r
case Long_<= => l <= r
case Long_> => l > r
case Long_>= => l >= r
}
BooleanLiteral(result)
case (_, LongLiteral(Long.MinValue)) =>
if (op == Long_< || op == Long_>=)
Block(keepOnlySideEffects(lhs), BooleanLiteral(op == Long_>=))
else
foldBinaryOp(if (op == Long_<=) Long_== else Long_!=, lhs, rhs)
case (_, LongLiteral(Long.MaxValue)) =>
if (op == Long_> || op == Long_<=)
Block(keepOnlySideEffects(lhs), BooleanLiteral(op == Long_<=))
else
foldBinaryOp(if (op == Long_>=) Long_== else Long_!=, lhs, rhs)
case (LongFromInt(x), LongFromInt(y)) =>
foldBinaryOp(intOp, x, y)
case (LongFromInt(x), LongLiteral(y)) =>
assert(y > Int.MaxValue || y < Int.MinValue)
val result =
if (y > Int.MaxValue) op == Long_< || op == Long_<=
else op == Long_> || op == Long_>=
Block(keepOnlySideEffects(x), BooleanLiteral(result))
/* x + y.toLong > z
* -x on both sides
* requires x + y.toLong not to overflow, and z - x likewise
* y.toLong > z - x
*/
case (BinaryOp(Long_+, LongLiteral(x), y @ LongFromInt(_)), LongLiteral(z))
if canAddLongs(x, Int.MinValue) &&
canAddLongs(x, Int.MaxValue) &&
canSubtractLongs(z, x) =>
foldBinaryOp(op, y, LongLiteral(z-x))
/* x - y.toLong > z
* -x on both sides
* requires x - y.toLong not to overflow, and z - x likewise
* -(y.toLong) > z - x
*/
case (BinaryOp(Long_-, LongLiteral(x), y @ LongFromInt(_)), LongLiteral(z))
if canSubtractLongs(x, Int.MinValue) &&
canSubtractLongs(x, Int.MaxValue) &&
canSubtractLongs(z, x) =>
if (z-x != Long.MinValue) {
// Since -(y.toLong) does not overflow, we can negate both sides
foldBinaryOp(flippedOp, y, LongLiteral(-(z-x)))
} else {
/* -(y.toLong) > Long.MinValue
* Depending on the operator, this is either always true or
* always false.
*/
val result = (op == Long_>) || (op == Long_>=)
Block(keepOnlySideEffects(y), BooleanLiteral(result))
}
/* x.toLong + y.toLong > Int.MaxValue.toLong
*
* This is basically testing whether x+y overflows in positive.
* If x <= 0 or y <= 0, this cannot happen -> false.
* If x > 0 and y > 0, this can be detected with x+y < 0.
* Therefore, we rewrite as:
*
* x > 0 && y > 0 && x+y < 0.
*
           * This requires evaluating x and y only once, via temporaries.
*/
case (BinaryOp(Long_+, LongFromInt(x), LongFromInt(y)),
LongLiteral(Int.MaxValue)) =>
trampoline {
withNewLocalDefs(List(
Binding("x", None, IntType, false, PreTransTree(x)),
Binding("y", None, IntType, false, PreTransTree(y)))) {
(tempsLocalDefs, cont) =>
val List(tempXDef, tempYDef) = tempsLocalDefs
val tempX = tempXDef.newReplacement
val tempY = tempYDef.newReplacement
cont(PreTransTree(
AndThen(AndThen(
BinaryOp(Num_>, tempX, IntLiteral(0)),
BinaryOp(Num_>, tempY, IntLiteral(0))),
BinaryOp(Num_<, BinaryOp(Int_+, tempX, tempY), IntLiteral(0)))))
} (finishTransform(isStat = false))
}
case (LongLiteral(_), _) => foldBinaryOp(flippedOp, rhs, lhs)
case _ => default
}
case Float_+ =>
(lhs, rhs) match {
case (FloatLiteral(l), FloatLiteral(r)) => FloatLiteral(l + r)
case (FloatLiteral(0), _) => rhs
case (_, FloatLiteral(_)) => foldBinaryOp(Float_+, rhs, lhs)
case (FloatLiteral(x),
BinaryOp(innerOp @ (Float_+ | Float_-), FloatLiteral(y), z)) =>
foldBinaryOp(innerOp, FloatLiteral(x+y), z)
case _ => default
}
case Float_- =>
(lhs, rhs) match {
case (_, FloatLiteral(r)) => foldBinaryOp(Float_+, lhs, FloatLiteral(-r))
case (FloatLiteral(x), BinaryOp(Float_+, FloatLiteral(y), z)) =>
foldBinaryOp(Float_-, FloatLiteral(x-y), z)
case (FloatLiteral(x), BinaryOp(Float_-, FloatLiteral(y), z)) =>
foldBinaryOp(Float_+, FloatLiteral(x-y), z)
case (_, BinaryOp(BinaryOp.Float_-, FloatLiteral(0), x)) =>
foldBinaryOp(Float_+, lhs, x)
case _ => default
}
case Float_* =>
(lhs, rhs) match {
case (FloatLiteral(l), FloatLiteral(r)) => FloatLiteral(l * r)
case (_, FloatLiteral(_)) => foldBinaryOp(Float_*, rhs, lhs)
case (FloatLiteral(1), _) => rhs
case (FloatLiteral(-1), _) => foldBinaryOp(Float_-, FloatLiteral(0), rhs)
case _ => default
}
case Float_/ =>
(lhs, rhs) match {
case (FloatLiteral(l), FloatLiteral(r)) => FloatLiteral(l / r)
case (_, FloatLiteral(1)) => lhs
case (_, FloatLiteral(-1)) => foldBinaryOp(Float_-, FloatLiteral(0), lhs)
case _ => default
}
case Float_% =>
(lhs, rhs) match {
case (FloatLiteral(l), FloatLiteral(r)) => FloatLiteral(l % r)
case _ => default
}
case Double_+ =>
(lhs, rhs) match {
case (NumberLiteral(l), NumberLiteral(r)) => DoubleLiteral(l + r)
case (NumberLiteral(0), _) => rhs
case (_, NumberLiteral(_)) => foldBinaryOp(Double_+, rhs, lhs)
case (NumberLiteral(x),
BinaryOp(innerOp @ (Double_+ | Double_-), NumberLiteral(y), z)) =>
foldBinaryOp(innerOp, DoubleLiteral(x+y), z)
case _ => default
}
case Double_- =>
(lhs, rhs) match {
case (_, NumberLiteral(r)) => foldBinaryOp(Double_+, lhs, DoubleLiteral(-r))
case (NumberLiteral(x), BinaryOp(Double_+, NumberLiteral(y), z)) =>
foldBinaryOp(Double_-, DoubleLiteral(x-y), z)
case (NumberLiteral(x), BinaryOp(Double_-, NumberLiteral(y), z)) =>
foldBinaryOp(Double_+, DoubleLiteral(x-y), z)
case (_, BinaryOp(BinaryOp.Double_-, NumberLiteral(0), x)) =>
foldBinaryOp(Double_+, lhs, x)
case _ => default
}
case Double_* =>
(lhs, rhs) match {
case (NumberLiteral(l), NumberLiteral(r)) => DoubleLiteral(l * r)
case (_, NumberLiteral(_)) => foldBinaryOp(Double_*, rhs, lhs)
case (NumberLiteral(1), _) => rhs
case (NumberLiteral(-1), _) => foldBinaryOp(Double_-, DoubleLiteral(0), rhs)
case _ => default
}
case Double_/ =>
(lhs, rhs) match {
case (NumberLiteral(l), NumberLiteral(r)) => DoubleLiteral(l / r)
case (_, NumberLiteral(1)) => lhs
case (_, NumberLiteral(-1)) => foldBinaryOp(Double_-, DoubleLiteral(0), lhs)
case _ => default
}
case Double_% =>
(lhs, rhs) match {
case (NumberLiteral(l), NumberLiteral(r)) => DoubleLiteral(l % r)
case _ => default
}
case Boolean_== | Boolean_!= =>
val positive = (op == Boolean_==)
(lhs, rhs) match {
case (BooleanLiteral(l), _) =>
if (l == positive) rhs
else foldUnaryOp(UnaryOp.Boolean_!, rhs)
case (_, BooleanLiteral(r)) =>
if (r == positive) lhs
else foldUnaryOp(UnaryOp.Boolean_!, lhs)
case _ =>
default
}
case Boolean_| =>
(lhs, rhs) match {
case (_, BooleanLiteral(false)) => lhs
case (BooleanLiteral(false), _) => rhs
case _ => default
}
case Boolean_& =>
(lhs, rhs) match {
case (_, BooleanLiteral(true)) => lhs
case (BooleanLiteral(true), _) => rhs
case _ => default
}
case Num_== | Num_!= =>
val positive = (op == Num_==)
(lhs, rhs) match {
case (lhs: Literal, rhs: Literal) =>
BooleanLiteral(literal_===(lhs, rhs) == positive)
case (BinaryOp(Int_+, IntLiteral(x), y), IntLiteral(z)) =>
foldBinaryOp(op, y, IntLiteral(z-x))
case (BinaryOp(Int_-, IntLiteral(x), y), IntLiteral(z)) =>
foldBinaryOp(op, y, IntLiteral(x-z))
case (_: Literal, _) => foldBinaryOp(op, rhs, lhs)
case _ => default
}
case Num_< | Num_<= | Num_> | Num_>= =>
def flippedOp = (op: @switch) match {
case Num_< => Num_>
case Num_<= => Num_>=
case Num_> => Num_<
case Num_>= => Num_<=
}
if (lhs.tpe == IntType && rhs.tpe == IntType) {
(lhs, rhs) match {
case (IntLiteral(l), IntLiteral(r)) =>
val result = (op: @switch) match {
case Num_< => l < r
case Num_<= => l <= r
case Num_> => l > r
case Num_>= => l >= r
}
BooleanLiteral(result)
case (_, IntLiteral(Int.MinValue)) =>
if (op == Num_< || op == Num_>=)
Block(keepOnlySideEffects(lhs), BooleanLiteral(op == Num_>=))
else
foldBinaryOp(if (op == Num_<=) Num_== else Num_!=, lhs, rhs)
case (_, IntLiteral(Int.MaxValue)) =>
if (op == Num_> || op == Num_<=)
Block(keepOnlySideEffects(lhs), BooleanLiteral(op == Num_<=))
else
foldBinaryOp(if (op == Num_>=) Num_== else Num_!=, lhs, rhs)
case (IntLiteral(_), _) => foldBinaryOp(flippedOp, rhs, lhs)
case _ => default
}
} else {
(lhs, rhs) match {
case (NumberLiteral(l), NumberLiteral(r)) =>
val result = (op: @switch) match {
case Num_< => l < r
case Num_<= => l <= r
case Num_> => l > r
case Num_>= => l >= r
}
BooleanLiteral(result)
case _ => default
}
}
case _ =>
default
}
}
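  /** Folds a three-way comparison whose set of possible outcomes has already
   *  been narrowed: e.g. (equal | lessThan) folds to `<=`, (lessThan |
   *  greaterThan) folds to `!=`, and "all outcomes" / "no outcome" collapse
   *  to a boolean constant while keeping the operands' side effects.
   */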
private def fold3WayComparison(canBeEqual: Boolean, canBeLessThan: Boolean,
canBeGreaterThan: Boolean, lhs: Tree, rhs: Tree)(
implicit pos: Position): Tree = {
import BinaryOp._
if (canBeEqual) {
if (canBeLessThan) {
if (canBeGreaterThan)
Block(keepOnlySideEffects(lhs), keepOnlySideEffects(rhs), BooleanLiteral(true))
else
foldBinaryOp(Num_<=, lhs, rhs)
} else {
if (canBeGreaterThan)
foldBinaryOp(Num_>=, lhs, rhs)
else
foldBinaryOp(Num_==, lhs, rhs)
}
} else {
if (canBeLessThan) {
if (canBeGreaterThan)
foldBinaryOp(Num_!=, lhs, rhs)
else
foldBinaryOp(Num_<, lhs, rhs)
} else {
if (canBeGreaterThan)
foldBinaryOp(Num_>, lhs, rhs)
else
Block(keepOnlySideEffects(lhs), keepOnlySideEffects(rhs), BooleanLiteral(false))
}
}
}
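  /** Folds away an Unbox when the argument already has the target type.
   *  The char codes are the usual JVM-style descriptors: 'Z' = Boolean,
   *  'I' = Int, 'F' = Float, 'J' = Long, 'D' = Double (for 'D', Int and
   *  Float arguments are also accepted, since they are valid doubles).
   */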
private def foldUnbox(arg: PreTransform, charCode: Char)(
cont: PreTransCont): TailRec[Tree] = {
(charCode: @switch) match {
case 'Z' if arg.tpe.base == BooleanType => cont(arg)
case 'I' if arg.tpe.base == IntType => cont(arg)
case 'F' if arg.tpe.base == FloatType => cont(arg)
case 'J' if arg.tpe.base == LongType => cont(arg)
case 'D' if arg.tpe.base == DoubleType ||
arg.tpe.base == IntType || arg.tpe.base == FloatType => cont(arg)
case _ =>
cont(PreTransTree(Unbox(finishTransformExpr(arg), charCode)(arg.pos)))
}
}
private def foldReferenceEquality(tlhs: PreTransform, trhs: PreTransform,
positive: Boolean = true)(implicit pos: Position): Tree = {
(tlhs, trhs) match {
case (_, PreTransTree(Null(), _)) if !tlhs.tpe.isNullable =>
Block(
finishTransformStat(tlhs),
BooleanLiteral(!positive))
case (PreTransTree(Null(), _), _) if !trhs.tpe.isNullable =>
Block(
finishTransformStat(trhs),
BooleanLiteral(!positive))
case _ =>
foldBinaryOp(if (positive) BinaryOp.=== else BinaryOp.!==,
finishTransformExpr(tlhs), finishTransformExpr(trhs))
}
}
private def foldJSBracketSelect(qualifier: Tree, item: Tree)(
implicit pos: Position): Tree = {
// !!! Must be in sync with scala.scalajs.runtime.LinkingInfo
@inline def default =
JSBracketSelect(qualifier, item)
(qualifier, item) match {
case (JSBracketSelect(JSLinkingInfo(), StringLiteral("semantics")),
StringLiteral(semanticsStr)) =>
def behavior2IntLiteral(behavior: CheckedBehavior) = {
IntLiteral(behavior match {
case CheckedBehavior.Compliant => 0
case CheckedBehavior.Fatal => 1
case CheckedBehavior.Unchecked => 2
})
}
semanticsStr match {
case "asInstanceOfs" =>
behavior2IntLiteral(semantics.asInstanceOfs)
case "moduleInit" =>
behavior2IntLiteral(semantics.moduleInit)
case "strictFloats" =>
BooleanLiteral(semantics.strictFloats)
case _ =>
default
}
case (JSLinkingInfo(), StringLiteral("assumingES6")) =>
BooleanLiteral(outputMode match {
case OutputMode.ECMAScript51Global | OutputMode.ECMAScript51Isolated =>
false
case OutputMode.ECMAScript6 | OutputMode.ECMAScript6StrongMode =>
true
})
case _ =>
default
}
}
private def finishTransformCheckNull(preTrans: PreTransform)(
implicit pos: Position): Tree = {
if (preTrans.tpe.isNullable) {
val transformed = finishTransformExpr(preTrans)
CallHelper("checkNonNull", transformed)(transformed.tpe)
} else {
finishTransformExpr(preTrans)
}
}
def transformIsolatedBody(optTarget: Option[MethodID],
thisType: Type, params: List[ParamDef], resultType: Type,
body: Tree): (List[ParamDef], Tree) = {
val (paramLocalDefs, newParamDefs) = (for {
p @ ParamDef(ident @ Ident(name, originalName), ptpe, mutable, rest) <- params
} yield {
val newName = freshLocalName(name, mutable)
val newOriginalName = originalName.orElse(Some(newName))
val localDef = LocalDef(RefinedType(ptpe), mutable,
ReplaceWithVarRef(newName, newOriginalName, newSimpleState(true), None))
val newParamDef = ParamDef(
Ident(newName, newOriginalName)(ident.pos), ptpe, mutable, rest)(p.pos)
((name -> localDef), newParamDef)
}).unzip
val thisLocalDef =
if (thisType == NoType) None
else {
Some("this" -> LocalDef(
RefinedType(thisType, isExact = false, isNullable = false),
false, ReplaceWithThis()))
}
val allLocalDefs = thisLocalDef ++: paramLocalDefs
val allocationSites = List.fill(allLocalDefs.size)(None)
val scope0 = optTarget.fold(Scope.Empty)(
target => Scope.Empty.inlining((allocationSites, target)))
val scope = scope0.withEnv(OptEnv.Empty.withLocalDefs(allLocalDefs))
val newBody =
transform(body, resultType == NoType)(scope)
(newParamDefs, newBody)
}
private def returnable(oldLabelName: String, resultType: Type,
body: Tree, isStat: Boolean, usePreTransform: Boolean)(
cont: PreTransCont)(
implicit scope: Scope, pos: Position): TailRec[Tree] = tailcall {
val newLabel = freshLabelName(
if (oldLabelName.isEmpty) "inlinereturn" else oldLabelName)
def doMakeTree(newBody: Tree, returnedTypes: List[Type]): Tree = {
val refinedType =
returnedTypes.reduce(constrainedLub(_, _, resultType))
val returnCount = returnedTypes.size - 1
tryOptimizePatternMatch(oldLabelName, refinedType,
returnCount, newBody) getOrElse {
Labeled(Ident(newLabel, None), refinedType, newBody)
}
}
val info = new LabelInfo(newLabel, acceptRecords = usePreTransform,
returnedTypes = newSimpleState(Nil))
val bodyScope = scope.withEnv(scope.env.withLabelInfo(oldLabelName, info))
if (usePreTransform) {
assert(!isStat, "Cannot use pretransform in statement position")
tryOrRollback { cancelFun =>
pretransformExpr(body) { tbody0 =>
val returnedTypes0 = info.returnedTypes.value
if (returnedTypes0.isEmpty) {
// no return to that label, we can eliminate it
cont(tbody0)
} else {
val tbody = resolveLocalDef(tbody0)
val (newBody, returnedTypes) = tbody match {
case PreTransRecordTree(bodyTree, origType, _) =>
(bodyTree, (bodyTree.tpe, origType) :: returnedTypes0)
case PreTransTree(bodyTree, tpe) =>
(bodyTree, (bodyTree.tpe, tpe) :: returnedTypes0)
}
val (actualTypes, origTypes) = returnedTypes.unzip
val refinedOrigType =
origTypes.reduce(constrainedLub(_, _, resultType))
actualTypes.collectFirst {
case actualType: RecordType => actualType
}.fold[TailRec[Tree]] {
// None of the returned types are records
cont(PreTransTree(
doMakeTree(newBody, actualTypes), refinedOrigType))
} { recordType =>
if (actualTypes.exists(t => t != recordType && t != NothingType))
cancelFun()
val resultTree = doMakeTree(newBody, actualTypes)
if (origTypes.exists(t => t != refinedOrigType && !t.isNothingType))
cancelFun()
cont(PreTransRecordTree(resultTree, refinedOrigType, cancelFun))
}
}
} (bodyScope)
} { () =>
returnable(oldLabelName, resultType, body, isStat,
usePreTransform = false)(cont)
}
} else {
val newBody = transform(body, isStat)(bodyScope)
val returnedTypes0 = info.returnedTypes.value.map(_._1)
if (returnedTypes0.isEmpty) {
// no return to that label, we can eliminate it
cont(PreTransTree(newBody, RefinedType(newBody.tpe)))
} else {
val returnedTypes = newBody.tpe :: returnedTypes0
val tree = doMakeTree(newBody, returnedTypes)
cont(PreTransTree(tree, RefinedType(tree.tpe)))
}
}
}
  /** Tries to optimize what remains of a pattern match as if/elses.
   *
   *  !!! There is quite a bit of code duplication with
   *      GenJSCode.genOptimizedLabeled.
   */
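  // Illustrative sketch (pseudo-IR): a matchEnd-labeled body of the shape
  //   { if (c1) { ...; return@matchEnd r1 }; if (c2) { ...; return@matchEnd r2 }; r3 }
  // is rebuilt as the single expression
  //   if (c1) { ...; r1 } else if (c2) { ...; r2 } else r3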
def tryOptimizePatternMatch(oldLabelName: String, refinedType: Type,
returnCount: Int, newBody: Tree): Option[Tree] = {
if (!oldLabelName.startsWith("matchEnd")) None
else {
newBody match {
case Block(stats) =>
@tailrec
def createRevAlts(xs: List[Tree], acc: List[(Tree, Tree)]): List[(Tree, Tree)] = xs match {
case If(cond, body, Skip()) :: xr =>
createRevAlts(xr, (cond, body) :: acc)
case remaining =>
(EmptyTree, Block(remaining)(remaining.head.pos)) :: acc
}
val revAlts = createRevAlts(stats, Nil)
if (revAlts.size == returnCount) {
@tailrec
def constructOptimized(revAlts: List[(Tree, Tree)], elsep: Tree): Option[Tree] = {
revAlts match {
case (cond, body) :: revAltsRest =>
body match {
case BlockOrAlone(prep,
Return(result, Some(Ident(newLabel, _)))) =>
val result1 =
if (refinedType == NoType) keepOnlySideEffects(result)
else result
val prepAndResult = Block(prep :+ result1)(body.pos)
if (cond == EmptyTree) {
assert(elsep == EmptyTree)
constructOptimized(revAltsRest, prepAndResult)
} else {
assert(elsep != EmptyTree)
constructOptimized(revAltsRest,
foldIf(cond, prepAndResult, elsep)(refinedType)(cond.pos))
}
case _ =>
None
}
case Nil =>
Some(elsep)
}
}
constructOptimized(revAlts, EmptyTree)
} else None
case _ =>
None
}
}
}
private def withBindings(bindings: List[Binding])(
buildInner: (Scope, PreTransCont) => TailRec[Tree])(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
withNewLocalDefs(bindings) { (localDefs, cont1) =>
val newMappings = for {
(binding, localDef) <- bindings zip localDefs
} yield {
binding.name -> localDef
}
buildInner(scope.withEnv(scope.env.withLocalDefs(newMappings)), cont1)
} (cont)
}
private def withBinding(binding: Binding)(
buildInner: (Scope, PreTransCont) => TailRec[Tree])(
cont: PreTransCont)(
implicit scope: Scope): TailRec[Tree] = {
withNewLocalDef(binding) { (localDef, cont1) =>
buildInner(scope.withEnv(scope.env.withLocalDef(binding.name, localDef)),
cont1)
} (cont)
}
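  /** Introduces a LocalDef for each binding, in order and in CPS style,
   *  and passes the resulting list to `buildInner`.
   */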
private def withNewLocalDefs(bindings: List[Binding])(
buildInner: (List[LocalDef], PreTransCont) => TailRec[Tree])(
cont: PreTransCont): TailRec[Tree] = {
bindings match {
case first :: rest =>
withNewLocalDef(first) { (firstLocalDef, cont1) =>
withNewLocalDefs(rest) { (restLocalDefs, cont2) =>
buildInner(firstLocalDef :: restLocalDefs, cont2)
} (cont1)
} (cont)
case Nil =>
buildInner(Nil, cont)
}
}
private def isImmutableType(tpe: Type): Boolean = tpe match {
case RecordType(fields) =>
fields.forall(f => !f.mutable && isImmutableType(f.tpe))
case _ =>
true
}
private def withNewLocalDef(binding: Binding)(
buildInner: (LocalDef, PreTransCont) => TailRec[Tree])(
cont: PreTransCont): TailRec[Tree] = tailcall {
val Binding(name, originalName, declaredType, mutable, value) = binding
implicit val pos = value.pos
def withDedicatedVar(tpe: RefinedType): TailRec[Tree] = {
val newName = freshLocalName(name, mutable)
val newOriginalName = originalName.orElse(Some(name))
val used = newSimpleState(false)
def doBuildInner(localDef: LocalDef)(varDef: => VarDef)(
cont: PreTransCont): TailRec[Tree] = {
buildInner(localDef, { tinner =>
if (used.value) {
cont(PreTransBlock(varDef :: Nil, tinner))
} else {
tinner match {
case PreTransLocalDef(`localDef`) =>
cont(value)
case _ if tinner.contains(localDef) =>
cont(PreTransBlock(varDef :: Nil, tinner))
case _ =>
val rhsSideEffects = finishTransformStat(value)
rhsSideEffects match {
case Skip() =>
cont(tinner)
case _ =>
if (rhsSideEffects.tpe == NothingType)
cont(PreTransTree(rhsSideEffects, RefinedType.Nothing))
else
cont(PreTransBlock(rhsSideEffects :: Nil, tinner))
}
}
}
})
}
resolveLocalDef(value) match {
case PreTransRecordTree(valueTree, valueTpe, cancelFun) =>
val recordType = valueTree.tpe.asInstanceOf[RecordType]
if (!isImmutableType(recordType))
cancelFun()
val localDef = LocalDef(valueTpe, mutable,
ReplaceWithRecordVarRef(newName, newOriginalName, recordType,
used, cancelFun))
doBuildInner(localDef) {
VarDef(Ident(newName, newOriginalName), recordType, mutable,
valueTree)
} (cont)
case PreTransTree(valueTree, valueTpe) =>
def doDoBuildInner(optValueTree: Option[() => Tree])(
cont1: PreTransCont) = {
val localDef = LocalDef(tpe, mutable, ReplaceWithVarRef(
newName, newOriginalName, used, optValueTree))
doBuildInner(localDef) {
VarDef(Ident(newName, newOriginalName), tpe.base, mutable,
optValueTree.fold(valueTree)(_()))
} (cont1)
}
if (mutable) {
doDoBuildInner(None)(cont)
} else (valueTree match {
case LongFromInt(arg) =>
withNewLocalDef(
Binding("x", None, IntType, false, PreTransTree(arg))) {
(intLocalDef, cont1) =>
doDoBuildInner(Some(
() => LongFromInt(intLocalDef.newReplacement)))(
cont1)
} (cont)
case BinaryOp(op @ (BinaryOp.Long_+ | BinaryOp.Long_-),
LongFromInt(intLhs), LongFromInt(intRhs)) =>
withNewLocalDefs(List(
Binding("x", None, IntType, false, PreTransTree(intLhs)),
Binding("y", None, IntType, false, PreTransTree(intRhs)))) {
(intLocalDefs, cont1) =>
val List(lhsLocalDef, rhsLocalDef) = intLocalDefs
doDoBuildInner(Some(
() => BinaryOp(op,
LongFromInt(lhsLocalDef.newReplacement),
LongFromInt(rhsLocalDef.newReplacement))))(
cont1)
} (cont)
case _ =>
doDoBuildInner(None)(cont)
})
}
}
if (value.tpe.isNothingType) {
cont(value)
} else if (mutable) {
withDedicatedVar(RefinedType(declaredType))
} else {
val refinedType = value.tpe
value match {
case PreTransBlock(stats, result) =>
withNewLocalDef(binding.copy(value = result))(buildInner) { tresult =>
cont(PreTransBlock(stats, tresult))
}
case PreTransLocalDef(localDef) if !localDef.mutable =>
buildInner(localDef, cont)
case PreTransTree(literal: Literal, _) =>
buildInner(LocalDef(refinedType, false,
ReplaceWithConstant(literal)), cont)
case PreTransTree(VarRef(Ident(refName, refOriginalName)), _)
if !localIsMutable(refName) =>
buildInner(LocalDef(refinedType, false,
ReplaceWithVarRef(refName, refOriginalName,
newSimpleState(true), None)), cont)
case _ =>
withDedicatedVar(refinedType)
}
}
}
/** Finds a type as precise as possible which is a supertype of lhs and rhs
* but still a subtype of upperBound.
* Requires that lhs and rhs be subtypes of upperBound, obviously.
*/
private def constrainedLub(lhs: RefinedType, rhs: RefinedType,
upperBound: Type): RefinedType = {
if (upperBound == NoType) RefinedType(upperBound)
else if (lhs == rhs) lhs
else if (lhs.isNothingType) rhs
else if (rhs.isNothingType) lhs
else {
RefinedType(constrainedLub(lhs.base, rhs.base, upperBound),
false, lhs.isNullable || rhs.isNullable)
}
}
/** Finds a type as precise as possible which is a supertype of lhs and rhs
* but still a subtype of upperBound.
* Requires that lhs and rhs be subtypes of upperBound, obviously.
*/
private def constrainedLub(lhs: Type, rhs: Type, upperBound: Type): Type = {
// TODO Improve this
if (upperBound == NoType) upperBound
else if (lhs == rhs) lhs
else if (lhs == NothingType) rhs
else if (rhs == NothingType) lhs
else upperBound
}
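  // For instance (assuming the usual IR types): constrainedLub(IntType,
  // NothingType, AnyType) yields IntType, while
  // constrainedLub(IntType, StringType, AnyType) falls back to the upper
  // bound AnyType.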
  /** Trampolines a pretransform, restarting from the saved continuation
   *  whenever a speculative optimization is rolled back (see
   *  RollbackException below).
   */
private def trampoline(tailrec: => TailRec[Tree]): Tree = {
curTrampolineId += 1
val myTrampolineId = curTrampolineId
try {
var rec = () => tailrec
while (true) {
try {
return rec().result
} catch {
case e: RollbackException if e.trampolineId == myTrampolineId =>
rollbacksCount += 1
if (rollbacksCount > MaxRollbacksPerMethod)
throw new TooManyRollbacksException
usedLocalNames.clear()
usedLocalNames ++= e.savedUsedLocalNames
usedLabelNames.clear()
usedLabelNames ++= e.savedUsedLabelNames
e.stateBackups.foreach(_.restore())
rec = e.cont
}
}
sys.error("Reached end of infinite loop")
} finally {
curTrampolineId -= 1
}
}
}
private[optimizer] object OptimizerCore {
private final val MaxRollbacksPerMethod = 256
private final class TooManyRollbacksException
extends scala.util.control.ControlThrowable
private val AnonFunctionClassPrefix = "sjsr_AnonFunction"
private type CancelFun = () => Nothing
private type PreTransCont = PreTransform => TailRec[Tree]
private case class RefinedType private (base: Type, isExact: Boolean,
isNullable: Boolean)(
val allocationSite: Option[AllocationSite], dummy: Int = 0) {
def isNothingType: Boolean = base == NothingType
}
private object RefinedType {
def apply(base: Type, isExact: Boolean, isNullable: Boolean,
allocationSite: Option[AllocationSite]): RefinedType =
new RefinedType(base, isExact, isNullable)(allocationSite)
def apply(base: Type, isExact: Boolean, isNullable: Boolean): RefinedType =
RefinedType(base, isExact, isNullable, None)
def apply(tpe: Type): RefinedType = tpe match {
case IntType | FloatType | DoubleType =>
RefinedType(tpe, isExact = false, isNullable = false)
case BooleanType | StringType | UndefType | NothingType |
_:RecordType | NoType =>
RefinedType(tpe, isExact = true, isNullable = false)
case NullType =>
RefinedType(tpe, isExact = true, isNullable = true)
case _ =>
RefinedType(tpe, isExact = false, isNullable = true)
}
val NoRefinedType = RefinedType(NoType)
val Nothing = RefinedType(NothingType)
}
private class AllocationSite(private val node: Tree) {
override def equals(that: Any): Boolean = that match {
case that: AllocationSite => this.node eq that.node
case _ => false
}
override def hashCode(): Int =
System.identityHashCode(node)
override def toString(): String =
s"AllocationSite($node)"
}
private case class LocalDef(
tpe: RefinedType,
mutable: Boolean,
replacement: LocalDefReplacement) {
def newReplacement(implicit pos: Position): Tree = replacement match {
case ReplaceWithVarRef(name, originalName, used, _) =>
used.value = true
VarRef(Ident(name, originalName))(tpe.base)
case ReplaceWithRecordVarRef(_, _, _, _, cancelFun) =>
cancelFun()
case ReplaceWithThis() =>
This()(tpe.base)
case ReplaceWithConstant(value) =>
value
case TentativeClosureReplacement(_, _, _, _, _, cancelFun) =>
cancelFun()
case InlineClassBeingConstructedReplacement(_, cancelFun) =>
cancelFun()
case InlineClassInstanceReplacement(_, _, cancelFun) =>
cancelFun()
}
def contains(that: LocalDef): Boolean = {
(this eq that) || (replacement match {
case TentativeClosureReplacement(_, _, _, captureLocalDefs, _, _) =>
captureLocalDefs.exists(_.contains(that))
case InlineClassInstanceReplacement(_, fieldLocalDefs, _) =>
fieldLocalDefs.valuesIterator.exists(_.contains(that))
case _ =>
false
})
}
}
private sealed abstract class LocalDefReplacement
private final case class ReplaceWithVarRef(name: String,
originalName: Option[String],
used: SimpleState[Boolean],
longOpTree: Option[() => Tree]) extends LocalDefReplacement
private final case class ReplaceWithRecordVarRef(name: String,
originalName: Option[String],
recordType: RecordType,
used: SimpleState[Boolean],
cancelFun: CancelFun) extends LocalDefReplacement
private final case class ReplaceWithThis() extends LocalDefReplacement
private final case class ReplaceWithConstant(
value: Tree) extends LocalDefReplacement
private final case class TentativeClosureReplacement(
captureParams: List[ParamDef], params: List[ParamDef], body: Tree,
captureValues: List[LocalDef],
alreadyUsed: SimpleState[Boolean],
cancelFun: CancelFun) extends LocalDefReplacement
private final case class InlineClassBeingConstructedReplacement(
fieldLocalDefs: Map[String, LocalDef],
cancelFun: CancelFun) extends LocalDefReplacement
private final case class InlineClassInstanceReplacement(
recordType: RecordType,
fieldLocalDefs: Map[String, LocalDef],
cancelFun: CancelFun) extends LocalDefReplacement
private final class LabelInfo(
val newName: String,
val acceptRecords: Boolean,
/** (actualType, originalType), actualType can be a RecordType. */
val returnedTypes: SimpleState[List[(Type, RefinedType)]])
private class OptEnv(
val localDefs: Map[String, LocalDef],
val labelInfos: Map[String, LabelInfo]) {
def withLocalDef(oldName: String, rep: LocalDef): OptEnv =
new OptEnv(localDefs + (oldName -> rep), labelInfos)
def withLocalDefs(reps: List[(String, LocalDef)]): OptEnv =
new OptEnv(localDefs ++ reps, labelInfos)
def withLabelInfo(oldName: String, info: LabelInfo): OptEnv =
new OptEnv(localDefs, labelInfos + (oldName -> info))
def withinFunction(paramLocalDefs: List[(String, LocalDef)]): OptEnv =
new OptEnv(localDefs ++ paramLocalDefs, Map.empty)
override def toString(): String = {
"localDefs:"+localDefs.mkString("\n ", "\n ", "\n") +
"labelInfos:"+labelInfos.mkString("\n ", "\n ", "")
}
}
private object OptEnv {
val Empty: OptEnv = new OptEnv(Map.empty, Map.empty)
}
private class Scope(val env: OptEnv,
val implsBeingInlined: Set[(List[Option[AllocationSite]], AbstractMethodID)]) {
def withEnv(env: OptEnv): Scope =
new Scope(env, implsBeingInlined)
def inlining(impl: (List[Option[AllocationSite]],
AbstractMethodID)): Scope = {
assert(!implsBeingInlined(impl), s"Circular inlining of $impl")
new Scope(env, implsBeingInlined + impl)
}
}
private object Scope {
val Empty: Scope = new Scope(OptEnv.Empty, Set.empty)
}
/** The result of pretransformExpr().
* It has a `tpe` as precisely refined as if a full transformExpr() had
* been performed.
* It is also not dependent on the environment anymore. In some sense, it
* has "captured" its environment at definition site.
*/
private sealed abstract class PreTransform {
def pos: Position
val tpe: RefinedType
def contains(localDef: LocalDef): Boolean = this match {
case PreTransBlock(_, result) =>
result.contains(localDef)
case PreTransLocalDef(thisLocalDef) =>
thisLocalDef.contains(localDef)
case _ =>
false
}
}
private final class PreTransBlock private (val stats: List[Tree],
val result: PreTransLocalDef) extends PreTransform {
def pos = result.pos
val tpe = result.tpe
assert(stats.nonEmpty)
override def toString(): String =
s"PreTransBlock($stats,$result)"
}
private object PreTransBlock {
def apply(stats: List[Tree], result: PreTransform): PreTransform = {
if (stats.isEmpty) result
else {
result match {
case PreTransBlock(innerStats, innerResult) =>
new PreTransBlock(stats ++ innerStats, innerResult)
case result: PreTransLocalDef =>
new PreTransBlock(stats, result)
case PreTransRecordTree(tree, tpe, cancelFun) =>
PreTransRecordTree(Block(stats :+ tree)(tree.pos), tpe, cancelFun)
case PreTransTree(tree, tpe) =>
PreTransTree(Block(stats :+ tree)(tree.pos), tpe)
}
}
}
def unapply(preTrans: PreTransBlock): Some[(List[Tree], PreTransLocalDef)] =
Some(preTrans.stats, preTrans.result)
}
private sealed abstract class PreTransNoBlock extends PreTransform
private final case class PreTransLocalDef(localDef: LocalDef)(
implicit val pos: Position) extends PreTransNoBlock {
val tpe: RefinedType = localDef.tpe
}
private sealed abstract class PreTransGenTree extends PreTransNoBlock
private final case class PreTransRecordTree(tree: Tree,
tpe: RefinedType, cancelFun: CancelFun) extends PreTransGenTree {
def pos = tree.pos
assert(tree.tpe.isInstanceOf[RecordType],
s"Cannot create a PreTransRecordTree with non-record type ${tree.tpe}")
}
private final case class PreTransTree(tree: Tree,
tpe: RefinedType) extends PreTransGenTree {
def pos: Position = tree.pos
assert(!tree.tpe.isInstanceOf[RecordType],
s"Cannot create a Tree with record type ${tree.tpe}")
}
private object PreTransTree {
def apply(tree: Tree): PreTransTree = {
val refinedTpe: RefinedType = tree match {
case BlockOrAlone(_,
_:LoadModule | _:NewArray | _:ArrayValue | _:GetClass |
_:ClassOf) =>
RefinedType(tree.tpe, isExact = true, isNullable = false)
case _ =>
RefinedType(tree.tpe)
}
PreTransTree(tree, refinedTpe)
}
}
private final case class Binding(name: String, originalName: Option[String],
declaredType: Type, mutable: Boolean, value: PreTransform)
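  /** Extractor that widens Int, Float and Double literals to their Double value. */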
private object NumberLiteral {
def unapply(tree: Literal): Option[Double] = tree match {
case DoubleLiteral(v) => Some(v)
case IntLiteral(v) => Some(v.toDouble)
case FloatLiteral(v) => Some(v.toDouble)
case _ => None
}
}
private object LongFromInt {
def apply(x: Tree)(implicit pos: Position): Tree = x match {
case IntLiteral(v) => LongLiteral(v)
case _ => UnaryOp(UnaryOp.IntToLong, x)
}
def unapply(tree: Tree): Option[Tree] = tree match {
case LongLiteral(v) if v.toInt == v => Some(IntLiteral(v.toInt)(tree.pos))
case UnaryOp(UnaryOp.IntToLong, x) => Some(x)
case _ => None
}
}
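  /** Builds the short-circuiting `lhs && rhs`, i.e. `if (lhs) rhs else false`. */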
private object AndThen {
def apply(lhs: Tree, rhs: Tree)(implicit pos: Position): Tree =
If(lhs, rhs, BooleanLiteral(false))(BooleanType)
}
/** Tests whether `x + y` is valid without falling out of range. */
private def canAddLongs(x: Long, y: Long): Boolean =
if (y >= 0) x+y >= x
else x+y < x
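  // e.g. canAddLongs(Long.MaxValue, 1L) == false (would wrap around),
  // while canAddLongs(-1L, 1L) == true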
/** Tests whether `x - y` is valid without falling out of range. */
private def canSubtractLongs(x: Long, y: Long): Boolean =
if (y >= 0) x-y <= x
else x-y > x
/** Tests whether `-x` is valid without falling out of range. */
private def canNegateLong(x: Long): Boolean =
x != Long.MinValue
private object Intrinsics {
final val ArrayCopy = 1
final val IdentityHashCode = ArrayCopy + 1
final val ArrayApply = IdentityHashCode + 1
final val ArrayUpdate = ArrayApply + 1
final val ArrayLength = ArrayUpdate + 1
final val PropertiesOf = ArrayLength + 1
final val IntegerNLZ = PropertiesOf + 1
final val LongToString = IntegerNLZ + 1
final val LongCompare = LongToString + 1
final val LongDivideUnsigned = LongCompare + 1
final val LongRemainderUnsigned = LongDivideUnsigned + 1
final val LongBitCount = LongRemainderUnsigned + 1
final val LongSignum = LongBitCount + 1
final val LongLeading0s = LongSignum + 1
final val LongTrailing0s = LongLeading0s + 1
final val LongToBinStr = LongTrailing0s + 1
final val LongToHexStr = LongToBinStr + 1
final val LongToOctalStr = LongToHexStr + 1
final val ArrayBuilderZeroOf = LongToOctalStr + 1
final val GenericArrayBuilderResult = ArrayBuilderZeroOf + 1
final val ClassGetComponentType = GenericArrayBuilderResult + 1
final val ArrayNewInstance = ClassGetComponentType + 1
final val ByteArrayToInt8Array = ArrayNewInstance + 1
final val ShortArrayToInt16Array = ByteArrayToInt8Array + 1
final val CharArrayToUint16Array = ShortArrayToInt16Array + 1
final val IntArrayToInt32Array = CharArrayToUint16Array + 1
final val FloatArrayToFloat32Array = IntArrayToInt32Array + 1
final val DoubleArrayToFloat64Array = FloatArrayToFloat32Array + 1
final val Int8ArrayToByteArray = DoubleArrayToFloat64Array + 1
final val Int16ArrayToShortArray = Int8ArrayToByteArray + 1
final val Uint16ArrayToCharArray = Int16ArrayToShortArray + 1
final val Int32ArrayToIntArray = Uint16ArrayToCharArray + 1
final val Float32ArrayToFloatArray = Int32ArrayToIntArray + 1
final val Float64ArrayToDoubleArray = Float32ArrayToFloatArray + 1
val intrinsics: Map[String, Int] = Map(
"jl_System$.arraycopy__O__I__O__I__I__V" -> ArrayCopy,
"jl_System$.identityHashCode__O__I" -> IdentityHashCode,
"sr_ScalaRunTime$.array$undapply__O__I__O" -> ArrayApply,
"sr_ScalaRunTime$.array$undupdate__O__I__O__V" -> ArrayUpdate,
"sr_ScalaRunTime$.array$undlength__O__I" -> ArrayLength,
"sjsr_package$.propertiesOf__sjs_js_Any__sjs_js_Array" -> PropertiesOf,
"jl_Integer$.numberOfLeadingZeros__I__I" -> IntegerNLZ,
"jl_Long$.toString__J__T" -> LongToString,
"jl_Long$.compare__J__J__I" -> LongCompare,
"jl_Long$.divideUnsigned__J__J__J" -> LongDivideUnsigned,
"jl_Long$.remainderUnsigned__J__J__J" -> LongRemainderUnsigned,
"jl_Long$.bitCount__J__I" -> LongBitCount,
"jl_Long$.signum__J__J" -> LongSignum,
"jl_Long$.numberOfLeadingZeros__J__I" -> LongLeading0s,
"jl_Long$.numberOfTrailingZeros__J__I" -> LongTrailing0s,
"jl_long$.toBinaryString__J__T" -> LongToBinStr,
"jl_Long$.toHexString__J__T" -> LongToHexStr,
"jl_Long$.toOctalString__J__T" -> LongToOctalStr,
"scm_ArrayBuilder$.scala$collection$mutable$ArrayBuilder$$zeroOf__jl_Class__O" -> ArrayBuilderZeroOf,
"scm_ArrayBuilder$.scala$collection$mutable$ArrayBuilder$$genericArrayBuilderResult__jl_Class__sjs_js_Array__O" -> GenericArrayBuilderResult,
"jl_Class.getComponentType__jl_Class" -> ClassGetComponentType,
"jl_reflect_Array$.newInstance__jl_Class__I__O" -> ArrayNewInstance,
"sjs_js_typedarray_package$.byteArray2Int8Array__AB__sjs_js_typedarray_Int8Array" -> ByteArrayToInt8Array,
"sjs_js_typedarray_package$.shortArray2Int16Array__AS__sjs_js_typedarray_Int16Array" -> ShortArrayToInt16Array,
"sjs_js_typedarray_package$.charArray2Uint16Array__AC__sjs_js_typedarray_Uint16Array" -> CharArrayToUint16Array,
"sjs_js_typedarray_package$.intArray2Int32Array__AI__sjs_js_typedarray_Int32Array" -> IntArrayToInt32Array,
"sjs_js_typedarray_package$.floatArray2Float32Array__AF__sjs_js_typedarray_Float32Array" -> FloatArrayToFloat32Array,
"sjs_js_typedarray_package$.doubleArray2Float64Array__AD__sjs_js_typedarray_Float64Array" -> DoubleArrayToFloat64Array,
"sjs_js_typedarray_package$.int8Array2ByteArray__sjs_js_typedarray_Int8Array__AB" -> Int8ArrayToByteArray,
"sjs_js_typedarray_package$.int16Array2ShortArray__sjs_js_typedarray_Int16Array__AS" -> Int16ArrayToShortArray,
"sjs_js_typedarray_package$.uint16Array2CharArray__sjs_js_typedarray_Uint16Array__AC" -> Uint16ArrayToCharArray,
"sjs_js_typedarray_package$.int32Array2IntArray__sjs_js_typedarray_Int32Array__AI" -> Int32ArrayToIntArray,
"sjs_js_typedarray_package$.float32Array2FloatArray__sjs_js_typedarray_Float32Array__AF" -> Float32ArrayToFloatArray,
"sjs_js_typedarray_package$.float64Array2DoubleArray__sjs_js_typedarray_Float64Array__AD" -> Float64ArrayToDoubleArray
).withDefaultValue(-1)
}
private def getIntrinsicCode(target: AbstractMethodID): Int =
Intrinsics.intrinsics(target.toString)
private trait StateBackup {
def restore(): Unit
}
private trait State {
def makeBackup(): StateBackup
}
private class SimpleState[A](var value: A) extends State {
private class Backup(savedValue: A) extends StateBackup {
override def restore(): Unit = value = savedValue
}
def makeBackup(): StateBackup = new Backup(value)
}
trait AbstractMethodID {
def inlineable: Boolean
def shouldInline: Boolean
def isForwarder: Boolean
}
/** Parts of [[GenIncOptimizer#MethodImpl]] with decisions about optimizations. */
abstract class MethodImpl {
def encodedName: String
def optimizerHints: OptimizerHints
def originalDef: MethodDef
def thisType: Type
var inlineable: Boolean = false
var shouldInline: Boolean = false
var isForwarder: Boolean = false
protected def updateInlineable(): Unit = {
val MethodDef(_, Ident(methodName, _), params, _, body) = originalDef
isForwarder = body match {
// Shape of forwarders to trait impls
case ApplyStatic(impl, method, args) =>
((args.size == params.size + 1) &&
(args.head.isInstanceOf[This]) &&
(args.tail.zip(params).forall {
case (VarRef(Ident(aname, _)),
ParamDef(Ident(pname, _), _, _, _)) => aname == pname
case _ => false
}))
// Shape of bridges for generic methods
case MaybeBox(Apply(This(), method, args), _) =>
(args.size == params.size) &&
args.zip(params).forall {
case (MaybeUnbox(VarRef(Ident(aname, _)), _),
ParamDef(Ident(pname, _), _, _, _)) => aname == pname
case _ => false
}
case _ => false
}
inlineable = !optimizerHints.noinline
shouldInline = inlineable && {
optimizerHints.inline || isForwarder || {
val MethodDef(_, _, params, _, body) = originalDef
body match {
case _:Skip | _:This | _:Literal => true
// Shape of accessors
case Select(This(), _) if params.isEmpty => true
case Assign(Select(This(), _), VarRef(_))
if params.size == 1 => true
// Shape of trivial call-super constructors
case Block(stats)
if params.isEmpty && isConstructorName(encodedName) &&
stats.forall(isTrivialConstructorStat) => true
// Simple method
case SimpleMethodBody() => true
case _ => false
}
}
}
}
}
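  /** Matches a possibly-boxed value: the BoxesRunTime char-boxing call yields
   *  (arg, "C"); any other tree is returned unchanged as (tree, ()).
   */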
private object MaybeBox {
def unapply(tree: Tree): Some[(Tree, Any)] = tree match {
case Apply(LoadModule(ClassType("sr_BoxesRunTime$")),
Ident("boxToCharacter__C__jl_Character", _), List(arg)) =>
Some((arg, "C"))
case _ =>
Some((tree, ()))
}
}
private object MaybeUnbox {
def unapply(tree: Tree): Some[(Tree, Any)] = tree match {
case AsInstanceOf(arg, tpe) =>
Some((arg, tpe))
case Unbox(arg, charCode) =>
Some((arg, charCode))
case Apply(LoadModule(ClassType("sr_BoxesRunTime$")),
Ident("unboxToChar__O__C", _), List(arg)) =>
Some((arg, "C"))
case _ =>
Some((tree, ()))
}
}
private def isTrivialConstructorStat(stat: Tree): Boolean = stat match {
case This() =>
true
case ApplyStatically(This(), _, _, Nil) =>
true
case ApplyStatic(_, Ident(methodName, _), This() :: Nil) =>
methodName.startsWith("$$init$__")
case _ =>
false
}
private object SimpleMethodBody {
@tailrec
def unapply(body: Tree): Boolean = body match {
case New(_, _, args) => areSimpleArgs(args)
case Apply(receiver, _, args) => areSimpleArgs(receiver :: args)
case ApplyStatically(receiver, _, _, args) => areSimpleArgs(receiver :: args)
case ApplyStatic(_, _, args) => areSimpleArgs(args)
case Select(qual, _) => isSimpleArg(qual)
case IsInstanceOf(inner, _) => isSimpleArg(inner)
case Block(List(inner, Undefined())) =>
unapply(inner)
case Unbox(inner, _) => unapply(inner)
case AsInstanceOf(inner, _) => unapply(inner)
case _ => isSimpleArg(body)
}
private def areSimpleArgs(args: List[Tree]): Boolean =
args.forall(isSimpleArg)
@tailrec
private def isSimpleArg(arg: Tree): Boolean = arg match {
case New(_, _, Nil) => true
case Apply(receiver, _, Nil) => isTrivialArg(receiver)
case ApplyStatically(receiver, _, _, Nil) => isTrivialArg(receiver)
case ApplyStatic(_, _, Nil) => true
case ArrayLength(array) => isTrivialArg(array)
case ArraySelect(array, index) => isTrivialArg(array) && isTrivialArg(index)
case Unbox(inner, _) => isSimpleArg(inner)
case AsInstanceOf(inner, _) => isSimpleArg(inner)
case _ =>
isTrivialArg(arg)
}
private def isTrivialArg(arg: Tree): Boolean = arg match {
case _:VarRef | _:This | _:Literal | _:LoadModule =>
true
case _ =>
false
}
}
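  /** Destructures any tree into (init statements, final expression): a Block
   *  is split into its init and last element, any other tree yields (Nil, tree).
   */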
private object BlockOrAlone {
def unapply(tree: Tree): Some[(List[Tree], Tree)] = Some(tree match {
case Block(init :+ last) => (init, last)
case _ => (Nil, tree)
})
}
private def exceptionMsg(myself: AbstractMethodID,
attemptedInlining: List[AbstractMethodID], cause: Throwable) = {
val buf = new StringBuilder()
buf.append("The Scala.js optimizer crashed while optimizing " + myself +
": " + cause.toString)
buf.append("\nMethods attempted to inline:\n")
for (m <- attemptedInlining) {
buf.append("* ")
buf.append(m)
buf.append('\n')
}
buf.toString
}
private class RollbackException(val trampolineId: Int,
val savedUsedLocalNames: Map[String, Boolean],
val savedUsedLabelNames: Set[String],
val stateBackups: List[StateBackup],
val cont: () => TailRec[Tree]) extends ControlThrowable
class OptimizeException(val myself: AbstractMethodID,
val attemptedInlining: List[AbstractMethodID], cause: Throwable
) extends Exception(exceptionMsg(myself, attemptedInlining, cause), cause)
}
| jasonchaffee/scala-js | tools/shared/src/main/scala/org/scalajs/core/tools/optimizer/OptimizerCore.scala | Scala | bsd-3-clause | 146,697 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Framework    **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013, LAMP/EPFL        **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.jasminetest
/** Dummy Exception to wrap stack traces passed to loggers */
class JasmineTestException(
message: String,
stackTrace: Array[StackTraceElement]
) extends Exception(message) {
override def getStackTrace = stackTrace
}
| matthughes/scala-js | jasmine-test-framework/src/main/scala/org/scalajs/jasminetest/JasmineTestException.scala | Scala | bsd-3-clause | 783 |
package ch.cern.sparkmeasure
import org.scalatest.{FlatSpec, Matchers}
import java.net.ServerSocket
import com.github.tomakehurst.wiremock.WireMockServer
import com.github.tomakehurst.wiremock.client.WireMock._
class PushGatewayTest extends FlatSpec with Matchers {
  /** Get an available port: binding a ServerSocket to port 0 makes the OS pick a free ephemeral port */
var ip_port = 0
try {
val socket = new ServerSocket(ip_port)
ip_port = socket.getLocalPort
socket.close
}
catch { case _: Throwable => }
if (ip_port == 0) {
it should "get available ip port" in {
(ip_port > 0) shouldEqual true
}
} else {
/** Run wiremock server on local machine with the port */
val wireMockServer = new WireMockServer(ip_port)
/** Init class PushGateway */
val serverIPnPort = s"localhost:" + ip_port.toString
val metricsJob = s"aaa_job"
val pushGateway = PushGateway(serverIPnPort, metricsJob)
/** Check metric name validation for Prometheus */
it should "accept label name containing letters and underscores" in {
pushGateway.validateLabel(s"aaa_label") shouldEqual s"aaa_label"
}
it should "change label name containing other symbols and trim spaces" in {
pushGateway.validateLabel(s" #aaa(label): ") shouldEqual s"aaa_label"
}
it should "accept metric name containing letters, underscores and colons" in {
pushGateway.validateMetric(s"aaa_metric:") shouldEqual s"aaa_metric:"
}
it should "change metric name containing other symbols and trim spaces" in {
pushGateway.validateMetric(s" #aaa(metric) ") shouldEqual s"aaa_metric"
}
/** Metrics data to send */
val metricsType = s"metricsType"
val labelName = s"labelName"
val labelValue = s"labelValue"
val urlBase = s"/metrics/job/" + metricsJob + s"/instance/sparkMeasure"
val urlFull = urlBase + s"/type/" + metricsType + s"/" + labelName + s"/" + labelValue
val content_type = s"text/plain; version=0.0.4"
val str_metrics = s"str_metrics"
wireMockServer.start()
wireMockServer.stubFor(post(urlEqualTo(urlFull))
.withHeader("Content-Type", equalTo(content_type))
.withRequestBody(containing(str_metrics))
.willReturn(
aResponse()
.withStatus(200)
)
)
/** Send metrics */
pushGateway.post(str_metrics, metricsType, labelName, labelValue)
/** Check sent request */
val reqList = wireMockServer.findAll(postRequestedFor(urlEqualTo(urlFull))
.withHeader("Content-Type", equalTo(content_type))
.withRequestBody(containing(str_metrics))
)
it should "send one post request with parameters to pushGateway" in {
reqList.size() shouldEqual 1
}
wireMockServer.stop()
}
}
| LucaCanali/sparkMeasure | src/test/scala/ch/cern/sparkmeasure/pushgatewayTest.scala | Scala | apache-2.0 | 2,714 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.web.services
import bootstrap.liftweb._
import com.normation.rudder.domain.policies.DirectiveId
import com.normation.rudder.web.model._
import scala.xml._
import net.liftweb.common._
import net.liftweb.http._
import net.liftweb.util.Helpers
import scala.collection.mutable.{ Map => MutMap }
import com.normation.cfclerk.domain._
import com.normation.exceptions.TechnicalException
import org.slf4j.LoggerFactory
import com.normation.utils.HashcodeCaching
/**
 * Create the web representation of a Directive so that
 * it can be configured.
 *
 * This service does not query the backend, so
 * all information has to be provided.
 *
 */
class Section2FieldService(val fieldFactory: DirectiveFieldFactory, val translators: Translators) extends Loggable {
/**
* Fully initialize a DirectiveEditor from a list of variables
*/
def initDirectiveEditor(
policy : Technique
, directiveId : DirectiveId
, vars : Seq[Variable]
): Box[DirectiveEditor] = {
val valuesByName = vars.map(v => (v.spec.name, v.values)).toMap
val variableSpecs = vars.map(v => (v.spec.name -> v.spec)).toMap
val sections = policy.rootSection.copyWithoutSystemVars
val providesExpectedReports = policy.providesExpectedReports
    // A policy is new if we don't have any saved values.
    // Note that saved values may also be empty.
val isNewPolicy = valuesByName.size < 1 || valuesByName.forall { case (n,vals) => vals.size < 1 }
logger.debug("Is it a new directive ? " + isNewPolicy)
val sectionField = createSectionField(sections, valuesByName, isNewPolicy)
Full(DirectiveEditor(policy.id, directiveId, policy.name, policy.description, sectionField, variableSpecs, providesExpectedReports))
}
// --------------------------------------------
// description of the state machine
// --------------------------------------------
  /*
   * From the root we can reach variables, sections of type 1 and
   * multi-sections; a multi-section in turn contains sections of type 2.
   * Every construct loops back to its enclosing level once processed.
   *
   * sectionType1: a section that may have a multi-section for children
   * sectionType2: a section that may only have simple sub-sections
   */
// --------------------------------------------
  // implementation: TODO: implement the above state
  // machine for real, not with a copy&paste in
  // createSingleSectionFieldForMultisec
// --------------------------------------------
def createSectionField(section: SectionSpec, valuesByName:Map[String,Seq[String]], isNewPolicy:Boolean): SectionField = {
val seqOfSectionMap = {
if (isNewPolicy) Seq(createDefaultMap(section))
else {
val all = createMapForEachSubSection(section, valuesByName)
if(all.size < 1) Seq(createDefaultMap(section)) else all
}
}
val readOnlySection = section.children.collect{ case x:PredefinedValuesVariableSpec => x}.size > 0
if (section.isMultivalued) {
val sectionFields = for (sectionMap <- seqOfSectionMap) yield createSingleSectionFieldForMultisec(section,sectionMap, isNewPolicy)
MultivaluedSectionField(sectionFields, () => {
//here, valuesByName is empty, we are creating a new map.
createSingleSectionField(section,Map(),createDefaultMap(section), true)
}
, priorityToVisibility(section.displayPriority)
, readOnlySection
)
} else {
createSingleSectionField(section, valuesByName, seqOfSectionMap.head, isNewPolicy)
}
}
private[this] def createSingleSectionField(sectionSpec:SectionSpec, valuesByName:Map[String,Seq[String]], sectionMap: Map[String, Option[String]], isNewPolicy:Boolean): SectionFieldImp = {
// only variables of the current section
var varMappings = Map[String, () => String]()
def createVarField(varSpec: VariableSpec, valueOpt: Option[String]): DirectiveField = {
val fieldKey = varSpec.name
val field = fieldFactory.forType(varSpec, fieldKey)
translators.get(field.manifest) match {
case None => throw new TechnicalException("No translator from type: " + field.manifest.toString)
case Some(t) =>
t.to.get("self") match {
case None => throw new TechnicalException("Missing 'self' translator property (from type %s to a serialized string for Variable)".format(field.manifest))
case Some(c) => //close the returned function with f and store it into varMappings
logger.trace("Add translator for variable '%s', get its value from field '%s.self'".format(fieldKey, fieldKey))
varMappings += (fieldKey -> { () => c(field.get) })
valueOpt match {
case None =>
case Some(value) =>
setValueForField(value, field, t.from)
}
}
}
field.displayName = varSpec.description
field.tooltip = varSpec.longDescription
field.optional = varSpec.constraint.mayBeEmpty
field
}
val children = for (child <- sectionSpec.children) yield {
child match {
case varSpec: SectionVariableSpec => createVarField(varSpec, sectionMap(varSpec.name))
case sectSpec: SectionSpec => createSectionField(sectSpec, valuesByName, isNewPolicy)
}
}
//actually create the SectionField for createSingleSectionField
SectionFieldImp(sectionSpec.name, children, priorityToVisibility(sectionSpec.displayPriority), varMappings)
}
private[this] def createSingleSectionFieldForMultisec(sectionSpec:SectionSpec, sectionMap: Map[String, Option[String]], isNewPolicy:Boolean): SectionFieldImp = {
// only variables of the current section
var varMappings = Map[String, () => String]()
def createVarField(varSpec: SectionVariableSpec, valueOpt: Option[String]): DirectiveField = {
val fieldKey = varSpec.name
val field = fieldFactory.forType(varSpec, fieldKey)
translators.get(field.manifest) match {
case None => throw new TechnicalException("No translator from type: " + field.manifest.toString)
case Some(t) =>
t.to.get("self") match {
case None => throw new TechnicalException("Missing 'self' translator property (from type %s to a serialized string for Variable)".format(field.manifest))
case Some(c) => //close the returned function with f and store it into varMappings
logger.trace("Add translator for variable '%s', get its value from field '%s.self'".format(fieldKey, fieldKey))
varMappings += (fieldKey -> { () => c(field.get) })
valueOpt match {
case None =>
case Some(value) =>
setValueForField(value, field, t.from)
}
}
}
field.displayName = varSpec.description
field.tooltip = varSpec.longDescription
field.optional = varSpec.constraint.mayBeEmpty
field
}
val children = for (child <- sectionSpec.children) yield {
child match {
case varSpec: SectionVariableSpec => createVarField(varSpec, sectionMap.getOrElse(varSpec.name,None))
case sectSpec: SectionSpec =>
val subSectionMap = if(isNewPolicy) createDefaultMap(sectSpec) else sectionMap
createSingleSectionFieldForMultisec(sectSpec, subSectionMap, isNewPolicy)
}
}
//actually create the SectionField for createSingleSectionField
SectionFieldImp(sectionSpec.name, children, priorityToVisibility(sectionSpec.displayPriority), varMappings)
}
  // Transforms
  //   Map(A -> Seq("A1", "A2"), B -> Seq("B1", "B2"))
  // into
  //   Seq( Map((A -> "A1"), (B -> "B1")),
  //        Map((A -> "A2"), (B -> "B2")) )
  // If a value is missing, None is returned for it.
private def createMapForEachSubSection(section: SectionSpec, valuesByName:Map[String,Seq[String]]): Seq[Map[String, Option[String]]] = {
// values represent all the values we have for the same name of variable
case class NameValuesVar(name: String, values: Seq[String]) extends HashcodeCaching
// seq of variable values with same name correctly ordered
val seqOfNameValues : Seq[NameValuesVar] = {
for {
varSpec <- section.getAllVariables
} yield {
NameValuesVar(varSpec.name, valuesByName.getOrElse(varSpec.name, Seq[String]()))
}
}
if (seqOfNameValues.isEmpty) {
Seq(Map[String, Option[String]]())
} else {
for {
        // If the head has an empty value sequence, we would not iterate over the other
        // variables. To fix this, we use the max size over all variables (so existing
        // values can be used; missing ones will be set to None).
i <- 0 until seqOfNameValues.map(_.values.size).max
} yield {
for {
nameValues <- seqOfNameValues
} yield {
val valueOpt = try Some(nameValues.values(i)) catch { case e: Exception => None }
(nameValues.name, valueOpt)
}
}.toMap
}
}
private def createDefaultMap(section: SectionSpec): Map[String, Option[String]] =
section.getVariables.map(varSpec => (varSpec.name, varSpec.constraint.default)).toMap
private def setValueForField(
value: String,
currentField: DirectiveField,
unserializer: Unserializer[_]): Unit = {
//if the var is not a GUI only var, just find the field unserializer and use it
unserializer.get("self") match {
case Some(unser) => unser(value) match {
case Full(fv) => currentField.set(fv.asInstanceOf[currentField.ValueType]) //should be ok since we found the unserializer thanks to the field manifest
case _ =>
//let field un-initialized, but log why
logger.debug("Can not init field %s, translator gave no result for 'self' with value '%s'".
format(currentField.name, value))
}
case None => // can not init, no unserializer for it
logger.debug("Can not init field %s, no translator found for property 'self'".format(currentField.name))
}
}
/**
* From a priority, returns the visibility of a section
* For the moment, a naive approach is :
* - Low priority => hidden
* - High priority => displayed
*/
private[this] def priorityToVisibility(priority : DisplayPriority) : Boolean = {
priority match {
case LowDisplayPriority => false
case HighDisplayPriority => true
case _ => true
}
}
}
| Kegeruneku/rudder | rudder-web/src/main/scala/com/normation/rudder/web/services/Section2FieldService.scala | Scala | agpl-3.0 | 12,322 |
package io.iohk.ethereum.utils
object LoggingUtils {
  def getClassName(cls: Class[_]): String = cls.getName.split("\\.").last
def getClassName(o: Object): String = getClassName(o.getClass)
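  // e.g. getClassName(classOf[java.util.ArrayList[_]]) == "ArrayList"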
}
| input-output-hk/etc-client | src/main/scala/io/iohk/ethereum/utils/LoggerUtils.scala | Scala | mit | 198 |
package com.hungrylearner.pso.swarm
import com.hungrylearner.pso.swarm.Report.{Progress, ProgressReport}
import akka.actor.ActorRef
import com.hungrylearner.pso.particle.EvaluatedPosition
/**
* A Worker does the actual work during swarming. Workers work and don't supervise other
* workers.
*
* @tparam F Fitness
* @tparam P Particle backing store
*/
trait Worker[F,P]
/**
* A Supervisor supervises child workers (or child supervisors) by evaluating each ProgressReport
* and deciding what to tell higher supervisors and children.
*
* TODO: Should we have a state? It might be nice to report COMPLETE.
*
* TODO: When do we clean up child actors?
*
* @tparam F Fitness
* @tparam P Particle backing store
*/
trait Supervisor[F,P] extends Worker[F,P] {
/**
* We received a ProgressReport from a child. We can use it to update our best position,
* tell our parent, tell our children, or do nothing with this report. We can also decide
* to wait for all children to catch up and complete the specified iteration before doing
* anything.
*/
def onProgressReport( childReport: ProgressReport[F,P], originator: ActorRef): Unit
/**
* A child has terminated.
* @param child The child that terminated.
*/
def onTerminated( child: ActorRef)
/**
* Decide if and when to influence children by sending them knowledge of a (potentially)
* better position. The position sent is usually the region's current bestPosition.
*
   * @param evaluatedPosition The position given to us by the Supervisor.
   * @param iteration The iteration of the child when the progress report was generated.
   * @param progress The current progress of our children for the reported iteration. This can be
   *                 used when deciding whether to influence children. For example, we could
   *                 delay influencing children until all of them have reported completion
   *                 for the given command and iteration.
*/
protected def tellChildren( evaluatedPosition: EvaluatedPosition[F,P], iteration: Int, progress: Progress, originator: ActorRef)
}
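// A minimal sketch (illustrative only, accessor names invented) of a
// Supervisor that always propagates a child's report to its other children:
//
//   override def onProgressReport(childReport: ProgressReport[F,P], originator: ActorRef): Unit =
//     tellChildren(childReport.evaluatedPosition, childReport.iteration,
//       childReport.progress, originator)
//
// Whether ProgressReport exposes exactly these accessors depends on the
// Report definitions; they are assumptions here.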
| flintobrien/akka-multiswarm | src/main/scala/com/hungrylearner/pso/swarm/Worker.scala | Scala | apache-2.0 | 2,134 |
/*
* Copyright (c) 2014-16 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import org.junit.Test
import org.junit.Assert._
import ops.record._
import record._
import syntax.singleton._
import test._
import testutil._
import union._
object LabelledGenericTestsAux {
case class Book(author: String, title: String, id: Int, price: Double)
case class ExtendedBook(author: String, title: String, id: Int, price: Double, inPrint: Boolean)
case class BookWithMultipleAuthors(title: String, id: Int, authors: String*)
case class Private1(private val a: Int)
case class Private2(private val a: Int, b: Int)
case class Private3(a: Int, private val b: String)
case class Private4(private val a: Int, b: String)
val tapl = Book("Benjamin Pierce", "Types and Programming Languages", 262162091, 44.11)
val tapl2 = Book("Benjamin Pierce", "Types and Programming Languages (2nd Ed.)", 262162091, 46.11)
val taplExt = ExtendedBook("Benjamin Pierce", "Types and Programming Languages", 262162091, 44.11, true)
val dp = BookWithMultipleAuthors(
"Design Patterns", 201633612,
"Erich Gamma", "Richard Helm", "Ralph Johnson", "John Vlissides"
)
val taplRecord =
("author" ->> "Benjamin Pierce") ::
("title" ->> "Types and Programming Languages") ::
("id" ->> 262162091) ::
("price" ->> 44.11) ::
HNil
val dpRecord =
("title" ->> "Design Patterns") ::
("id" ->> 201633612) ::
("authors" ->> Seq("Erich Gamma", "Richard Helm", "Ralph Johnson", "John Vlissides")) ::
HNil
type BookRec = Record.`"author" -> String, "title" -> String, "id" -> Int, "price" -> Double`.T
type BookKeys = Keys[BookRec]
type BookValues = Values[BookRec]
type BookWithMultipleAuthorsRec = Record.`"title" -> String, "id" -> Int, "authors" -> Seq[String]`.T
sealed trait Tree
case class Node(left: Tree, right: Tree) extends Tree
case class Leaf(value: Int) extends Tree
sealed trait AbstractNonCC
class NonCCA(val i: Int, val s: String) extends AbstractNonCC
class NonCCB(val b: Boolean, val d: Double) extends AbstractNonCC
class NonCCWithCompanion private (val i: Int, val s: String)
object NonCCWithCompanion {
def apply(i: Int, s: String) = new NonCCWithCompanion(i, s)
def unapply(s: NonCCWithCompanion): Option[(Int, String)] = Some((s.i, s.s))
}
class NonCCLazy(prev0: => NonCCLazy, next0: => NonCCLazy) {
lazy val prev = prev0
lazy val next = next0
}
}
object ShapelessTaggedAux {
import tag.@@
trait CustomTag
case class Dummy(i: Int @@ CustomTag)
}
object ScalazTaggedAux {
import labelled.FieldType
type Tagged[A, T] = { type Tag = T; type Self = A }
type @@[T, Tag] = Tagged[T, Tag]
trait CustomTag
case class Dummy(i: Int @@ CustomTag)
case class DummyTagged(b: Boolean, i: Int @@ CustomTag)
trait TC[T] {
def apply(): String
}
object TC extends TCLowPriority {
implicit val intTC: TC[Int] = instance("Int")
implicit val booleanTC: TC[Boolean] = instance("Boolean")
implicit val taggedIntTC: TC[Int @@ CustomTag] = instance("TaggedInt")
implicit val hnilTC: TC[HNil] = instance("HNil")
implicit def hconsTC[K <: String, H, T <: HList](
implicit key: Witness.Aux[K], headTC: => TC[H], tailTC: TC[T]
): TC[FieldType[K, H] :: T] = instance {
s"${key.value}: ${headTC()} :: ${tailTC()}"
}
implicit def projectTC[F, G](
implicit lgen: LabelledGeneric.Aux[F, G], tc: => TC[G]
): TC[F] = instance {
s"Proj(${tc()})"
}
}
abstract class TCLowPriority {
def instance[T](display: String): TC[T] =
() => display
// FIXME: Workaround #309
implicit def hconsTCTagged[K <: String, H, HT, T <: HList](
implicit key: Witness.Aux[K], headTC: => TC[H @@ HT], tailTC: TC[T]
): TC[FieldType[K, H @@ HT] :: T] = instance {
s"${key.value}: ${headTC()} :: ${tailTC()}"
}
}
}
class LabelledGenericTests {
import LabelledGenericTestsAux._
@Test
def testProductBasics: Unit = {
val gen = LabelledGeneric[Book]
val b0 = gen.to(tapl)
typed[BookRec](b0)
assertEquals(taplRecord, b0)
val b1 = gen.from(b0)
typed[Book](b1)
assertEquals(tapl, b1)
val keys = b0.keys
assertEquals("author".narrow :: "title".narrow :: "id".narrow :: "price".narrow :: HNil, keys)
val values = b0.values
assertEquals("Benjamin Pierce" :: "Types and Programming Languages" :: 262162091 :: 44.11 :: HNil, values)
}
@Test
def testPrivateFields: Unit = {
val gen1 = LabelledGeneric[Private1]
val gen2 = LabelledGeneric[Private2]
val gen3 = LabelledGeneric[Private3]
val gen4 = LabelledGeneric[Private4]
val ab = "a".narrow :: "b".narrow :: HNil
val p1 = Private1(1)
val r1 = gen1.to(p1)
assertTypedEquals("a".narrow :: HNil, r1.keys)
assertTypedEquals(1 :: HNil, r1.values)
assertEquals(p1, gen1.from(r1))
val p2 = Private2(2, 12)
val r2 = gen2.to(p2)
assertTypedEquals(ab, r2.keys)
assertTypedEquals(2 :: 12 :: HNil, r2.values)
assertEquals(p2, gen2.from(r2))
val p3 = Private3(3, "p3")
val r3 = gen3.to(p3)
assertTypedEquals(ab, r3.keys)
assertTypedEquals(3 :: "p3" :: HNil, r3.values)
assertEquals(p3, gen3.from(r3))
val p4 = Private4(4, "p4")
val r4 = gen4.to(p4)
assertTypedEquals(ab, r4.keys)
assertTypedEquals(4 :: "p4" :: HNil, r4.values)
assertEquals(p4, gen4.from(r4))
}
@Test
def testProductWithVarargBasics: Unit = {
val gen = LabelledGeneric[BookWithMultipleAuthors]
val b0 = gen.to(dp)
typed[BookWithMultipleAuthorsRec](b0)
assertEquals(dpRecord, b0)
val keys = b0.keys
assertEquals("title".narrow :: "id".narrow :: "authors".narrow :: HNil, keys)
val values = b0.values
assertEquals(
"Design Patterns" :: 201633612 :: Seq("Erich Gamma", "Richard Helm", "Ralph Johnson", "John Vlissides") :: HNil,
values
)
}
@Test
def testGet: Unit = {
val gen = LabelledGeneric[Book]
val b0 = gen.to(tapl)
val e1 = b0.get("author")
typed[String](e1)
assertEquals("Benjamin Pierce", e1)
val e2 = b0.get("title")
typed[String](e2)
assertEquals( "Types and Programming Languages", e2)
val e3 = b0.get("id")
typed[Int](e3)
assertEquals(262162091, e3)
val e4 = b0.get("price")
typed[Double](e4)
assertEquals(44.11, e4, Double.MinPositiveValue)
}
@Test
def testApply: Unit = {
val gen = LabelledGeneric[Book]
val b0 = gen.to(tapl)
val e1 = b0("author")
typed[String](e1)
assertEquals("Benjamin Pierce", e1)
val e2 = b0("title")
typed[String](e2)
assertEquals( "Types and Programming Languages", e2)
val e3 = b0("id")
typed[Int](e3)
assertEquals(262162091, e3)
val e4 = b0("price")
typed[Double](e4)
assertEquals(44.11, e4, Double.MinPositiveValue)
}
@Test
def testAt: Unit = {
val gen = LabelledGeneric[Book]
val b0 = gen.to(tapl)
val v1 = b0.at(0)
typed[String](v1)
assertEquals("Benjamin Pierce", v1)
val v2 = b0.at(1)
typed[String](v2)
assertEquals( "Types and Programming Languages", v2)
val v3 = b0.at(2)
typed[Int](v3)
assertEquals(262162091, v3)
val v4 = b0.at(3)
typed[Double](v4)
assertEquals(44.11, v4, Double.MinPositiveValue)
}
@Test
def testUpdated: Unit = {
val gen = LabelledGeneric[Book]
val b0 = gen.to(tapl)
val b1 = b0.updated("title", "Types and Programming Languages (2nd Ed.)")
val b2 = b1.updated("price", 46.11)
val updated = gen.from(b2)
assertEquals(tapl2, updated)
}
@Test
def testUpdateWith: Unit = {
val gen = LabelledGeneric[Book]
val b0 = gen.to(tapl)
val b1 = b0.updateWith("title")(_+" (2nd Ed.)")
val b2 = b1.updateWith("price")(_+2.0)
val updated = gen.from(b2)
assertEquals(tapl2, updated)
}
@Test
def testExtension: Unit = {
val gen = LabelledGeneric[Book]
val gen2 = LabelledGeneric[ExtendedBook]
val b0 = gen.to(tapl)
val b1 = b0 + ("inPrint" ->> true)
val b2 = gen2.from(b1)
typed[ExtendedBook](b2)
assertEquals(taplExt, b2)
}
@Test
def testCoproductBasics: Unit = {
type TreeUnion = Union.`"Leaf" -> Leaf, "Node" -> Node`.T
val gen = LabelledGeneric[Tree]
val t = Node(Node(Leaf(1), Leaf(2)), Leaf(3))
val gt = gen.to(t)
typed[TreeUnion](gt)
}
@Test
def testAbstractNonCC: Unit = {
val ncca = new NonCCA(23, "foo")
val nccb = new NonCCB(true, 2.0)
val ancc: AbstractNonCC = ncca
type NonCCARec = Record.`"i" -> Int, "s" -> String`.T
type NonCCBRec = Record.`"b" -> Boolean, "d" -> Double`.T
type AbsUnion = Union.`"NonCCA" -> NonCCA, "NonCCB" -> NonCCB`.T
val genA = LabelledGeneric[NonCCA]
val genB = LabelledGeneric[NonCCB]
val genAbs = LabelledGeneric[AbstractNonCC]
val rA = genA.to(ncca)
assertTypedEquals[NonCCARec]("i" ->> 23 :: "s" ->> "foo" :: HNil, rA)
val rB = genB.to(nccb)
assertTypedEquals[NonCCBRec]("b" ->> true :: "d" ->> 2.0 :: HNil, rB)
val rAbs = genAbs.to(ancc)
val injA = Coproduct[AbsUnion]("NonCCA" ->> ncca)
assertTypedEquals[AbsUnion](injA, rAbs)
val fA = genA.from("i" ->> 13 :: "s" ->> "bar" :: HNil)
typed[NonCCA](fA)
assertEquals(13, fA.i)
assertEquals("bar", fA.s)
val fB = genB.from("b" ->> false :: "d" ->> 3.0 :: HNil)
typed[NonCCB](fB)
assertEquals(false, fB.b)
assertEquals(3.0, fB.d, Double.MinPositiveValue)
val injB = Coproduct[AbsUnion]("NonCCB" ->> nccb)
val fAbs = genAbs.from(injB)
typed[AbstractNonCC](fAbs)
assertTrue(fAbs.isInstanceOf[NonCCB])
assertEquals(true, fAbs.asInstanceOf[NonCCB].b)
assertEquals(2.0, fAbs.asInstanceOf[NonCCB].d, Double.MinPositiveValue)
}
@Test
def testNonCCWithCompanion: Unit = {
val nccc = NonCCWithCompanion(23, "foo")
val rec = ("i" ->> 23) :: ("s" ->> "foo") :: HNil
type NonCCRec = Record.`"i" -> Int, "s" -> String`.T
val gen = LabelledGeneric[NonCCWithCompanion]
val r = gen.to(nccc)
assertTypedEquals[NonCCRec](rec, r)
val f = gen.from("i" ->> 13 :: "s" ->> "bar" :: HNil)
typed[NonCCWithCompanion](f)
assertEquals(13, f.i)
assertEquals("bar", f.s)
}
@Test
def testNonCCLazy: Unit = {
lazy val (a: NonCCLazy, b: NonCCLazy, c: NonCCLazy) =
(new NonCCLazy(c, b), new NonCCLazy(a, c), new NonCCLazy(b, a))
val rec = "prev" ->> a :: "next" ->> c :: HNil
type LazyRec = Record.`"prev" -> NonCCLazy, "next" -> NonCCLazy`.T
val gen = LabelledGeneric[NonCCLazy]
val rB = gen.to(b)
assertTypedEquals[LazyRec](rec, rB)
val fD = gen.from("prev" ->> a :: "next" ->> c :: HNil)
typed[NonCCLazy](fD)
assertEquals(a, fD.prev)
assertEquals(c, fD.next)
}
@Test
def testShapelessTagged: Unit = {
import ShapelessTaggedAux._
val lgen = LabelledGeneric[Dummy]
val s = s"${lgen from Record(i=tag[CustomTag](0))}"
assertEquals(s, "Dummy(0)")
}
@Test
def testScalazTagged: Unit = {
import ScalazTaggedAux._
implicitly[TC[Int @@ CustomTag]]
implicitly[TC[Boolean]]
implicitly[Generic[Dummy]]
implicitly[LabelledGeneric[Dummy]]
implicitly[TC[Dummy]]
implicitly[TC[DummyTagged]]
type R = Record.`"i" -> Int @@ CustomTag`.T
val lgen = LabelledGeneric[Dummy]
implicitly[lgen.Repr =:= R]
implicitly[TC[R]]
type RT = Record.`"b" -> Boolean, "i" -> Int @@ CustomTag`.T
val lgent = LabelledGeneric[DummyTagged]
implicitly[lgent.Repr =:= RT]
implicitly[TC[RT]]
}
}
| isaka/shapeless | core/src/test/scala/shapeless/labelledgeneric.scala | Scala | apache-2.0 | 12,203 |
package reactivemongo.core.actors
import reactivemongo.core.errors.DriverException
// exceptions
object Exceptions {
import scala.util.control.NoStackTrace
private val primaryUnavailableMsg = "No primary node is available!"
private val nodeSetReachableMsg =
"The node set can not be reached! Please check your network connectivity"
/** An exception thrown when a request needs a non available primary. */
sealed class PrimaryUnavailableException private[reactivemongo] (
val message: String,
override val cause: Throwable = null
) extends DriverException with NoStackTrace {
def this(supervisor: String, connection: String, cause: Throwable) =
this(s"$primaryUnavailableMsg ($supervisor/$connection)", cause)
def this() = this(primaryUnavailableMsg, null)
}
@deprecated(message = "Use constructor with details", since = "0.12-RC0")
case object PrimaryUnavailableException extends PrimaryUnavailableException()
/**
* An exception thrown when the entire node set is unavailable.
* The application may not have access to the network anymore.
*/
sealed class NodeSetNotReachable private[reactivemongo] (
val message: String,
override val cause: Throwable
) extends DriverException with NoStackTrace {
private[reactivemongo] def this(supervisor: String, connection: String, cause: Throwable) = this(s"$nodeSetReachableMsg ($supervisor/$connection)", cause)
private[reactivemongo] def this() = this(nodeSetReachableMsg, null)
}
@deprecated(message = "Use constructor with details", since = "0.12-RC0")
case object NodeSetNotReachable extends NodeSetNotReachable()
sealed class ChannelNotFound private[reactivemongo] (
val message: String,
val retriable: Boolean,
override val cause: Throwable
) extends DriverException with NoStackTrace
@deprecated(message = "Use constructor with details", since = "0.12-RC0")
case object ChannelNotFound
extends ChannelNotFound("ChannelNotFound", false, null)
sealed class ClosedException private (
val message: String,
override val cause: Throwable
) extends DriverException with NoStackTrace {
private[reactivemongo] def this(supervisor: String, connection: String, cause: Throwable) = this(s"This MongoConnection is closed ($supervisor/$connection)", cause)
private[reactivemongo] def this(msg: String) = this(msg, null)
def this() = this("This MongoConnection is closed", null)
}
@deprecated(message = "Use constructor with details", since = "0.12-RC0")
case object ClosedException extends ClosedException()
final class InternalState private[actors] (
trace: Array[StackTraceElement]
) extends DriverException with NoStackTrace {
override def getMessage: String = null
val message = "InternalState"
super.setStackTrace(trace)
}
object InternalState {
val empty = new InternalState(Array.empty)
}
}
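// Illustrative usage note (not part of the original source): because these
// exceptions mix in NoStackTrace, callers typically match on the exception
// type rather than inspect stack traces, e.g.
//
//   future.recover {
//     case _: Exceptions.PrimaryUnavailableException => fallbackResult
//   }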
| maxime-gautre/ReactiveMongo | driver/src/main/scala/core/actors/Exceptions.scala | Scala | apache-2.0 | 2,930 |
package edu.colorado.plv.cuanto.jsy
package primitives
import edu.colorado.plv.cuanto.jsy.common.UnitOpParser
object Parser extends UnitOpParser with string.ParserLike with numerical.ParserWithBindingLike
| cuplv/cuanto | src/main/scala/edu/colorado/plv/cuanto/jsy/primitives/Parser.scala | Scala | apache-2.0 | 208 |
package org.elasticmq.rest.sqs.directives
import akka.http.scaladsl.server.{Directives, Route}
import org.elasticmq.rest.sqs.{Action, AnyParams, SQSException}
import org.elasticmq.util.Logging
trait UnmatchedActionRoutes {
this: Logging with Directives =>
def unmatchedAction(p: AnyParams): Route = {
extractRequestContext { _ =>
p.get("Action") match {
case Some(action) if Action.values.forall(_.toString != action) =>
logger.warn(s"Unknown action: $action")
throw new SQSException("InvalidAction")
case None =>
throw new SQSException("MissingAction")
case _ =>
reject
}
}
}
}
| adamw/elasticmq | rest/rest-sqs/src/main/scala/org/elasticmq/rest/sqs/directives/UnmatchedActionRoutes.scala | Scala | apache-2.0 | 672 |
package main.scala.MineSweeper
import scala.io.Source
object MineReader {
private def ParseMineFields(lines: Array[String]): List[MineField] = {
val dims = lines.head.split(" ").map(x => x.toInt)
dims match {
case Array(0, 0) => Nil // (0, 0) means the end of the input stream.
case Array(nrows, ncols) =>
val (first, rest) = lines.tail.splitAt(nrows)
MineField(first) :: ParseMineFields(rest)
case _ => Nil // illegal input, terminate the parser
}
}
  def apply(filename: String): Option[List[MineField]] = {
    val source = Source.fromFile(filename)
    try {
      val lines = source.getLines.toArray
      if (lines.isEmpty) {
        None
      } else {
        Some(ParseMineFields(lines))
      }
    } finally {
      source.close() // avoid leaking the file handle
    }
  }
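  // Illustrative sketch (not part of the original API). The parser above
  // expects input of the form
  //
  //   4 4      <- "<nrows> <ncols>" header
  //   *...     <- nrows field lines follow
  //   ....
  //   .*..
  //   ....
  //   0 0      <- a "0 0" header terminates the stream
  //
  // A small helper that renders each parsed field, assuming MineField has a
  // useful toString:
  def describeAll(fields: List[MineField]): String =
    fields.zipWithIndex
      .map { case (field, i) => s"Field #${i + 1}:\n$field" }
      .mkString("\n\n")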
} | ollielo/ScalaKata | src/main/scala/MineSweeper/MineReader.scala | Scala | mit | 717 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import scala.reflect.ClassTag
/**
* ClassSimplexCriterion implements a criterion for classification.
* It learns an embedding per class, where each class' embedding is a
* point on an (N-1)-dimensional simplex, where N is the number of classes.
* @param nClasses the number of classes.
*/
@SerialVersionUID(- 8696382776046599502L)
class ClassSimplexCriterion[T: ClassTag](val nClasses: Int)
(implicit ev: TensorNumeric[T]) extends MSECriterion[T] {
  require(nClasses > 1, "ClassSimplexCriterion: requires an integer argument nClasses > 1, " +
    s"but got nClasses = $nClasses")
private val simp = regsplex(nClasses - 1)
private val simplex = Tensor[T](simp.size(1), nClasses)
simplex.narrow(2, 1, simp.size(2)).copy(simp)
@transient
private var targetBuffer: Tensor[T] = null
private def regsplex(n : Int): Tensor[T] = {
val a = Tensor[T](n + 1, n)
var k = 1
val arr = new Array[Int](2)
while (k <= n) {
arr(0) = k
arr(1) = k
if (k == 1) a(arr) = ev.one
if (k > 1) {
val value1 = a.narrow(1, k, 1).narrow(2, 1, k - 1).norm(2)
a(arr) = ev.sqrt(ev.minus(ev.one, ev.times(value1, value1)))
}
var c = ev.minus(ev.times(a(arr), a(arr)), ev.one)
c = ev.divide(ev.minus(c, ev.fromType(1.0 / n)), a(arr))
a.narrow(1, k + 1, n - k + 1).narrow(2, k, 1).fill(c)
k += 1
}
a
}
private def transformTarget(target: Tensor[T]): Unit = {
    require(target.dim() == 1, "ClassSimplexCriterion: target should be a 1D tensor only! " +
      s"But got ${target.dim()} dimensions.")
if (null == targetBuffer) targetBuffer = Tensor[T](nClasses)
targetBuffer.resize(target.size(1), nClasses)
var i = 1
while (i <= target.size(1)) {
targetBuffer(i).copy(simplex(ev.toType[Int](target(Array(i)))))
i += 1
}
}
override def updateOutput(input: Tensor[T], target: Tensor[T]): T = {
transformTarget(target)
    require(input.nElement() == targetBuffer.nElement(), "ClassSimplexCriterion: " +
      "element number mismatch: " +
      s"input has ${input.nElement()} elements, " +
      s"targetBuffer has ${targetBuffer.nElement()} elements")
output = super.updateOutput(input, targetBuffer)
output
}
override def updateGradInput(input: Tensor[T], target: Tensor[T]): Tensor[T] = {
    require(input.nElement() == targetBuffer.nElement(), "ClassSimplexCriterion: " +
      "element number mismatch: " +
      s"input has ${input.nElement()} elements, " +
      s"targetBuffer has ${targetBuffer.nElement()} elements")
gradInput = super.updateGradInput(input, targetBuffer)
gradInput
}
override def toString(): String = {
s"nn.ClassSimplexCriterion($nClasses)"
}
override def canEqual(other: Any): Boolean = other.isInstanceOf[ClassSimplexCriterion[T]]
override def equals(other: Any): Boolean = other match {
case that: ClassSimplexCriterion[T] =>
super.equals(that) &&
(that canEqual this) &&
nClasses == that.nClasses
case _ => false
}
override def hashCode(): Int = {
def getHashCode(a: Any): Int = if (a == null) 0 else a.hashCode()
val state = Seq(super.hashCode(), nClasses)
state.map(getHashCode).foldLeft(0)((a, b) => 31 * a + b)
}
}
object ClassSimplexCriterion {
def apply[@specialized(Float, Double) T: ClassTag](
nClasses: Int)(implicit ev: TensorNumeric[T]) : ClassSimplexCriterion[T] = {
new ClassSimplexCriterion[T](nClasses)
}
}
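// Minimal usage sketch (illustrative, not from the original source). Shapes
// follow the conventions of updateOutput above: input is (batchSize x nClasses)
// and target holds 1-based class indices. Assumes the standard BigDL Tensor
// factory methods (rand, setValue).
object ClassSimplexCriterionExample {
  def run(): Float = {
    val criterion = ClassSimplexCriterion[Float](3)
    val input = Tensor[Float](2, 3).rand()
    val target = Tensor[Float](2)
    target.setValue(1, 1f) // sample 1 belongs to class 1
    target.setValue(2, 3f) // sample 2 belongs to class 3
    criterion.forward(input, target) // mean squared distance to the class embeddings
  }
}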
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/ClassSimplexCriterion.scala | Scala | apache-2.0 | 4,331 |
package domain.views
import akka.actor.Actor
import akka.contrib.persistence.mongodb.{MongoReadJournal, ScalaDslMongoReadJournal}
import akka.persistence.query.PersistenceQuery
import akka.stream.ActorMaterializer
import akka.util.Timeout
import scala.concurrent.duration._
case object LiveStreamFailed
trait JournalReadingView extends Actor {
val persistenceId: String
lazy val readJournal =
PersistenceQuery(context.system).readJournalFor[ScalaDslMongoReadJournal](MongoReadJournal.Identifier)
lazy val journalSource = readJournal.eventsByPersistenceId(persistenceId, fromSequenceNr = 0L, toSequenceNr = Long.MaxValue)
.map(_.event)
override def preStart = {
implicit val materializer = ActorMaterializer()
implicit val timeout = Timeout(5 seconds)
journalSource.runForeach(event => context.self ! event)
}
}
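/**
 * Illustrative sketch (not part of the original source): a concrete view that
 * mixes in JournalReadingView and simply logs every replayed event. The
 * persistenceId is an assumed example value.
 */
class LoggingJournalView extends JournalReadingView {
  val persistenceId = "sample-aggregate-1"
  def receive = {
    case event => println(s"replayed event: $event")
  }
}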
| tegonal/lasius | app/domain/views/JournalReadingView.scala | Scala | gpl-3.0 | 848 |
package liang.don.dzviewer.viewer.imagefetch.actor
import liang.don.dzviewer.viewer.imagefetch.TileFetcher
/**
* This implementation does not prefetch any tiles.
*
* @author Don Liang
 * @version 0.1.1, 15/09/2011
*/
trait DummyFetcherActor extends TileFetcher {
override def fetch() { }
}
| dl2k84/DeepZoomViewer | src/liang/don/dzviewer/viewer/imagefetch/actor/DummyFetcherActor.scala | Scala | mit | 300 |
package com.github.luzhuomi.regexphonetic
import com.github.luzhuomi.regexphonetic.{Dictionary => D}
import com.github.luzhuomi.regexphonetic.Common._
import scala.io.Source
object SoundMap {
case class SoundMap(rowIDs : List[String], colIDs : List[String], rowScores : D.Dictionary[String,List[(String,Float)]], colScores : D.Dictionary[String,List[(String,Float)]])
def emptyMap : SoundMap = new SoundMap(List(), List(), D.empty, D.empty)
def transpose (sm : SoundMap) : SoundMap = sm match {
case SoundMap (rowIDs, colIDs, rowScores, colScores) => SoundMap ( colIDs, rowIDs, colScores, rowScores)
} // transpose
def parseSoundMap(fp:String) : SoundMap = {
parseSoundMapFromBufferedSource(Source.fromFile(fp))
}
  // Option #1: read the file imperatively, line by line.
  // Downside: the accumulator must be a var instead of a val.
def parseSoundMapFromBufferedSource(bs:scala.io.BufferedSource) : SoundMap = {
// var colIDs : List[String] = List()
var soundMap = emptyMap
for (line <- bs.getLines()) {
if (soundMap.colIDs.length == 0) {
parseHeader(line) match {
case (_ :: columnIDs) => {
soundMap = SoundMap(List(), columnIDs, D.empty, D.empty)
} // case
case Nil => { }
} // match
} else {
// splitBy2(isTab)(line) match {
line.split("\\t").toList match {
case Nil => { }
case (rowID :: scores) => {
val row_scores = soundMap.colIDs
.zip(scores.map(parseScore))
.filter(sndIsSome)
          .map { case (x, Some(y)) => (x, y) } // safe: sndIsSome already removed the Nones
val rScores = D.insert(rowID)(row_scores)(soundMap.rowScores)
val cScores = row_scores.foldLeft(soundMap.colScores)(
(d, colID_score ) => colID_score match {
case (colID,score) => D.lookup(colID)(d) match {
case None => D.insert(colID)(List((rowID,score)))(d)
case Some(kvs) => D.update(colID)(kvs ++ List((rowID,score)))(d)
} // inner match
}) // outer match and lambda
val rIDs = soundMap.rowIDs
val cIDs = soundMap.colIDs
soundMap = SoundMap(rIDs ++ List(rowID), cIDs, rScores, cScores)
} // case
} // match
} // if
} // for
soundMap
}
def sndIsSome[A,B](p:(A,Option[B])) : Boolean = p match {
case (x,Some(y)) => true
case _ => false
}
def parseHeader(s:String) : List[String] = s.split("\\t").toList // splitBy2(isTab)(s)
def parseScore(s:String) : Option[Float] =
if (s.length > 0) {
      try { Some(s.toFloat) } catch { case _: NumberFormatException => None }
} else {
None
}
}
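// Illustrative sketch (not part of the original source). The parser above
// expects a tab-separated matrix whose first row is a header of column ids and
// whose remaining rows start with a row id, e.g.
//
//   <tab>b<tab>p
//   b<tab>1.0<tab>0.8
//   p<tab>0.8<tab>1.0
//
// Blank cells are dropped by parseScore. A small lookup helper:
object SoundMapExample {
  def rowOf(sm: SoundMap.SoundMap, id: String): List[(String, Float)] =
    D.lookup(id)(sm.rowScores).getOrElse(List())
}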
| luzhuomi/regexphonetic | src/main/scala/com/gitbhub/luzhuomi/regexphonetic/SoundMap.scala | Scala | apache-2.0 | 2,558 |
package debop4s.core.utils
import java.util.concurrent._
import java.util.concurrent.atomic.AtomicBoolean
import debop4s.core._
import debop4s.core.concurrent.NamedPoolThreadFactory
import debop4s.core.conversions.time._
import org.slf4j.LoggerFactory
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Future, _}
import scala.util.Try
import scala.util.control.NonFatal
/**
 * A handle to a scheduled unit of work that can be cancelled or closed.
 */
trait TimerTask extends Closable {
  /**
   * Cancels the timer task.
   */
def cancel(): Unit
  /**
   * Closes the task at the specified completion time.
   * @param deadline the completion time
   * @return
   */
override def close(deadline: Time): Future[Unit] = Future { cancel() }
}
/**
 * A timer that can run tasks once or periodically.
 */
trait Timer {
protected lazy val log = LoggerFactory.getLogger(getClass)
  /**
   * Runs `block` at the given time.
   * @param when the start time
   * @param block the block to execute
   * @return `TimerTask` instance
   */
def schedule(when: Time)(block: => Unit): TimerTask
  /**
   * Runs `block` periodically.
   * @param when the start time
   * @param period the repetition interval
   * @param block the block to execute
   * @return `TimerTask` instance
   */
def schedule(when: Time, period: Duration)(block: => Unit): TimerTask
  /**
   * Runs `block` periodically.
   * @param period the repetition interval
   * @param block the block to execute
   * @return `TimerTask` instance
   */
def schedule(period: Duration)(block: => Unit): TimerTask =
schedule(period.fromNow, period)(block)
  /**
   * Runs `func` after the given delay.
   */
def doLater[A](delay: Duration)(func: => A): Future[A] = {
log.debug(s"현재시각으로부터 delay=$delay 이후에 작업을 수행합니다.")
doAt(Time.now + delay)(func)
}
  /**
   * Runs `func` at the given time.
   */
def doAt[A](time: Time)(func: => A): Future[A] = {
log.debug(s"지정된 시각 $time 에 작업을 시작하도록 예약 합니다.")
val pending = new AtomicBoolean(true)
val p = Promise[A]()
val task = schedule(time) {
if (pending.compareAndSet(true, false)) {
p complete Try(func)
}
}
p.future onFailure {
case cause =>
log.warn(s"작업 중 예외가 발생했습니다.", cause)
if (pending.compareAndSet(true, false))
task.cancel()
}
p.future
}
  /**
   * Stops the timer.
   */
def stop(): Unit
}
object Timer {
val Nil: Timer = new NullTimer()
}
object NullTimerTask extends TimerTask {
def cancel() {}
}
/**
 * A NullTimer invokes every task immediately and then discards it.
 */
class NullTimer extends Timer {
override def schedule(when: Time)(block: => Unit): TimerTask = {
block
NullTimerTask
}
override def schedule(when: Time, period: Duration)(block: => Unit): TimerTask = {
block
NullTimerTask
}
override def stop() = {}
}
object ThreadStoppingTimer {
implicit val executor = new ForkJoinPool(2)
def apply(underlying: Timer)(implicit executor: ExecutorService): ThreadStoppingTimer =
new ThreadStoppingTimer(underlying, executor)
}
class ThreadStoppingTimer(underlying: Timer, executor: ExecutorService) extends Timer {
override def schedule(when: Time)(block: => Unit): TimerTask =
underlying.schedule(when)(block)
override def schedule(when: Time, period: Duration)(block: => Unit): TimerTask =
underlying.schedule(when, period)(block)
override def stop(): Unit =
executor.submit(Threads.makeRunnable { underlying.stop() })
}
trait ReferenceCountedTimer extends Timer {
def acquire()
}
object ReferenceCountingTimer {
def apply(factory: () => Timer): ReferenceCountingTimer =
new ReferenceCountingTimer(factory)
}
class ReferenceCountingTimer(factory: () => Timer) extends ReferenceCountedTimer {
private[this] var refcount = 0
private[this] var underlying = null: Timer
override def acquire(): Unit = synchronized {
refcount += 1
if (refcount == 1) {
require(underlying == null)
underlying = factory()
}
}
override def stop(): Unit = synchronized {
refcount -= 1
if (refcount == 0) {
underlying.stop()
underlying = null
}
}
override def schedule(when: Time)(block: => Unit): TimerTask = {
assert(underlying != null)
underlying.schedule(when)(block)
}
override def schedule(when: Time, period: Duration)(block: => Unit): TimerTask = {
assert(underlying != null)
underlying.schedule(when, period)(block)
}
}
/**
* companion object for [[JavaTimer]]
*/
object JavaTimer {
def apply(isDaemon: Boolean = false): JavaTimer = new JavaTimer(isDaemon)
}
/**
 * A Timer backed by `java.util.Timer`.
 */
class JavaTimer(isDaemon: Boolean = false) extends Timer {
// def this() = this(false)
private[this] val underlying = new java.util.Timer(isDaemon)
  /**
   * Runs `block` at the given time.
   * @param when the start time
   * @param block the block to execute
   * @return `TimerTask` instance
   */
override def schedule(when: Time)(block: => Unit): TimerTask = {
val task = toJavaTimerTask(block)
underlying.schedule(task, when.toDate)
toTimerTask(task)
}
  /**
   * Runs `block` repeatedly, starting at the given time.
   * @param when the start time
   * @param period the repetition interval
   * @param block the block to execute
   * @return `TimerTask` instance
   */
override def schedule(when: Time, period: Duration)(block: => Unit): TimerTask = {
val task = toJavaTimerTask(block)
underlying.schedule(task, when.toDate, period.toMillis)
toTimerTask(task)
}
override def stop() = underlying.cancel()
def logError(t: Throwable) {
log.error(s"WARNING: JavaTimer 에서 작업 실행 시 예외가 발생했습니다. $t")
t.printStackTrace(System.err)
}
private def toJavaTimerTask(block: => Unit) = new java.util.TimerTask {
override def run(): Unit = {
try {
block
} catch {
case NonFatal(t) => logError(t)
case fatal: Throwable =>
logError(fatal)
throw fatal
}
}
}
private[this] def toTimerTask(task: java.util.TimerTask) = new TimerTask {
def cancel() { task.cancel() }
}
}
/**
* companion object of [[ScheduledThreadPoolTimer]]
*/
object ScheduledThreadPoolTimer {
def apply(poolSize: Int = 2,
name: String = "timer",
makeDaemons: Boolean = false): ScheduledThreadPoolTimer = {
new ScheduledThreadPoolTimer(poolSize, name, makeDaemons)
}
}
/**
 * A Timer that runs its tasks on a `ScheduledThreadPool`.
 * @param poolSize the size of the thread pool
 * @param threadFactory thread factory
 * @param rejectedExecutionHandler handler invoked when a task is rejected
 */
class ScheduledThreadPoolTimer(poolSize: Int,
threadFactory: ThreadFactory,
rejectedExecutionHandler: Option[RejectedExecutionHandler]) extends Timer {
def this(poolSize: Int, threadFactory: ThreadFactory) =
this(poolSize, threadFactory, None)
def this(poolSize: Int, threadFactory: ThreadFactory, handler: RejectedExecutionHandler) =
this(poolSize, threadFactory, Some(handler))
/** Construct a ScheduledThreadPoolTimer with a NamedPoolThreadFactory. */
def this(poolSize: Int, name: String = "timer", makeDaemons: Boolean) =
this(poolSize, NamedPoolThreadFactory(name, makeDaemons), None)
private[this] val underlying: ScheduledThreadPoolExecutor = {
rejectedExecutionHandler match {
case Some(handler: RejectedExecutionHandler) =>
new ScheduledThreadPoolExecutor(poolSize, threadFactory, handler)
case None =>
new ScheduledThreadPoolExecutor(poolSize, threadFactory)
}
}
  /**
   * Runs `block` at the given time.
   * @param when the start time
   * @param block the block to execute
   * @return `TimerTask` instance
   */
override def schedule(when: Time)(block: => Unit): TimerTask = {
val runBlock = runnable { block }
val javaFuture = underlying.schedule(runBlock,
when.sinceNow.toMillis,
TimeUnit.MILLISECONDS)
new TimerTask {
override def cancel(): Unit = {
javaFuture.cancel(true)
underlying.remove(runBlock)
}
}
}
  /**
   * Runs `block` periodically.
   * @param when the start time
   * @param period the repetition interval
   * @param block the block to execute
   * @return `TimerTask` instance
   */
override def schedule(when: Time, period: Duration)(block: => Unit): TimerTask =
schedule(when.sinceNow, period)(block)
  /**
   * Runs `block` periodically.
   * @param wait the initial delay
   * @param period the repetition interval
   * @param block the block to execute
   * @return `TimerTask` instance
   */
def schedule(wait: Duration, period: Duration)(block: => Unit): TimerTask = {
val runblock = runnable { block }
val javaFuture = underlying.scheduleAtFixedRate(runblock,
wait.toMillis,
period.toMillis,
TimeUnit.MILLISECONDS)
new TimerTask {
override def cancel(): Unit = {
javaFuture.cancel(true)
underlying.remove(runblock)
}
}
}
override def stop(): Unit = underlying.shutdown()
}
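/**
 * Illustrative usage sketch (not part of the original source): a one-shot task
 * 100 millis from now plus a periodic task every second, followed by an
 * orderly stop. Uses only the Timer API defined above.
 */
object TimerUsageExample {
  def run(): Unit = {
    val timer = ScheduledThreadPoolTimer(poolSize = 1, name = "example-timer")
    timer.schedule(Time.now + 100.millis) { println("one-shot fired") }
    val periodic = timer.schedule(1.second) { println("tick") }
    Thread.sleep(3000) // demo only: let a few ticks fire
    periodic.cancel()
    timer.stop()
  }
}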
object MockTimer {
def apply(): MockTimer = new MockTimer()
}
/**
 * A Timer for use in tests.
 */
class MockTimer extends Timer {
// These are weird semantics admittedly, but there may
// be a bunch of tests that rely on them already.
case class Task(var when: Time, func: () => Unit) extends TimerTask {
var isCancelled: Boolean = false
override def cancel() {
isCancelled = true
nCancelled += 1
when = Time.now
tick()
}
}
var isStopped: Boolean = false
var tasks = ArrayBuffer[Task]()
var nCancelled: Int = 0
def tick() {
if (isStopped)
      throw new IllegalStateException("timer is already stopped.")
val now = Time.now
val (toRun, toQueue) = tasks.partition(task => task.when <= now)
tasks = toQueue
toRun.filter(!_.isCancelled).foreach(_.func())
}
override def schedule(when: Time)(block: => Unit): TimerTask = {
val task = Task(when, () => block)
tasks += task
task
}
override def schedule(when: Time, period: Duration)(block: => Unit): TimerTask = {
def runAndReschedule() {
schedule(Time.now + period) { runAndReschedule() }
block
}
schedule(when) {
runAndReschedule()
}
}
override def stop(): Unit = {
log.trace(s"Timer를 중단합니다.")
isStopped = true
}
} | debop/debop4s | debop4s-core/src/main/scala/debop4s/core/utils/Timer.scala | Scala | apache-2.0 | 11,191 |
package polyite.schedule
import scala.math.BigInt.int2bigInt
import polyite.util.Rat
import polyite.util.Util
import isl.Isl.TypeAliases._
object ScheduleVectorUtils {
def checkAllZero(coeffs : Iterable[Rat]) : Boolean = coeffs.forall { _ == Rat(0) }
def checkAllZeroBigInt(coeffs : Iterable[BigInt]) : Boolean = coeffs.forall { _ == BigInt(0) }
def checkIsConstant(coeffs : Iterable[Rat], domInfo : DomainCoeffInfo) : Boolean = {
var found = true
for (sInfo : StmtCoeffInfo <- domInfo.stmtInfo.values) {
found &&= coeffs.view.drop(sInfo.itStart).take(sInfo.nrIt).forall { _ == Rat(0) }
}
found
}
def checkIsConstantForAnyStmt(coeffs : Iterable[Rat], domInfo : DomainCoeffInfo) : Boolean = {
var found = false
for (sInfo : StmtCoeffInfo <- domInfo.stmtInfo.values) {
found ||= coeffs.view.drop(sInfo.itStart).take(sInfo.nrIt).forall { _ == Rat(0) }
}
found
}
// v1 + c * v2
def add(v1 : List[Rat], v2 : List[Rat], c : Rat) : List[Rat] = v1.zip(v2)
.map((x : (Rat, Rat)) => x._1 + (x._2 * c))
  def checkForZeroCoeffPerStatement(coeffs : Array[Rat],
    domInfo : DomainCoeffInfo) : Boolean = {
    var found = false
    for (sInfo : StmtCoeffInfo <- domInfo.stmtInfo.values) {
      // All coefficients belonging to this statement: iterator coefficients,
      // parameter coefficients and the constant.
      val stmtCoeffs : Array[Rat] = (coeffs.drop(sInfo.itStart).take(sInfo.nrIt)
        ++ coeffs.drop(sInfo.parStart).take(domInfo.nrParPS)).:+(coeffs(sInfo.cstIdx))
      // The original check tested the iterator coefficients twice and never
      // used stmtCoeffs; testing stmtCoeffs matches the method's intent.
      found ||= checkAllZero(stmtCoeffs)
    }
    found
  }
def getMaxAbsoluteComponent(v : Iterable[Rat]) : Rat = (v.map { x => x.abs })
.foldLeft(Rat(0))((x, y) => if (x > y) x else y)
def checkLinearIndep(v1 : Iterable[Rat], v2 : Iterable[Rat]) : Boolean = {
if (v1.size != v2.size)
throw new IllegalArgumentException("v1 and v2 don't have the same dimensionality!")
if (isZeroVector(v1) || isZeroVector(v2))
return false
val v1v2 = v1.zip(v2)
// e.g. (0, 1) or (-3, 0) implies linear independence
if (v1v2.foldLeft(false)((b : Boolean, t : (Rat, Rat)) => b
|| (t._1 == Rat(0) && t._2 != Rat(0)) || (t._1 != Rat(0) && t._2 == Rat(0))))
return true
val v1v2NoZero : Iterable[(Rat, Rat)] = v1v2
.filterNot((t : (Rat, Rat)) => t._1 == Rat(0) /* && t._2 == Rat(0)*/ )
val idx1Factor : Rat = v1v2NoZero.head._1 / v1v2NoZero.head._2
return !(v1v2NoZero forall ((t : (Rat, Rat)) => (t._2 * idx1Factor) == t._1))
}
def isZeroVector(v : Iterable[Rat]) = v.forall { _ == Rat(0) }
/**
* Multiplies each component of the given vector of rational numbers with
* their common denominator. The result is a vector of integers.
*/
  def multiplyWithCommonDenominator(v : List[Rat]) : List[BigInt] = {
    val commonDenom : BigInt = v.foldLeft(BigInt(1))((d : BigInt, x : Rat) =>
      if (d % x.denominator == 0)
        d
      else {
        d * (x.denominator / x.denominator.gcd(d))
      })
    v map { x => x.numerator * (commonDenom / x.denominator) }
  }
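  // Illustrative example (not in the original source), assuming Rat exposes a
  // (numerator, denominator) factory: the common denominator of (1/2, 2/3, 1)
  // is 6, so the scaled integer vector is (3, 4, 6).
  def multiplyWithCommonDenominatorExample : List[BigInt] =
    multiplyWithCommonDenominator(List(Rat(1, 2), Rat(2, 3), Rat(1)))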
} | stganser/polyite | src/polyite/schedule/ScheduleVectorUtils.scala | Scala | mit | 3,260 |
package de.unihamburg.vsis.sddf.visualisation.model
import org.apache.spark.rdd.RDD
import de.unihamburg.vsis.sddf.reading.Tuple
class ReadingModel extends BasicAnalysable {
var _tuples: Option[RDD[Tuple]] = None
def tuples = {
if (_tuples.isDefined) {
_tuples.get
} else {
throw new Exception("Tuples not defined")
}
}
def tuples_=(tuples: RDD[Tuple]) = _tuples = Option(tuples)
lazy val corpusSize = tuples.count()
}
| numbnut/sddf | src/main/scala/de/unihamburg/vsis/sddf/visualisation/model/ReadingModel.scala | Scala | gpl-3.0 | 459 |
/**
* This file is part of mycollab-web.
*
* mycollab-web is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-web is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-web. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.module.project.view.parameters
import com.esofthead.mycollab.vaadin.mvp.ScreenData
/**
* @author MyCollab Ltd.
* @since 5.0.3
*/
object ProjectSettingScreenData {
class ViewSettings extends ScreenData {}
}
| maduhu/mycollab | mycollab-web/src/main/scala/com.esofthead.mycollab.module.project.view.parameters/ProjectSettingScreenData.scala | Scala | agpl-3.0 | 940 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010
import java.{util => ju}
import scala.collection.mutable.ArrayBuffer
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.spark.{Partition, SparkContext, TaskContext}
import org.apache.spark.partial.{BoundedDouble, PartialResult}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.NextIterator
/** Offset range that one partition of the KafkaSourceRDD has to read */
private[kafka010] case class KafkaSourceRDDOffsetRange(
topicPartition: TopicPartition,
fromOffset: Long,
untilOffset: Long,
preferredLoc: Option[String]) {
def topic: String = topicPartition.topic
def partition: Int = topicPartition.partition
def size: Long = untilOffset - fromOffset
}
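/**
 * Illustrative sketch (not from the original source): an offset range covering
 * offsets [0, 100) of partition 0 of topic "events", pinned to a hypothetical
 * preferred executor host.
 */
private[kafka010] object KafkaSourceRDDOffsetRangeExample {
  val example: KafkaSourceRDDOffsetRange = KafkaSourceRDDOffsetRange(
    new TopicPartition("events", 0),
    fromOffset = 0L,
    untilOffset = 100L,
    preferredLoc = Some("executor-host-1"))
}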
/** Partition of the KafkaSourceRDD */
private[kafka010] case class KafkaSourceRDDPartition(
index: Int, offsetRange: KafkaSourceRDDOffsetRange) extends Partition
/**
* An RDD that reads data from Kafka based on offset ranges across multiple partitions.
* Additionally, it allows preferred locations to be set for each topic + partition, so that
* the [[KafkaSource]] can ensure the same executor always reads the same topic + partition
 * and cached KafkaConsumers (see [[CachedKafkaConsumer]]) can be used to read data efficiently.
*
* @param sc the [[SparkContext]]
* @param executorKafkaParams Kafka configuration for creating KafkaConsumer on the executors
* @param offsetRanges Offset ranges that define the Kafka data belonging to this RDD
*/
private[kafka010] class KafkaSourceRDD(
sc: SparkContext,
executorKafkaParams: ju.Map[String, Object],
offsetRanges: Seq[KafkaSourceRDDOffsetRange],
pollTimeoutMs: Long,
failOnDataLoss: Boolean)
extends RDD[ConsumerRecord[Array[Byte], Array[Byte]]](sc, Nil) {
override def persist(newLevel: StorageLevel): this.type = {
logError("Kafka ConsumerRecord is not serializable. " +
"Use .map to extract fields before calling .persist or .window")
super.persist(newLevel)
}
override def getPartitions: Array[Partition] = {
offsetRanges.zipWithIndex.map { case (o, i) => new KafkaSourceRDDPartition(i, o) }.toArray
}
override def count(): Long = offsetRanges.map(_.size).sum
override def countApprox(timeout: Long, confidence: Double): PartialResult[BoundedDouble] = {
val c = count
new PartialResult(new BoundedDouble(c, 1.0, c, c), true)
}
override def isEmpty(): Boolean = count == 0L
override def take(num: Int): Array[ConsumerRecord[Array[Byte], Array[Byte]]] = {
val nonEmptyPartitions =
this.partitions.map(_.asInstanceOf[KafkaSourceRDDPartition]).filter(_.offsetRange.size > 0)
if (num < 1 || nonEmptyPartitions.isEmpty) {
return new Array[ConsumerRecord[Array[Byte], Array[Byte]]](0)
}
// Determine in advance how many messages need to be taken from each partition
val parts = nonEmptyPartitions.foldLeft(Map[Int, Int]()) { (result, part) =>
val remain = num - result.values.sum
if (remain > 0) {
val taken = Math.min(remain, part.offsetRange.size)
result + (part.index -> taken.toInt)
} else {
result
}
}
val buf = new ArrayBuffer[ConsumerRecord[Array[Byte], Array[Byte]]]
val res = context.runJob(
this,
(tc: TaskContext, it: Iterator[ConsumerRecord[Array[Byte], Array[Byte]]]) =>
it.take(parts(tc.partitionId)).toArray, parts.keys.toArray
)
res.foreach(buf ++= _)
buf.toArray
}
override def getPreferredLocations(split: Partition): Seq[String] = {
val part = split.asInstanceOf[KafkaSourceRDDPartition]
part.offsetRange.preferredLoc.map(Seq(_)).getOrElse(Seq.empty)
}
override def compute(
thePart: Partition,
context: TaskContext): Iterator[ConsumerRecord[Array[Byte], Array[Byte]]] = {
val range = thePart.asInstanceOf[KafkaSourceRDDPartition].offsetRange
assert(
range.fromOffset <= range.untilOffset,
s"Beginning offset ${range.fromOffset} is after the ending offset ${range.untilOffset} " +
s"for topic ${range.topic} partition ${range.partition}. " +
"You either provided an invalid fromOffset, or the Kafka topic has been damaged")
if (range.fromOffset == range.untilOffset) {
logInfo(s"Beginning offset ${range.fromOffset} is the same as ending offset " +
s"skipping ${range.topic} ${range.partition}")
Iterator.empty
} else {
new NextIterator[ConsumerRecord[Array[Byte], Array[Byte]]]() {
val consumer = CachedKafkaConsumer.getOrCreate(
range.topic, range.partition, executorKafkaParams)
var requestOffset = range.fromOffset
override def getNext(): ConsumerRecord[Array[Byte], Array[Byte]] = {
if (requestOffset >= range.untilOffset) {
// Processed all offsets in this partition.
finished = true
null
} else {
val r = consumer.get(requestOffset, range.untilOffset, pollTimeoutMs, failOnDataLoss)
if (r == null) {
// Losing some data. Skip the rest offsets in this partition.
finished = true
null
} else {
requestOffset = r.offset + 1
r
}
}
}
override protected def close(): Unit = {}
}
}
}
}
| myfleetingtime/spark2.11_bingo | external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaSourceRDD.scala | Scala | apache-2.0 | 6,255 |
package org.jetbrains.plugins.scala
package codeInspection.syntacticSimplification
import com.intellij.codeInspection.{ProblemHighlightType, ProblemsHolder}
import com.intellij.openapi.project.Project
import com.intellij.psi._
import org.jetbrains.annotations.Nls
import org.jetbrains.plugins.scala.codeInspection.collections.MethodRepr
import org.jetbrains.plugins.scala.codeInspection.syntacticSimplification.ConvertibleToMethodValueInspection._
import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection, ScalaInspectionBundle}
import org.jetbrains.plugins.scala.extensions.{&&, PsiElementExt, PsiModifierListOwnerExt, ResolvesTo, childOf}
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScConstructorInvocation, ScMethodLike}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScVariable
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScClassParameter
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypeBoundsOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportStmt
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.{createExpressionFromText, createExpressionWithContextFromText}
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.api.FunctionType
import org.jetbrains.plugins.scala.lang.psi.types.result._
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaNamesUtil
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import org.jetbrains.plugins.scala.project.ProjectPsiElementExt
import org.jetbrains.plugins.scala.externalLibraries.kindProjector.PolymorphicLambda
import scala.annotation.nowarn
/**
* Nikolay.Tropin
* 5/30/13
*/
object ConvertibleToMethodValueInspection {
val inspectionName: String = ScalaInspectionBundle.message("convertible.to.method.value.name")
val inspectionId = "ConvertibleToMethodValue"
/**
* Since kind-projector operates *before* typer, it can't analyse types of the
* expressions used in rewrites and therefore requires arguments to value-level
   * lambdas to be identifiable as functions solely by shape (i.e. explicit match-cases
* or anonymous functions, not method references).
*/
private object ArgumentToPolymorphicLambda {
def unapply(expr: ScExpression): Boolean =
if (!expr.kindProjectorPluginEnabled) false
else
expr match {
case childOf(_, childOf(_, ScMethodCall(PolymorphicLambda(_, _, _), _))) => true
case _ => false
}
}
}
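// Illustrative before/after for this inspection (not part of the source):
//
//   seq.map(foo(_)) // call with underscore arguments
//   seq.map(foo _)  // underscore section / eta-expansion
//
// can both be rewritten to the plain method value
//
//   seq.map(foo)
//
// provided the expected type is a function type and no implicit or by-name
// parameters are involved (see involvesImplicitsOrByNameParams below).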
@nowarn("msg=" + AbstractInspection.DeprecationText)
class ConvertibleToMethodValueInspection extends AbstractInspection(inspectionName) {
override def actionFor(implicit holder: ProblemsHolder, isOnTheFly: Boolean): PartialFunction[PsiElement, Any] = {
case ArgumentToPolymorphicLambda() => () // disallowed by kind projector rules
case MethodRepr(_, _, Some(ref), _)
if ref.bind().exists(involvesImplicitsOrByNameParams) => //do nothing
case MethodRepr(expr, qualOpt, Some(_), args) =>
if (allArgsUnderscores(args) && qualOpt.forall(onlyStableValuesUsed))
registerProblem(holder, expr, ScalaInspectionBundle.message("convertible.to.method.value.anonymous.hint"))
case [email protected](bindingExpr) =>
val isInParameterOfParameterizedClass = und.parentOfType(classOf[ScClassParameter])
.exists(_.containingClass.hasTypeParameters)
def mayReplace() = bindingExpr match {
case ResolvesTo(fun) if hasByNameOrImplicitParam(fun) || hasInitialEmptyArgList(fun) => false
case ScReferenceExpression.withQualifier(qual) => onlyStableValuesUsed(qual)
case _ => true
}
if (!isInParameterOfParameterizedClass && mayReplace())
registerProblem(holder, und, ScalaInspectionBundle.message("convertible.to.method.value.eta.hint"))
}
private def hasInitialEmptyArgList(element: PsiElement): Boolean =
element match {
case func: ScMethodLike =>
func.effectiveParameterClauses.headOption.exists(_.parameters.isEmpty)
case method: PsiMethod =>
!method.hasParameters
case _ =>
false
}
private def involvesImplicitsOrByNameParams(srr: ScalaResolveResult): Boolean = {
srr.implicitType.nonEmpty ||
srr.implicitFunction.nonEmpty ||
hasByNameOrImplicitParam(srr.getElement) ||
hasContextOrViewBoundTypeParam(srr.getElement)
}
private def allArgsUnderscores(args: Seq[ScExpression]): Boolean = {
args.nonEmpty && args.forall(arg => arg.isInstanceOf[ScUnderscoreSection] && ScUnderScoreSectionUtil.isUnderscore(arg))
}
private def onlyStableValuesUsed(qual: ScExpression): Boolean = {
def isStable(named: PsiNamedElement) = ScalaPsiUtil.nameContext(named) match {
case cp: ScClassParameter => !cp.isVar
case f: PsiField => f.hasFinalModifier
case o: ScObject => o.isLocal || ScalaPsiUtil.hasStablePath(o)
case _: PsiMethod | _: ScVariable => false
case _ => true
}
qual.depthFirst(e => !e.isInstanceOf[ScImportStmt]).forall {
case _: ScNewTemplateDefinition => false
case (_: ScReferenceExpression | ScConstructorInvocation.byReference(_)) && ResolvesTo(named: PsiNamedElement) => isStable(named)
case _ => true
}
}
private def registerProblem(holder: ProblemsHolder, expr: ScExpression, @Nls hint: String): Unit = {
possibleReplacements(expr).find(isSuitableForReplace(expr, _)).foreach { replacement =>
holder.registerProblem(expr, inspectionName,
ProblemHighlightType.GENERIC_ERROR_OR_WARNING,
new ConvertibleToMethodValueQuickFix(expr, replacement, hint))
}
}
private def methodWithoutArgumentsText(expr: ScExpression): Seq[String] = expr match {
case call: ScMethodCall => Seq(call.getEffectiveInvokedExpr.getText)
case ScInfixExpr(_, oper, _) if !ScalaNamesUtil.isOperatorName(oper.refName) =>
val infixCopy = expr.copy.asInstanceOf[ScInfixExpr]
infixCopy.getNode.removeChild(infixCopy.right.getNode)
Seq(infixCopy.getText)
case und: ScUnderscoreSection => und.bindingExpr.map(_.getText).toSeq
case _ => Seq.empty
}
private def isSuitableForReplace(oldExpr: ScExpression, newExprText: String): Boolean = {
val newExpr = createExpressionWithContextFromText(newExprText, oldExpr.getContext, oldExpr)
oldExpr.expectedType(fromUnderscore = false) match {
case Some(expectedType) if FunctionType.isFunctionType(expectedType) =>
def conformsExpected(expr: ScExpression): Boolean = expr.`type`().getOrAny conforms expectedType
conformsExpected(oldExpr) && conformsExpected(newExpr) && oldExpr.`type`().getOrAny.conforms(newExpr.`type`().getOrNothing)
case None if newExprText endsWith "_" =>
(oldExpr.`type`(), newExpr.`type`()) match {
case (Right(oldType), Right(newType)) => oldType.equiv(newType)
case _ => false
}
case _ => false
}
}
private def possibleReplacements(expr: ScExpression): Seq[String] = {
val withoutArguments = methodWithoutArgumentsText(expr)
val withUnderscore =
if (expr.getText endsWith "_") Nil
else withoutArguments.map(_ + " _")
withoutArguments ++ withUnderscore
}
private def hasByNameOrImplicitParam(elem: PsiElement): Boolean = {
elem match {
case fun: ScMethodLike => fun.parameterList.params.exists(p => p.isCallByNameParameter || p.isImplicitParameter)
case _ => false
}
}
private def hasContextOrViewBoundTypeParam(elem: PsiElement): Boolean = {
elem match {
case fun: ScMethodLike => fun.getTypeParameters.exists { case bounds: ScTypeBoundsOwner => bounds.hasImplicitBounds }
case _ => false
}
}
}
class ConvertibleToMethodValueQuickFix(expr: ScExpression, replacement: String, @Nls hint: String)
extends AbstractFixOnPsiElement(hint, expr) {
override protected def doApplyFix(scExpr: ScExpression)
(implicit project: Project): Unit = {
val newExpr = createExpressionFromText(replacement)
scExpr.replaceExpression(newExpr, removeParenthesis = true)
}
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/syntacticSimplification/ConvertibleToMethodValueInspection.scala | Scala | apache-2.0 | 8,459 |
package dk.tennis.compare.analysis
import breeze.plot.Figure
import breeze.plot._
import java.util.Date
import java.text.SimpleDateFormat
import java.text.NumberFormat
import java.text.FieldPosition
import java.text.ParsePosition
case class TimePlot(legend: Boolean) {
val f = Figure()
f.subplot(0).legend = legend
def add(data: Seq[Tuple2[Date, Double]], label: String) {
val x = data.map(d => d._1.getTime().toDouble / (1000L * 3600 * 24))
val y = data.map(d => d._2)
val df = new SimpleDateFormat("dd-MM-yyy")
val nf = new NumberFormat {
def format(number: Double, toAppendTo: StringBuffer, pos: FieldPosition): StringBuffer = {
toAppendTo.append(df.format(number * (1000L * 3600 * 24)))
toAppendTo
}
def format(number: Long, toAppendTo: StringBuffer, pos: FieldPosition): StringBuffer = {
throw new UnsupportedOperationException("Not implemented")
}
def parse(source: String, parsePosition: ParsePosition): Number = {
throw new UnsupportedOperationException("Not implemented")
}
}
f.subplot(0).xaxis.setNumberFormatOverride(nf)
f.subplot(0).ylim(0, 10)
f.subplot(0) += plot(x, y, name = label)
}
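  // Illustrative usage (not in the original source), assuming breeze-viz is on
  // the classpath:
  //
  //   val plot = TimePlot(legend = true)
  //   plot.add(Seq(new Date() -> 4.2), label = "player ranking")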
} | danielkorzekwa/tennis-player-compare | model-tester/src/main/scala/dk/tennis/compare/analysis/TimePlot.scala | Scala | bsd-2-clause | 1,218 |
package monocle.std
import monocle.MonocleSuite
import monocle.law.discipline.{IsoTests, PrismTests}
/*class ValidationSpec extends MonocleSuite {
checkAll("Validation is isomorphic to Disjunction", IsoTests(monocle.std.validation.validationToDisjunction[String, Int]))
checkAll("success", PrismTests(monocle.std.validation.success[String, Int]))
checkAll("failure", PrismTests(monocle.std.validation.failure[String, Int]))
}*/
| fkz/Monocle | test/shared/src/test/scala/monocle/std/ValidationSpec.scala | Scala | mit | 436 |
object SpecializationAbstractOverride {
trait A[@specialized(Int) T] { def foo(t: T) }
trait B extends A[Int] { def foo(t: Int) { println("B.foo") } }
trait M extends B { abstract override def foo(t: Int) { super.foo(t) ; println ("M.foo") } }
object C extends B with M
object D extends B { override def foo(t: Int) { super.foo(t); println("M.foo") } }
def main(args: Array[String]) {
D.foo(42) // OK, prints B.foo M.foo
C.foo(42) // StackOverflowError
}
}
| felixmulder/scala | test/pending/run/t4996.scala | Scala | bsd-3-clause | 558 |
final object Test extends java.lang.Object with Application {
class Foo(val s: String, val n: Int) extends java.lang.Object {
};
def foo[A >: Nothing <: Any, B >: Nothing <: Any, C >: Nothing <: Any]
(unapply1: (A) => Option[(B, C)], v: A): Unit =
unapply1.apply(v) match {
case Some((fst @ _, snd @ _)) =>
scala.Predef.println(scala.Tuple2.apply[java.lang.String, java.lang.String]("first: ".+(fst), " second: ".+(snd)))
case _ => scala.Predef.println(":(")
}
Test.this.foo[Test.Foo, String, Int]({
((eta$0$1: Test.Foo) => Test.this.Foo.unapply(eta$0$1))
}, Test.this.Foo.apply("this might be fun", 10));
final object Foo extends java.lang.Object with ((String, Int) => Test.Foo) {
def unapply(x$0: Test.Foo): Some[(String, Int)] = scala.Some.apply[(String, Int)](scala.Tuple2.apply[String, Int](x$0.s, x$0.n));
def apply(s: String, n: Int): Test.Foo = new Test.this.Foo(s, n)
}
}
| felixmulder/scala | test/pending/run/t0508x.scala | Scala | bsd-3-clause | 1,003 |
/**
* Copyright (C) 2009-2013 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.actor
import scala.collection.immutable
import akka.dispatch.sysmsg._
//import routing._
import akka.dispatch._
import akka.event._
import akka.util.{ /*Switch,*/ Helpers }
import akka.util.Collections.EmptyImmutableSeq
import scala.util.{ Success, Failure }
import scala.util.control.NonFatal
import java.util.concurrent.atomic.AtomicLong
import scala.concurrent.{ ExecutionContext, Future, Promise }
import scala.annotation.implicitNotFound
/**
* Interface for all ActorRef providers to implement.
*/
trait ActorRefProvider {
/**
* Reference to the supervisor of guardian and systemGuardian; this is
* exposed so that the ActorSystemImpl can use it as lookupRoot, i.e.
* for anchoring absolute actor look-ups.
*/
def rootGuardian: InternalActorRef
/**
* Reference to the supervisor of guardian and systemGuardian at the specified address;
* this is exposed so that the ActorRefFactory can use it as lookupRoot, i.e.
* for anchoring absolute actor selections.
*/
def rootGuardianAt(address: Address): ActorRef
/**
* Reference to the supervisor used for all top-level user actors.
*/
def guardian: LocalActorRef
/**
* Reference to the supervisor used for all top-level system actors.
*/
def systemGuardian: LocalActorRef
/**
* Dead letter destination for this provider.
*/
def deadLetters: ActorRef
/**
* The root path for all actors within this actor system, not including any remote address information.
*/
def rootPath: ActorPath
/**
* The Settings associated with this ActorRefProvider
*/
def settings: ActorSystem.Settings
/**
* Initialization of an ActorRefProvider happens in two steps: first
* construction of the object with settings, eventStream, etc.
* and then—when the ActorSystem is constructed—the second phase during
* which actors may be created (e.g. the guardians).
*/
def init(system: ActorSystemImpl): Unit
/**
* The Deployer associated with this ActorRefProvider
*/
//def deployer: Deployer
/**
* Generates and returns a unique actor path below “/temp”.
*/
def tempPath(): ActorPath
/**
* Returns the actor reference representing the “/temp” path.
*/
def tempContainer: InternalActorRef
/**
* Registers an actorRef at a path returned by tempPath();
* do NOT pass in any other path.
*/
def registerTempActor(actorRef: InternalActorRef, path: ActorPath): Unit
/**
* Unregister a temporary actor from the “/temp” path
* (i.e. obtained from tempPath()); do NOT pass in any other path.
*/
def unregisterTempActor(path: ActorPath): Unit
/**
* Actor factory with create-only semantics: will create an actor as
* described by props with the given supervisor and path (may be different
* in case of remote supervision). If systemService is true, deployment is
* bypassed (local-only). If ``Some(deploy)`` is passed in, it should be
* regarded as taking precedence over the nominally applicable settings,
* but it should be overridable from external configuration; the lookup of
* the latter can be suppressed by setting ``lookupDeploy`` to ``false``.
*/
def actorOf(
system: ActorSystemImpl,
props: Props,
supervisor: InternalActorRef,
path: ActorPath,
systemService: Boolean,
//deploy: Option[Deploy],
//lookupDeploy: Boolean,
async: Boolean): InternalActorRef
/**
* Create actor reference for a specified path. If no such
* actor exists, it will be (equivalent to) a dead letter reference.
*/
//def resolveActorRef(path: String): ActorRef
/**
* Create actor reference for a specified path. If no such
* actor exists, it will be (equivalent to) a dead letter reference.
*/
def resolveActorRef(path: ActorPath): ActorRef
/**
* This Future is completed upon termination of this ActorRefProvider, which
* is usually initiated by stopping the guardian via ActorSystem.stop().
*/
def terminationFuture: Future[Unit]
/**
* Obtain the address which is to be used within sender references when
* sending to the given other address or none if the other address cannot be
* reached from this system (i.e. no means of communication known; no
* attempt is made to verify actual reachability).
*/
def getExternalAddressFor(addr: Address): Option[Address]
/**
* Obtain the external address of the default transport.
*/
def getDefaultAddress: Address
}
/**
* INTERNAL API
*/
private[akka] object SystemGuardian {
/**
* For the purpose of orderly shutdown it's possible
* to register interest in the termination of systemGuardian
* and receive a notification [[akka.actor.Guardian.TerminationHook]]
* before systemGuardian is stopped. The registered hook is supposed
* to reply with [[akka.actor.Guardian.TerminationHookDone]] and the
* systemGuardian will not stop until all registered hooks have replied.
*/
case object RegisterTerminationHook
case object TerminationHook
case object TerminationHookDone
}
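/**
 * Illustrative sketch (not part of the original source): an actor taking part
 * in the termination-hook protocol described above. It registers with the
 * system guardian, performs its cleanup when TerminationHook arrives, and then
 * acknowledges with TerminationHookDone. `systemGuardianRef` is an assumed
 * constructor argument.
 */
private[akka] class ExampleTerminationHook(systemGuardianRef: ActorRef) extends Actor {
  import SystemGuardian._
  override def preStart(): Unit = systemGuardianRef ! RegisterTerminationHook
  def receive = {
    case TerminationHook =>
      // ... orderly cleanup would run here ...
      sender ! TerminationHookDone
  }
}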
private[akka] object LocalActorRefProvider {
/*
* Root and user guardian
*/
private class Guardian(override val supervisorStrategy: SupervisorStrategy)
extends Actor {
def receive = {
case Terminated(_) => context.stop(self)
case StopChild(child) => context.stop(child)
case m => context.system.deadLetters forward DeadLetter(m, sender, self)
}
// guardian MUST NOT lose its children during restart
override def preRestart(cause: Throwable, msg: Option[Any]) {}
}
/**
* System guardian
*/
private class SystemGuardian(
override val supervisorStrategy: SupervisorStrategy,
val guardian: ActorRef)
extends Actor {
import SystemGuardian._
var terminationHooks = Set.empty[ActorRef]
def receive = {
case Terminated(`guardian`) =>
// time for the systemGuardian to stop, but first notify all the
// termination hooks, they will reply with TerminationHookDone
// and when all are done the systemGuardian is stopped
context.become(terminating)
terminationHooks foreach { _ ! TerminationHook }
stopWhenAllTerminationHooksDone()
case Terminated(a) =>
// a registered, and watched termination hook terminated before
// termination process of guardian has started
terminationHooks -= a
case StopChild(child) => context.stop(child)
case RegisterTerminationHook if sender != context.system.deadLetters =>
terminationHooks += sender
context watch sender
case m => context.system.deadLetters forward DeadLetter(m, sender, self)
}
def terminating: Receive = {
case Terminated(a) => stopWhenAllTerminationHooksDone(a)
case TerminationHookDone => stopWhenAllTerminationHooksDone(sender)
case m => context.system.deadLetters forward DeadLetter(m, sender, self)
}
def stopWhenAllTerminationHooksDone(remove: ActorRef): Unit = {
terminationHooks -= remove
stopWhenAllTerminationHooksDone()
}
def stopWhenAllTerminationHooksDone(): Unit =
if (terminationHooks.isEmpty) {
//context.system.eventStream.stopDefaultLoggers(context.system)
context.stop(self)
}
// guardian MUST NOT lose its children during restart
override def preRestart(cause: Throwable, msg: Option[Any]) {}
}
}
/**
* Local ActorRef provider.
*
* INTERNAL API!
*
* Depending on this class is not supported, only the [[ActorRefProvider]] interface is supported.
*/
private[akka] class LocalActorRefProvider private[akka] (
_systemName: String,
override val settings: ActorSystem.Settings,
val eventStream: EventStream,
//val dynamicAccess: DynamicAccess,
//override val deployer: Deployer,
_deadLetters: Option[ActorPath => InternalActorRef])
extends ActorRefProvider {
// this is the constructor needed for reflectively instantiating the provider
def this(_systemName: String,
settings: ActorSystem.Settings,
eventStream: EventStream/*,
dynamicAccess: DynamicAccess*/) =
this(_systemName,
settings,
eventStream,
//dynamicAccess,
//new Deployer(settings, dynamicAccess),
None)
override val rootPath: ActorPath = RootActorPath(Address(/*"akka",*/ _systemName))
//private[actors] val log: LoggingAdapter =
// Logging(eventStream, "LocalActorRefProvider(" + rootPath.address + ")")
protected object log {
def error(msg: String): Unit = Console.err.println(msg)
def error(ex: Throwable, msg: String): Unit = error(s"$ex -- $msg")
def debug(msg: String): Unit = Console.err.println(msg)
def debug(ex: Throwable, msg: String): Unit = debug(s"$ex -- $msg")
}
override val deadLetters: InternalActorRef =
_deadLetters.getOrElse((p: ActorPath) =>
new DeadLetterActorRef(this, p, eventStream)).apply(rootPath / "deadLetters")
/*
* generate name for temporary actor refs
*/
private var tempNumber = 0L
private def tempName() = Helpers.base64({
val n = tempNumber
tempNumber += 1
n
})
private val tempNode = rootPath / "temp"
override def tempPath(): ActorPath = tempNode / tempName()
/**
* Top-level anchor for the supervision hierarchy of this actor system. Will
 * receive only Supervise/ChildTerminated system messages or Failed messages.
*/
private[akka] val theOneWhoWalksTheBubblesOfSpaceTime: InternalActorRef = new MinimalActorRef {
private[this] var stopped: Boolean = false
var causeOfTermination: Option[Throwable] = None
val path = (rootPath / "bubble-walker") withUid 1
def provider: ActorRefProvider = LocalActorRefProvider.this
override def stop(): Unit = {
if (!stopped) {
stopped = true
terminationPromise.complete(
causeOfTermination.map(Failure(_)).getOrElse(Success(())))
}
}
override def !(message: Any)(implicit sender: ActorRef = Actor.noSender): Unit = {
if (!stopped) {
message match {
case null => throw new InvalidMessageException("Message is null")
case _ =>
log.error(s"$this received unexpected message [$message]")
}
}
}
override def sendSystemMessage(message: SystemMessage): Unit = {
if (!stopped) {
message match {
case Failed(child, ex, _) =>
log.error(ex, s"guardian $child failed, shutting down!")
causeOfTermination = Some(ex)
child.asInstanceOf[InternalActorRef].stop()
case Supervise(_, _) =>
// TODO register child in some map to keep track of it and enable shutdown after all dead
case _: DeathWatchNotification =>
stop()
case _ =>
log.error(s"$this received unexpected system message [$message]")
}
}
}
}
/*
* The problem is that ActorRefs need a reference to the ActorSystem to
* provide their service. Hence they cannot be created while the
* constructors of ActorSystem and ActorRefProvider are still running.
* The solution is to split out that last part into an init() method,
* but it also requires these references to be @volatile and lazy.
*/
private var system: ActorSystemImpl = _
lazy val terminationPromise: Promise[Unit] = Promise[Unit]()
def terminationFuture: Future[Unit] = terminationPromise.future
private var extraNames: Map[String, InternalActorRef] = Map()
/**
* Higher-level providers (or extensions) might want to register new synthetic
* top-level paths for doing special stuff. This is the way to do just that.
* Just be careful to complete all this before ActorSystem.start() finishes,
* or before you start your own auto-spawned actors.
*/
def registerExtraNames(_extras: Map[String, InternalActorRef]): Unit =
extraNames ++= _extras
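  // Illustrative call (hypothetical ref): a higher-level provider exposing a
  // synthetic "/remote" top-level path would register it during its own init:
  //   registerExtraNames(Map("remote" -> remoteGuardianRef))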
//private def guardianSupervisorStrategyConfigurator =
// dynamicAccess.createInstanceFor[SupervisorStrategyConfigurator](settings.SupervisorStrategyClass, EmptyImmutableSeq).get
/**
 * Overridable supervision strategy to be used by the root (“/”) guardian.
*/
protected def rootGuardianStrategy: SupervisorStrategy = OneForOneStrategy() {
case ex =>
log.error(ex, "guardian failed, shutting down system")
SupervisorStrategy.Stop
}
/**
* Overridable supervision strategy to be used by the “/user” guardian.
*/
protected def guardianStrategy: SupervisorStrategy =
SupervisorStrategy.defaultStrategy
//guardianSupervisorStrategyConfigurator.create()
/**
* Overridable supervision strategy to be used by the “/system” guardian.
*/
protected def systemGuardianStrategy: SupervisorStrategy =
SupervisorStrategy.defaultStrategy
val mailboxes: Mailboxes = new Mailboxes(deadLetters)
private lazy val defaultDispatcher: MessageDispatcher =
new MessageDispatcher(mailboxes)
//system.dispatchers.defaultGlobalDispatcher
//private lazy val defaultMailbox = system.mailboxes.lookup(Mailboxes.DefaultMailboxId)
override lazy val rootGuardian: LocalActorRef =
new LocalActorRef(
system,
rootPath,
theOneWhoWalksTheBubblesOfSpaceTime,
Props(new LocalActorRefProvider.Guardian(rootGuardianStrategy)),
defaultDispatcher) {
override def getParent: InternalActorRef = this
override def getSingleChild(name: String): InternalActorRef = name match {
case "temp" => tempContainer
case "deadLetters" => deadLetters
case other =>
extraNames.get(other).getOrElse(super.getSingleChild(other))
}
}
override def rootGuardianAt(address: Address): ActorRef =
if (address == rootPath.address) rootGuardian
else deadLetters
override lazy val guardian: LocalActorRef = {
val cell = rootGuardian.actorCell
cell.checkChildNameAvailable("user")
val ref = new LocalActorRef(system, (rootPath / "user") withUid 2, rootGuardian,
Props(new LocalActorRefProvider.Guardian(guardianStrategy)),
defaultDispatcher)
cell.initChild(ref)
ref.start()
ref
}
override lazy val systemGuardian: LocalActorRef = {
val cell = rootGuardian.actorCell
cell.checkChildNameAvailable("system")
val ref = new LocalActorRef(system, (rootPath / "system") withUid 3, rootGuardian,
Props(new LocalActorRefProvider.SystemGuardian(systemGuardianStrategy, guardian)),
defaultDispatcher)
cell.initChild(ref)
ref.start()
ref
}
lazy val tempContainer =
new VirtualPathContainer(system.provider, tempNode, rootGuardian/*, log*/)
def registerTempActor(actorRef: InternalActorRef, path: ActorPath): Unit = {
assert(path.parent eq tempNode, "cannot registerTempActor() with anything not obtained from tempPath()")
tempContainer.addChild(path.name, actorRef)
}
def unregisterTempActor(path: ActorPath): Unit = {
assert(path.parent eq tempNode, "cannot unregisterTempActor() with anything not obtained from tempPath()")
tempContainer.removeChild(path.name)
}
  def init(_system: ActorSystemImpl): Unit = {
system = _system
rootGuardian.start()
// chain death watchers so that killing guardian stops the application
systemGuardian.sendSystemMessage(Watch(guardian, systemGuardian))
rootGuardian.sendSystemMessage(Watch(systemGuardian, rootGuardian))
//eventStream.startDefaultLoggers(_system)
}
/*def resolveActorRef(path: String): ActorRef = path match {
case ActorPathExtractor(address, elems) if address == rootPath.address =>
resolveActorRef(rootGuardian, elems)
case _ =>
log.debug(s"resolve of unknown path [$path] failed")
deadLetters
}*/
def resolveActorRef(path: ActorPath): ActorRef = {
if (path.root == rootPath) resolveActorRef(rootGuardian, path.elements)
else {
log.debug(s"resolve of foreign ActorPath [$path] failed")
deadLetters
}
}
/**
* INTERNAL API
*/
private[akka] def resolveActorRef(ref: InternalActorRef,
pathElements: Iterable[String]): InternalActorRef =
if (pathElements.isEmpty) {
log.debug("resolve of empty path sequence fails (per definition)")
deadLetters
} else ref.getChild(pathElements.iterator) match {
case Nobody =>
log.debug(s"resolve of path sequence [/${pathElements.mkString("/")}] failed")
new EmptyLocalActorRef(system.provider, ref.path / pathElements, eventStream)
case x => x
}
def actorOf(system: ActorSystemImpl, props: Props, supervisor: InternalActorRef, path: ActorPath,
systemService: Boolean, /*deploy: Option[Deploy], lookupDeploy: Boolean,*/ async: Boolean): InternalActorRef = {
new LocalActorRef(system, path, supervisor, props, system.dispatcher)
}
def getExternalAddressFor(addr: Address): Option[Address] =
if (addr == rootPath.address) Some(addr) else None
def getDefaultAddress: Address = rootPath.address
}
| sjrd/scala-js-actors | actors/src/main/scala/akka/actor/ActorRefProvider.scala | Scala | bsd-3-clause | 17,190 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations.retriever
import uk.gov.hmrc.ct.accounts._
import uk.gov.hmrc.ct.accounts.frs10x.boxes.ACQ8999
import uk.gov.hmrc.ct.box.retriever.BoxRetriever
import uk.gov.hmrc.ct.computations._
import uk.gov.hmrc.ct.computations.covidSupport.{CP121, CP122, CP123, CP124, CP125, CP126}
import uk.gov.hmrc.ct.computations.lowEmissionCars._
import uk.gov.hmrc.ct.computations.machineryAndPlant._
import uk.gov.hmrc.ct.{CATO01, CATO02, CATO03, CATO13, CATO14, CATO15, CATO16, CATO20, CATO21, CATO22, CATO23, CATO24}
trait ComputationsBoxRetriever extends BoxRetriever {
def ap1(): AP1
def ap2(): AP2
def ap3(): AP3
def ac401(): AC401
def ac402(): AC402
def ac403(): AC403
def ac404(): AC404
def cp1(): CP1
def cp2(): CP2
def cp7(): CP7
def cp8(): CP8
def cp15(): CP15
def cp16(): CP16
def cp17(): CP17
def cp18(): CP18
def cp19(): CP19
def cp20(): CP20
def cp21(): CP21
def cp22(): CP22
def cp23(): CP23
def cp24(): CP24
def cp25(): CP25
def cp26(): CP26
def cp27(): CP27
def cp28(): CP28
def cp29(): CP29
def cp30(): CP30
def cp31(): CP31
def cp32(): CP32
def cp33(): CP33
def cp34(): CP34
def cp35(): CP35
def cp36(): CP36
def cp37(): CP37
def cp43(): CP43
def cp46(): CP46
def cp47(): CP47
def cp48(): CP48
def cp49(): CP49
def cp51(): CP51
def cp52(): CP52
def cp53(): CP53
def cp55(): CP55
def cp57(): CP57
def cp78(): CP78
def cp79(): CP79
def cp80(): CP80
def cp82(): CP82
def cp83(): CP83
def cp84(): CP84
def cp85(): CP85
def cp86(): CP86
def cp87Input(): CP87Input
def cp87a(): CP87a
def cp88(): CP88
def cp89(): CP89
def cp91Input(): CP91Input
def cp281(): CP281
def cp281a(): CP281a
def cp281b(): CP281b = CP281b.calculate(this)
def cp281c(): CP281c
def cp281d(): CP281d = CP281d.calculate(this)
def cp283a(): CP283a
def cp283b(): CP283b
def cp283c(): CP283c
def cp283d(): CP283d
def cp285(): CP285
def cp286(): CP286
def cp286a(): CP286a
def cp286b(): CP286b
def cp287(): CP287
def cp288a(): CP288a
def cp288b(): CP288b
def cp301(): CP301
def cp302(): CP302
def cp303(): CP303
  def cp3010(): CP3010
  def cp3020(): CP3020
  def cp3030(): CP3030
def cp501(): CP501
def cp502(): CP502
def cp503(): CP503
def cp510(): CP510
def cp14(): CP14 = CP14.calculate(this)
def cp38(): CP38 = CP38.calculate(this)
def cp39(): CP39 = CP39(cp14())
def cp40(): CP40 = CP40(cp38())
def cp44(): CP44 = CP44.calculate(this)
def cp45(): CP45 = CP45(cp44())
def cp54(): CP54 = CP54.calculate(this)
def cp58(): CP58 = CP58(cp43())
def cp59(): CP59 = CP59.calculate(this)
def cp81(): CP81 = CP81.calculate(this)
def cp87(): CP87 = CP87(cp87Input())
def cp90(): CP90 = CP90.calculate(this)
def cp91(): CP91 = CP91.calculate(this)
def cp92(): CP92 = CP92.calculate(this)
def cp93(): CP93 = CP93(cp186())
def cp94(): CP94 = CP94.calculate(this)
def cp95(): CP95 = CP95(cp93())
def cp96(): CP96 = CP96(cp91())
def cp97(): CP97 = CP97.calculate(this)
def cp98(): CP98 = CP98(cp96())
def cp99(): CP99 = CP99.calculate(this)
def cp100(): CP100 = CP100(cp45())
def cp101(): CP101 = CP101(cp46())
def cp102(): CP102 = CP102(cp47())
def cp103(): CP103 = CP103(cp48())
def cp104(): CP104 = CP104(cp49())
def cp105(): CP105 = CP105.calculate(this)
def cp106(): CP106 = CP106(cp51())
def cp107(): CP107 = CP107(cp52())
def cp108(): CP108 = CP108(cp53())
def cp109(): CP109 = CP109.calculate(this)
def cp110(): CP110 = CP110.calculate(this)
def cp114(): CP114 = CP114(cp58())
def cp116(): CP116 = CP116(cp59())
def cp117(): CP117 = CP117.calculate(this)
def cp118(): CP118 = CP118.calculate(this)
def cp120(): CP120
def cp121(): CP121
def cp122(): CP122
def cp123(): CP123
def cp124(): CP124
def cp125(): CP125
def cp126(): CP126 = CP126.calculate(this)
def cp127(): CP127
def cp128(): CP128
def cp129(): CP129
def cp130(): CP130 = CP130.calculate(this)
def cp186(): CP186 = CP186.calculate(this)
def cp234(): CP234 = CP234(cp281())
def cp235(): CP235 = CP235.calculate(this)
def cp237(): CP237 = CP237(cp287())
def cp238(): CP238 = CP238(cp290())
def cp238a(): CP238a = CP238a(chooseCp997())
def cp239(): CP239 = CP239(cp294())
def cp240(): CP240 = CP240(cp288())
def cp245(): CP245 = CP245(cp96())
def cp246(): CP246 = CP246(cp93())
def cp247(): CP247 = CP247(cp91())
def cp248(): CP248 = CP248(cp186())
def cp249(): CP249 = CP249(cp88())
def cp251(): CP251 = CP251(cp81())
def cp252(): CP252
def cp253(): CP253 = CP253.calculate(this)
def cp256(): CP256 = CP256.calculate(this)
def cp257(): CP257 = CP257.calculate(this)
def cp258(): CP258 = CP258.calculate(this)
def cp259(): CP259 = CP259.calculate(this)
def cp263(): CP263 = CP263.calculate(this)
def cp264(): CP264 = CP264(cp239())
def cp265(): CP265 = CP265.calculate(this)
def cp266(): CP266 = CP266(cp295())
def cp273(): CP273 = CP273(cp251())
def cp274(): CP274 = CP274(cp253())
def cp278(): CP278 = CP278(cp252())
def cp279(): CP279 = CP279(cp88())
def cp282(): CP282 = CP282.calculate(this)
def cp283(): CP283 = CP283.calculate(this)
def cp284(): CP284 = CP284.calculate(this)
def cp288(): CP288 = CP288.calculate(this)
def cp289(): CP289 = CP289.calculate(this)
def cp290(): CP290 = CP290.calculate(this)
def cp290a(): CP290a = CP290a(cp283a())
def cp290b(): CP290b = CP290b(cp283b())
def cp290c(): CP290c = CP290c(cp283c())
def cp290d(): CP290d = CP290d(cp283d())
def cp291(): CP291 = CP291.calculate(this)
def cp292(): CP292 = CP292(cp58())
def cp293(): CP293 = CP293.calculate(this)
def cp294(): CP294 = CP294.calculate(this)
def cp295(): CP295 = CP295.calculate(this)
def cp305(): CP305 = CP305.calculate(this)
def cp500(): CP500 = CP500.calculate(this)
def cp504(): CP504 = CP504(cp501())
def cp505(): CP505 = CP505(cp502())
def cp507(): CP507 = CP507(cp501())
def cp508(): CP508 = CP508(cp503())
def cp509(): CP509 = CP509.calculate(this)
def cp511(): CP511 = CP511.calculate(this)
def cp512(): CP512 = CP512(cp511())
def cp513(): CP513 = CP513(cp502())
def cp514(): CP514 = CP514(cp511())
def cp515(): CP515 = CP515(cp513())
def cp665(): CP665
def cp666(): CP666
def cp667(): CP667
def cp668(): CP668
def cp669(): CP669 = CP669.calculate(this)
def cp670(): CP670 = CP670.calculate(this)
def cp671(): CP671 = CP671(cp91())
def cp672(): CP672
def cp672a(): CP672a
def cp673(): CP673
def cp674(): CP674
def cp980(): CP980
def cp981(): CP981
def cp982(): CP982
def cp983(): CP983
def cp984(): CP984 = CP984.calculate(this)
def cp986(): CP986 = CP986.calculate(this)
def cp997(): CP997
def cp997NI(): CP997NI = CP997NI.calculate(this)
def cp997c(): CP997c
def cp997d(): CP997d
def cp997e(): CP997e = CP997e.calculate(this)
def cp998(): CP998 = CP998.calculate(this)
def cp999(): CP999 = CP999.calculate(this)
def cpAux1(): CPAux1 = CPAux1.calculate(this)
def cpAux2(): CPAux2 = CPAux2.calculate(this)
def cpAux3(): CPAux3 = CPAux3.calculate(this)
def cpQ1000(): CPQ1000
def cpQ7(): CPQ7
def cpQ8(): CPQ8
def cpQ10(): CPQ10
def cpQ11(): CPQ11
def cpQ17(): CPQ17
def cpQ117(): CPQ117
def cpQ18(): CPQ18
def cpQ19(): CPQ19
def cpQ20(): CPQ20
def cpQ21(): CPQ21
def cpQ321(): CPQ321
def cato01(): CATO01 = CATO01.calculate(this)
def cato02(): CATO02 = CATO02.calculate(this)
def cato03(): CATO03 = CATO03.calculate(this)
def cato13(): CATO13 = CATO13.calculate(this)
def cato14(): CATO14 = CATO14.calculate(this)
def cato15(): CATO15 = CATO15.calculate(this)
def cato16(): CATO16 = CATO16.calculate(this)
def cato20(): CATO20 = CATO20.calculate(this)
def cato21(): CATO21 = CATO21.calculate(this)
def cato22(): CATO22 = CATO22.calculate(this)
def cato23(): CATO23 = CATO23.calculate(this)
def cato24(): CATO24
def lec01(): LEC01
def sba01(): SBA01
def sba02(): List[Option[Int]] = CP296.getCostForEachBuilding(this)
def cp296(): CP296 = CP296.calculate(this)
def cp297(): CP297 = CP297.calculate(this)
def cp298(): CP298 = CP298.calculate(this)
def lec10(): LEC10 = LEC10.calculate(this)
def lec11(): LEC11 = LEC11.calculate(this)
def lec12(): LEC12 = LEC12.calculate(this)
def lec13(): LEC13 = LEC13.calculate(this)
  // Tricky: losses.northernIrelandJourneyActive should ideally be used here, but that
  // failed on retriever type integration, so instead we check whether the NI value is defined.
def chooseCp997(): CP997Abstract = {
val ni = this.cp997NI()
if ( ni.value.isDefined )
ni
else
this.cp997()
}
  def acq8999a(): ACQ8999
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/retriever/ComputationsBoxRetriever.scala | Scala | apache-2.0 | 9,487 |
package mesosphere.marathon.tasks
import mesosphere.marathon.Protos.MarathonTask
import mesosphere.marathon.state.{ AppDefinition, Timestamp }
import mesosphere.marathon.{ MarathonConf, MarathonSpec, MarathonTestHelper }
import mesosphere.mesos.protos.Implicits.{ slaveIDToProto, taskIDToProto }
import mesosphere.mesos.protos.{ SlaveID, TaskID }
import org.mockito.Mockito._
class DefaultTaskFactoryTest extends MarathonSpec {
test("Copy SlaveID from Offer to Task") {
val offer = MarathonTestHelper.makeBasicOffer()
.setHostname("some_host")
.setSlaveId(SlaveID("some slave ID"))
.build()
val version: Timestamp = Timestamp(5)
val appDefinition: AppDefinition = AppDefinition(ports = List(), version = version)
val runningTasks: Set[MarathonTask] = Set(MarathonTask.newBuilder().setId("some task ID").build())
when(taskIdUtil.newTaskId(appDefinition.id)).thenReturn(TaskID("some task ID"))
val task = taskFactory.newTask(appDefinition, offer, runningTasks).get
val expectedTask = MarathonTasks.makeTask(
"some task ID", "some_host", List(),
List(), version, offer.getSlaveId
)
assert(task.marathonTask == expectedTask)
}
var taskIdUtil: TaskIdUtil = _
var taskTracker: TaskTracker = _
var config: MarathonConf = _
var taskFactory: DefaultTaskFactory = _
before {
taskIdUtil = mock[TaskIdUtil]
taskTracker = mock[TaskTracker]
config = MarathonTestHelper.defaultConfig()
taskFactory = new DefaultTaskFactory(taskIdUtil, config, MarathonTestHelper.getSchemaMapper())
}
}
| sepiroth887/marathon | src/test/scala/mesosphere/marathon/tasks/DefaultTaskFactoryTest.scala | Scala | apache-2.0 | 1,578 |
package no.nr.edvard.osiris.analysis
import no.nr.edvard.osiris.model.JavaMethod
import collection.immutable.{List, Map}
case class CreatorCorpusMember(method: JavaMethod, distance: Int)
object CreatorCorpus {
def buildFrom(
methods: Seq[JavaMethod],
referenceFlowGraph: ReferenceFlowGraph
): Seq[CreatorCorpusMember] = {
val predMap = flatInvert(referenceFlowGraph.edges)
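    // Sentinel kept well below Int.MaxValue so the "creatorDist + 1" updates below cannot overflow.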
val DISTANCE_INFINITY = Int.MaxValue/3
val flowProblem = new DataFlowProblem {
var nextUniqueId = -1
val creatorFinder = new CreatorFinder
class CreatorNode(val method: JavaMethod) extends Node {
override val uniqueId = { nextUniqueId += 1; nextUniqueId }
val isCreator = creatorFinder.isCreator(method.methodNode)
override val isStartNode: Boolean = isCreator
var creatorDist = if (isCreator) 0 else DISTANCE_INFINITY
override lazy val pred: Seq[CreatorNode] =
predMap.get(method) match {
case Some(preds) => preds.map(nodeMap(_))
case None => List[CreatorNode]()
}
override val succ: Seq[CreatorNode] = null /* Unused. */
override def update(registerTouch: Node => Unit) =
pred.foreach(p =>
if (p.creatorDist > creatorDist + 1) {
require(p.creatorDist == DISTANCE_INFINITY) /* Assert O(n) */
p.creatorDist = creatorDist + 1
registerTouch(p)
}
)
}
val nodeMap: Map[JavaMethod, CreatorNode] =
methods.map(m => m -> new CreatorNode(m)).toMap
override val nodes: List[CreatorNode] = nodeMap.values.toList
}
DataFlowProblemSolver.solve(flowProblem)
flowProblem.nodes
.filter(_.creatorDist < DISTANCE_INFINITY)
.sortBy(_.creatorDist)
.map(node => CreatorCorpusMember(node.method, node.creatorDist))
}
  // TODO: move to a shared utility and optimize.
def flatInvert[A, B](map: Map[A, TraversableOnce[B]]): Map[B, Seq[A]] = {
import collection.mutable
val accum = map.values.flatten.map(_ -> mutable.ArrayBuffer[A]()).toMap
map.foreach { case (k, v) => {
v.foreach(vv => accum(vv) += k)
}}
accum
}
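  // e.g. flatInvert(Map(1 -> Seq("a", "b"), 2 -> Seq("a"))) yields
  // Map("a" -> Seq(1, 2), "b" -> Seq(1)); value order follows the source map's
  // iteration order.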
} | edwkar/edwbsc | projects/Osiris/src/main/scala/no/nr/edvard/osiris/analysis/CreatorCorpus.scala | Scala | gpl-2.0 | 2,183 |
import scala.swing._
/** A module that displays damage per second information over various timeframes
*
* @param windowPeriods A string that represents the timeframes over which to display damage statistics
* @param currentFightDelay The time that must elapse without damage occurring for a fight to be considered complete; if/when new damage occurs after this delay, the damage will contribute to a new "current fight" damage value
*/
class DamageMeter(windowPeriods: String, currentFightDelay: Int) {
private val RollingWindowDamageDescription = "Seconds"
private val CurrentDamageDescription = "Current Fight"
private val currentDamageLabel = new Label{text = "<html><body style='text-align:center'>" + CurrentDamageDescription + "<br/><br/><div style='font-size:56px'>" + 0.0 + "</div></body></html>"; foreground = java.awt.Color.WHITE } // This label will be added to primary frame; value of text property will be updated when new data is available
private val windowPeriodsArray = windowPeriods.split(",")
private var windowPeriodsMap:Map[Int,Label] = Map()
  for (i <- windowPeriodsArray.indices) {
windowPeriodsMap += (windowPeriodsArray(i).toInt -> new Label{text = "<html><body style='text-align:center'>" + windowPeriodsArray(i) + " " + RollingWindowDamageDescription + "<br/><span style='font-size:14px'>" + 0.0 + "</span></body></html>"; foreground = java.awt.Color.WHITE })
}
// This MainFrame is the primary frame representing the DamageMeter itself
private val mainFrame = new MainFrame {
title = "EQLogParser - DamageMeter"
contents = new GridPanel(1,2) {
contents += currentDamageLabel
background = new Color(40,0,0)
contents += new GridPanel(windowPeriodsMap.size,1) {
for (period <- windowPeriodsMap.values) {
contents += period
}
background = new Color(40,0,0)
}
      iconImage = toolkit.getImage("src\\main\\resources\\eq_logo.png")
}
size = new Dimension(500,300)
centerOnScreen()
}
private var damageRecords = scala.collection.mutable.Map[Long,Int]() // Stores record of each occurrence of damage: [timestamp, damage amount]
  private val damagePattern = "(?:You|non-melee|taken)(?:\\s|\\w)*?(\\d+)(?:\\s|\\w)*damage".r // The regex pattern that will be used to identify damage in a log file line
private var currentFightStart:Long = -1
private var mostRecentDamage:Long = 0
/** Process a new log file line. If line represents damage done, store amount and timestamp of damage, and update current fight timestamps. */
def processNewLine(line: String): Unit = {
val currentTimestamp: Long = System.currentTimeMillis / 1000
for(matchDamage <- damagePattern.findAllIn(line).matchData) {
addDamageRecord(currentTimestamp, matchDamage.group(1).toInt)
mostRecentDamage = currentTimestamp
if (currentFightStart == -1)
currentFightStart = currentTimestamp
}
}
/** Re-calculate all damage values and update label text values with new information */
private def updateOutputValues(): Unit = {
val currentTimestamp: Long = System.currentTimeMillis / 1000
var damagePerSecondCalculationSum = 0
if (currentTimestamp - mostRecentDamage > currentFightDelay) {
currentFightStart = -1
currentDamageLabel.text = "<html><body style='text-align:center'>" + CurrentDamageDescription + "<br/><br/><div style='font-size:56px'>" + 0.0 + "</div></body></html>"
}
else {
damagePerSecondCalculationSum = 0
for (damage <- damageRecords.filterKeys(_ >= currentFightStart).values) {
damagePerSecondCalculationSum += damage
}
currentDamageLabel.text = "<html><body style='text-align:center'>" + CurrentDamageDescription + "<br/><br/><div style='font-size:56px'>" + "%.1f".format(damagePerSecondCalculationSum/(currentTimestamp-currentFightStart).toDouble).toDouble + "</div></body></html>"
}
for (period <- windowPeriodsArray) {
damagePerSecondCalculationSum = 0
for (damage <- damageRecords.filterKeys(currentTimestamp - _ < period.toInt).values) {
damagePerSecondCalculationSum += damage
}
windowPeriodsMap(period.toInt).text = "<html><body style='text-align:center'>" + period + " " + RollingWindowDamageDescription + "<br/><span style='font-size:14px'>" + "%.1f".format(damagePerSecondCalculationSum/period.toDouble).toDouble + "</span></body></html>"
}
}
/** Store an occurrence of damage and associated timestamp */
private def addDamageRecord(currentTimestamp: Long, damage: Int): Unit = {
if (!damageRecords.contains(currentTimestamp)) {
damageRecords.put(currentTimestamp, damage)
}
else {
damageRecords(currentTimestamp) += damage
}
println(currentTimestamp + " | " + damage) // debug
}
  /** Display the DamageMeter and begin its execution. */
def launch(): Unit = {
updateOutputValues()
mainFrame.visible = true
val updateThread = new Thread(new Runnable {
def run() {
while(true) {
Thread.sleep(1000)
updateOutputValues()
}
}
})
updateThread.start
}
}
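/** A usage sketch (illustrative only, not part of the original tool): the window
  * periods and fight delay below are sample values, and the log line is a
  * made-up EverQuest-style damage message. */
object DamageMeterExample {
  def main(args: Array[String]): Unit = {
    val meter = new DamageMeter(windowPeriods = "10,30,60", currentFightDelay = 8)
    meter.launch()
    // In the real tool this line would come from tailing the EQ log file.
    meter.processNewLine("You slash a gnoll for 42 points of damage.")
  }
}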
| tfellison/eq-log-parser | src/main/scala/DamageMeter.scala | Scala | mit | 5,170 |
package com.twitter.finagle.netty4.ssl.server
import com.twitter.finagle.netty4.ssl.{Alpn, Netty4SslHandler}
import com.twitter.finagle.param.Stats
import com.twitter.finagle.ssl.{ApplicationProtocols, Engine}
import com.twitter.finagle.ssl.server.{
SslServerConfiguration,
SslServerEngineFactory,
SslServerSessionVerifier
}
import com.twitter.finagle.transport.Transport
import com.twitter.finagle.{Address, Stack}
import io.netty.channel.{Channel, ChannelInitializer, ChannelPipeline}
import io.netty.handler.ssl.SslHandler
import java.net.InetSocketAddress
/**
* A channel initializer that takes [[Stack.Params]] and upgrades the pipeline with missing
* SSL/TLS pieces required for server-side transport encryption.
*/
final private[finagle] class Netty4ServerSslChannelInitializer(params: Stack.Params)
extends ChannelInitializer[Channel] {
/**
* Read the configured `SslServerEngineFactory` out of the stack param.
* The default for servers is `JdkServerEngineFactory`. If it's configured
* to use the default, for Netty 4, we replace it with the [[Netty4ServerEngineFactory]]
* instead.
*/
private[this] def selectEngineFactory(ch: Channel): SslServerEngineFactory = {
val defaultEngineFactory = SslServerEngineFactory.Param.param.default.factory
val engineFactory = params[SslServerEngineFactory.Param].factory
if (engineFactory == defaultEngineFactory) Netty4ServerEngineFactory(ch.alloc())
else engineFactory
}
/**
* This method combines `ApplicationProtocols` that may have been set by the user
* with ones that are set based on using a protocol like HTTP/2.
*/
private[this] def combineApplicationProtocols(
config: SslServerConfiguration
): SslServerConfiguration = {
val protocols = params[Alpn].protocols
config.copy(
applicationProtocols = ApplicationProtocols.combine(protocols, config.applicationProtocols)
)
}
private[this] def createSslHandler(engine: Engine): SslHandler = {
// Rip the `SSLEngine` out of the wrapper `Engine` and use it to
// create an `SslHandler`.
val statsReceiver = params[Stats].statsReceiver.scope("tls")
new Netty4SslHandler(engine, statsReceiver)
}
private[this] def createSslConnectHandler(
sslHandler: SslHandler,
remoteAddress: Address,
config: SslServerConfiguration
): SslServerVerificationHandler = {
val sessionVerifier = params[SslServerSessionVerifier.Param].verifier
new SslServerVerificationHandler(sslHandler, remoteAddress, config, sessionVerifier)
}
private[this] def addHandlersToPipeline(
pipeline: ChannelPipeline,
sslHandler: SslHandler,
sslConnectHandler: SslServerVerificationHandler
): Unit = {
pipeline.addFirst("sslConnect", sslConnectHandler)
pipeline.addFirst("ssl", sslHandler)
}
/**
* In this method, an `Engine` is created by an `SslServerEngineFactory` via
* an `SslServerConfiguration`. The `Engine` is then used to create the appropriate
* Netty handler, and it is subsequently added to the channel pipeline.
*/
def initChannel(ch: Channel): Unit = {
val remoteAddress: Address =
// guard against disconnected sessions and test environments with embedded channels
if (ch.remoteAddress == null || !ch.remoteAddress.isInstanceOf[InetSocketAddress])
Address.failing
else Address(ch.remoteAddress.asInstanceOf[InetSocketAddress])
val Transport.ServerSsl(configuration) = params[Transport.ServerSsl]
for (config <- configuration) {
val factory: SslServerEngineFactory = selectEngineFactory(ch)
val combined: SslServerConfiguration = combineApplicationProtocols(config)
val engine: Engine = factory(combined)
val sslHandler: SslHandler = createSslHandler(engine)
val sslConnectHandler: SslServerVerificationHandler =
createSslConnectHandler(sslHandler, remoteAddress, combined)
addHandlersToPipeline(ch.pipeline, sslHandler, sslConnectHandler)
}
}
}
| luciferous/finagle | finagle-netty4/src/main/scala/com/twitter/finagle/netty4/ssl/server/Netty4ServerSslChannelInitializer.scala | Scala | apache-2.0 | 3,998 |
/**
* Copyright (c) 2017-2018 BusyMachines
*
* See company homepage at: https://www.busymachines.com/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package busymachines.core
import busymachines.core.Anomaly.Parameters
/**
*
* @author Lorand Szakacs, [email protected], [email protected]
* @since 26 Dec 2017
*
*/
trait NotFoundAnomaly extends Anomaly with MeaningfulAnomalies.NotFound with Product with Serializable
object NotFoundAnomaly extends AnomalyConstructors[NotFoundAnomaly] {
override def apply(id: AnomalyID): NotFoundAnomaly = NotFoundAnomalyImpl(id = id)
override def apply(message: String): NotFoundAnomaly = NotFoundAnomalyImpl(message = message)
override def apply(parameters: Parameters): NotFoundAnomaly = NotFoundAnomalyImpl(parameters = parameters)
override def apply(id: AnomalyID, message: String): NotFoundAnomaly =
NotFoundAnomalyImpl(id = id, message = message)
override def apply(id: AnomalyID, parameters: Parameters): NotFoundAnomaly =
NotFoundAnomalyImpl(id = id, parameters = parameters)
override def apply(message: String, parameters: Parameters): NotFoundAnomaly =
NotFoundAnomalyImpl(message = message, parameters = parameters)
override def apply(id: AnomalyID, message: String, parameters: Parameters): NotFoundAnomaly =
NotFoundAnomalyImpl(id = id, message = message, parameters = parameters)
override def apply(a: Anomaly): NotFoundAnomaly =
NotFoundAnomalyImpl(id = a.id, message = a.message, parameters = a.parameters)
}
final private[core] case class NotFoundAnomalyImpl(
override val id: AnomalyID = NotFoundAnomalyID,
override val message: String = MeaningfulAnomalies.NotFoundMsg,
override val parameters: Parameters = Parameters.empty,
) extends NotFoundAnomaly with Product with Serializable {
override def asThrowable: Throwable = NotFoundFailureImpl(id, message, parameters)
}
//=============================================================================
//=============================================================================
//=============================================================================
abstract class NotFoundFailure(
override val message: String,
causedBy: Option[Throwable] = None,
) extends AnomalousFailure(message, causedBy) with NotFoundAnomaly with Product with Serializable {
override def id: AnomalyID = NotFoundAnomalyID
}
object NotFoundFailure
extends NotFoundFailure(MeaningfulAnomalies.NotFoundMsg, None) with SingletonAnomalyProduct
with FailureConstructors[NotFoundFailure] {
override def apply(causedBy: Throwable): NotFoundFailure =
NotFoundFailureImpl(message = causedBy.getMessage, causedBy = Option(causedBy))
override def apply(id: AnomalyID, message: String, causedBy: Throwable): NotFoundFailure =
NotFoundFailureImpl(id = id, message = message, causedBy = Option(causedBy))
override def apply(id: AnomalyID, parameters: Parameters, causedBy: Throwable): NotFoundFailure =
NotFoundFailureImpl(id = id, parameters = parameters, causedBy = Option(causedBy))
override def apply(message: String, parameters: Parameters, causedBy: Throwable): NotFoundFailure =
NotFoundFailureImpl(message = message, parameters = parameters, causedBy = Option(causedBy))
override def apply(id: AnomalyID, message: String, parameters: Parameters, causedBy: Throwable): NotFoundFailure =
NotFoundFailureImpl(id = id, message = message, parameters = parameters, causedBy = Option(causedBy))
override def apply(a: Anomaly, causedBy: Throwable): NotFoundFailure =
NotFoundFailureImpl(id = a.id, message = a.message, parameters = a.parameters, causedBy = Option(causedBy))
override def apply(id: AnomalyID): NotFoundFailure =
NotFoundFailureImpl(id = id)
override def apply(message: String): NotFoundFailure =
NotFoundFailureImpl(message = message)
override def apply(parameters: Parameters): NotFoundFailure =
NotFoundFailureImpl(parameters = parameters)
override def apply(id: AnomalyID, message: String): NotFoundFailure =
NotFoundFailureImpl(id = id, message = message)
override def apply(id: AnomalyID, parameters: Parameters): NotFoundFailure =
NotFoundFailureImpl(id = id, parameters = parameters)
override def apply(message: String, parameters: Parameters): NotFoundFailure =
NotFoundFailureImpl(message = message, parameters = parameters)
override def apply(id: AnomalyID, message: String, parameters: Parameters): NotFoundFailure =
NotFoundFailureImpl(id = id, message = message, parameters = parameters)
//we intentionally not pass a causedBy a.asThrowable. Not really meaningful in this case
override def apply(a: Anomaly): NotFoundFailure =
NotFoundFailureImpl(id = a.id, message = a.message, parameters = a.parameters)
override def apply(message: String, causedBy: Throwable): NotFoundFailure =
NotFoundFailureImpl(message = message, causedBy = Option(causedBy))
}
final private[core] case class NotFoundFailureImpl(
override val id: AnomalyID = NotFoundAnomalyID,
override val message: String = MeaningfulAnomalies.NotFoundMsg,
override val parameters: Parameters = Parameters.empty,
causedBy: Option[Throwable] = None,
) extends NotFoundFailure(message, causedBy) with Product with Serializable
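// Illustrative usage sketch (hypothetical `User`/`lookup`, not part of this file):
//   def findUser(id: String): User =
//     lookup(id).getOrElse(throw NotFoundFailure(s"user '$id' not found"))
// The thrown value is both a Throwable and a NotFoundAnomaly, so callers can
// match on MeaningfulAnomalies.NotFound and map it to, e.g., an HTTP 404.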
| busymachines/busymachines-commons | core/src/main/scala/busymachines/core/notFoundAnomaly.scala | Scala | apache-2.0 | 5,915 |
/*
* filter: inliner warnings; re-run with
*/
import scala.tools.nsc._
import scala.tools.partest.CompilerTest
import scala.collection.{ mutable, immutable, generic }
object Test extends CompilerTest {
import global._
import rootMirror._
import definitions._
override def code = """
package extest {
trait Bippy[A <: AnyRef, B] { } // wildcards
trait BippyLike[A <: AnyRef, B <: List[A], This <: BippyLike[A, B, This] with Bippy[A, B]] // no wildcards
trait BippyBud[A <: AnyRef, B, C <: List[A]]
trait Cov01[+A <: AnyRef, +B] { }
trait Cov02[+A <: AnyRef, B] { }
trait Cov03[+A <: AnyRef, -B] { }
trait Cov04[ A <: AnyRef, +B] { }
trait Cov05[ A <: AnyRef, B] { }
trait Cov06[ A <: AnyRef, -B] { }
trait Cov07[-A <: AnyRef, +B] { }
trait Cov08[-A <: AnyRef, B] { }
trait Cov09[-A <: AnyRef, -B] { }
trait Cov11[+A <: AnyRef, +B <: List[_]] { }
trait Cov12[+A <: AnyRef, B <: List[_]] { }
trait Cov13[+A <: AnyRef, -B <: List[_]] { }
trait Cov14[ A <: AnyRef, +B <: List[_]] { }
trait Cov15[ A <: AnyRef, B <: List[_]] { }
trait Cov16[ A <: AnyRef, -B <: List[_]] { }
trait Cov17[-A <: AnyRef, +B <: List[_]] { }
trait Cov18[-A <: AnyRef, B <: List[_]] { }
trait Cov19[-A <: AnyRef, -B <: List[_]] { }
trait Cov21[+A, +B] { }
trait Cov22[+A, B] { }
trait Cov23[+A, -B] { }
trait Cov24[ A, +B] { }
trait Cov25[ A, B] { }
trait Cov26[ A, -B] { }
trait Cov27[-A, +B] { }
trait Cov28[-A, B] { }
trait Cov29[-A, -B] { }
trait Cov31[+A, +B, C <: ((A, B))] { }
trait Cov32[+A, B, C <: ((A, B))] { }
trait Cov33[+A, -B, C <: ((A, _))] { }
trait Cov34[ A, +B, C <: ((A, B))] { }
trait Cov35[ A, B, C <: ((A, B))] { }
trait Cov36[ A, -B, C <: ((A, _))] { }
trait Cov37[-A, +B, C <: ((_, B))] { }
trait Cov38[-A, B, C <: ((_, B))] { }
trait Cov39[-A, -B, C <: ((_, _))] { }
trait Cov41[+A >: Null, +B] { }
trait Cov42[+A >: Null, B] { }
trait Cov43[+A >: Null, -B] { }
trait Cov44[ A >: Null, +B] { }
trait Cov45[ A >: Null, B] { }
trait Cov46[ A >: Null, -B] { }
trait Cov47[-A >: Null, +B] { }
trait Cov48[-A >: Null, B] { }
trait Cov49[-A >: Null, -B] { }
trait Covariant[+A <: AnyRef, +B] { }
trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: CovariantLike[A, B, This] with Covariant[A, B]]
trait Contra[-A >: AnyRef, -B] { }
trait ContraLike[-A >: AnyRef, -B >: List[A]]
}
"""
override def check(source: String, unit: global.CompilationUnit) {
getPackage(TermName("extest")).moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz =>
exitingTyper {
clazz.info
println(clazz.defString)
println(" " + classExistentialType(clazz) + "\\n")
}
}
}
}
| felixmulder/scala | test/files/run/existentials-in-compiler.scala | Scala | bsd-3-clause | 2,791 |
package net.fluxo.plugins.kas
import net.xeoh.plugins.base.Plugin
import org.apache.log4j.Level
/**
* @author Ronald Kurniawan (viper)
* @version 0.1, /01/15
*/
trait TrKas extends Plugin {
def primaryCommand(): String
def setMailLoggerName(name: String)
def process(fullCommand: Array[String]): String
def writeToLog(entry: String, logLevel: Level)
}
| fluxodesign/DDModules | PluginKickassTo/src/main/scala/net/fluxo/plugins/kas/TrKas.scala | Scala | lgpl-3.0 | 362 |
/*******************************************************************************
* Copyright 2010 Maxime Lévesque
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************** */
package org.squeryl.adapters
import java.sql.SQLException
import org.squeryl.internals.{StatementWriter, FieldMetaData, DatabaseAdapter}
import org.squeryl.dsl.ast._
import org.squeryl.{Schema}
class MSSQLServer extends DatabaseAdapter {
override def isFullOuterJoinSupported = false
override def intTypeDeclaration = "int"
override def stringTypeDeclaration = "varchar"
override def stringTypeDeclaration(length:Int) = "varchar("+length+")"
override def booleanTypeDeclaration = "bit"
override def doubleTypeDeclaration = "float"
override def longTypeDeclaration = "bigint"
override def bigDecimalTypeDeclaration = "decimal"
override def bigDecimalTypeDeclaration(precision:Int, scale:Int) = "numeric(" + precision + "," + scale + ")"
override def binaryTypeDeclaration = "varbinary(8000)"
override def dateTypeDeclaration = "date"
override def floatTypeDeclaration = "real"
override def timestampTypeDeclaration = "datetime"
override def supportsUnionQueryOptions = false
override def writeColumnDeclaration(fmd: FieldMetaData, isPrimaryKey: Boolean, schema: Schema): String = {
var res = " " + quoteIdentifier(fmd.columnName) + " " + databaseTypeFor(fmd)
if(!fmd.isOption)
res += " not null"
if(isPrimaryKey)
res += " primary key"
if(supportsAutoIncrementInColumnDeclaration && fmd.isAutoIncremented)
res += " IDENTITY(1,1)"
res
}
override def isTableDoesNotExistException(e: SQLException): Boolean =
e.getErrorCode == 3701
override def writeEndOfQueryHint(isForUpdate: () => Boolean, qen: QueryExpressionElements, sw: StatementWriter) = {}
override def writeEndOfFromHint(qen: QueryExpressionElements, sw: StatementWriter) =
if(qen.isForUpdate) {
sw.write("with(updlock, rowlock)")
sw.pushPendingNextLine
}
override def writeConcatFunctionCall(fn: FunctionNode, sw: StatementWriter) =
sw.writeNodesWithSeparator(fn.args, " + ", false)
override def writeConcatOperator(left: ExpressionNode, right: ExpressionNode, sw: StatementWriter) = {
val binaryOpNode = new BinaryOperatorNode(left, right, "+")
binaryOpNode.doWrite(sw)
}
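  // e.g. an ANSI-style "firstName || lastName" concatenation is emitted here as
  // "firstName + lastName", since SQL Server does not support the "||" operator.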
override def writeRegexExpression(left: ExpressionNode, pattern: String, sw: StatementWriter) = {
// If you are keen enough you can implement a UDF and subclass this method to call out to it.
// http://msdn.microsoft.com/en-us/magazine/cc163473.aspx
throw new UnsupportedOperationException("MSSQL does not yet support a regex function")
}
//SELECT TOP <pageSize> CustomerID,CompanyName,ContactName,ContactTitle
// FROM
// (SELECT TOP <currentPageNumber * pageSize>
// CustomerID,CompanyName,ContactName,ContactTitle
// FROM
// Customers AS T1 ORDER BY ContactName DESC)
// AS T2 ORDER BY ContactName ASC
// override def writeQuery(qen: QueryExpressionElements, sw: StatementWriter) =
// if(qen.page == None)
// super.writeQuery(qen, sw)
// else {
// val page = qen.page.get
// val beginOffset = page._1
// val pageSize = page._2
// //val endOffset = pageSize + beginOffset
// val sl = qen.selectList.filter(e => ! e.inhibited)
// val ob =
// if(! qen.orderByClause.isEmpty && qen.parent == None )
// qen.orderByClause.filter(e => ! e.inhibited)
// else
// Nil
//
// val obInverse = ob.map(_.asInstanceOf[OrderByExpression].inverse)
//
// sw.write("select * from (")
// sw.nextLine
// sw.write("select TOP " + pageSize + " * ")
// sw.nextLine
// sw.write("from (")
// sw.nextLine
// sw.writeIndented {
// super.writeQuery(qen, sw, false, Some(" TOP " + (beginOffset + pageSize) + " "))
// }
// sw.write(") _____z1_____")
// if(ob != Nil) {
// sw.nextLine
// sw.write(" order by ")
// sw.write(ob.map(_.asInstanceOf[OrderByExpression].inverse).map(_.writeToString.replace('.','_')).mkString(","))
// }
// sw.write(") _____z2_____")
// if(ob != Nil) {
// sw.nextLine
// sw.write(" order by ")
// sw.write(ob.map(_.writeToString.replace('.','_')).mkString(","))
// }
//
// println(sw.statement)
// }
override def writeQuery(qen: QueryExpressionElements, sw: StatementWriter) =
if(qen.page == None)
super.writeQuery(qen, sw)
else {
val page = qen.page.get
val beginOffset = page._1
val pageSize = page._2
sw.writeIndented {
super.writeQuery(qen, sw, false, Some(" TOP " + (beginOffset + pageSize) + " "))
}
}
private def _stripPrefix(selectE: String):String = {
val i = selectE.lastIndexOf(" as ")
selectE.substring(i + 4, selectE.length)
}
override def writePaginatedQueryDeclaration(page: () => Option[(Int, Int)], qen: QueryExpressionElements, sw: StatementWriter) = {}
override def quoteIdentifier(s: String) = "[" + s + "]"
}
| wukaikailive/squeryl | src/main/scala/org/squeryl/adapters/MSSQLServer.scala | Scala | apache-2.0 | 5,824 |
// Solution-6.scala
// Solution to Exercise 6 in "Comprehensions"
import com.atomicscala.AtomicTest._
case class Activity(date: String, action: String)
def getActivities(date: String, activities: Vector[Activity]): Vector[String] = {
for {
a <- activities
if a.date equals(date)
}
yield a.action
}
val activities = Vector(
Activity("01-01", "Run"),
Activity("01-03", "Ski"),
Activity("01-04", "Run"),
Activity("01-10", "Ski"),
Activity("01-03", "Run"))
getActivities("01-01", activities) is Vector("Run")
getActivities("01-02", activities) is Vector()
getActivities("01-03", activities) is Vector("Ski", "Run")
getActivities("01-04", activities) is Vector("Run")
getActivities("01-10", activities) is Vector("Ski")
/* OUTPUT_SHOULD_BE
Vector(Run)
Vector()
Vector(Ski, Run)
Vector(Run)
Vector(Ski)
*/
| P7h/ScalaPlayground | Atomic Scala/atomic-scala-solutions/33_Comprehensions/Solution-6.scala | Scala | apache-2.0 | 824 |
package fpgatidbits.dma
import Chisel._
import fpgatidbits.ocm._
import fpgatidbits.streams._
class ReadOrderCacheParams (
val mrp: MemReqParams,
val maxBurst: Int, // largest burst size (in beats) to handle
val outstandingReqs: Int, // max # of simultaneous outstanding requests
val chanIDBase: Int // base channel id value for output mem reqs
)
class ReadOrderCacheIO(p: MemReqParams, maxBurst: Int) extends Bundle {
// interface towards in-order processing elements
val reqOrdered = Decoupled(new GenericMemoryRequest(p)).flip
val rspOrdered = Decoupled(new GenericMemoryResponse(p))
// unordered interface towards out-of-order memory system
val reqMem = Decoupled(new GenericMemoryRequest(p))
val rspMem = Decoupled(new GenericMemoryResponse(p)).flip
// controls for ID queue reinit
val doInit = Bool(INPUT) // re-initialize queue
val initCount = UInt(INPUT, width = 8) // # IDs to initializes
}
class ReadOrderCache(p: ReadOrderCacheParams) extends Module {
val io = new ReadOrderCacheIO(p.mrp, p.maxBurst)
val beat = UInt(0, width = p.mrp.dataWidth)
val rid = UInt(0, width = p.mrp.idWidth)
val mreq = new GenericMemoryRequest(p.mrp)
val mrsp = new GenericMemoryResponse(p.mrp)
// queue with pool of available request IDs
val freeReqID = Module(new ReqIDQueue(
p.mrp.idWidth, p.outstandingReqs, p.chanIDBase)).io
freeReqID.doInit := io.doInit
freeReqID.initCount := io.initCount
// queue with issued requests
val busyReqs = Module(new FPGAQueue(mreq, p.outstandingReqs)).io
// multichannel queue for buffering received read data
val storage = Module(new MultiChanQueueSimple(
gen = mrsp, chans = p.outstandingReqs, elemsPerChan = p.maxBurst,
getChan = {x: GenericMemoryResponse => x.channelID - UInt(p.chanIDBase)}
)).io
// issue new requests: sync freeReqID and incoming reqs
val readyReqs = StreamJoin(
inA = freeReqID.idOut, inB = io.reqOrdered, genO = mreq,
join = {(freeID: UInt, r: GenericMemoryRequest) => GenericMemoryRequest(
p = p.mrp, addr = r.addr, write = Bool(false), id = freeID,
numBytes = r.numBytes
)}
)
// save original request ID upon entry
// TODO should replace this with Cloakroom structure
val origReqID = Mem(mreq.channelID.cloneType, p.outstandingReqs)
when(readyReqs.ready & readyReqs.valid) {
origReqID(freeReqID.idOut.bits) := io.reqOrdered.bits.channelID
}
//StreamMonitor(readyReqs, Bool(true), "readyReqs")
// issued requests go to both mem req channel and busyReqs queue
val reqIssueFork = Module(new StreamFork(
genIn = mreq, genA = mreq, genB = mreq,
forkA = {x: GenericMemoryRequest => x},
forkB = {x: GenericMemoryRequest => x}
)).io
readyReqs <> reqIssueFork.in
reqIssueFork.outA <> io.reqMem
reqIssueFork.outB <> busyReqs.enq
// buffer incoming responses in the multichannel queue
io.rspMem <> storage.in
// ordered response data comes from the appropriate storage queue
io.rspOrdered.bits.readData := storage.out.bits.readData
io.rspOrdered.bits.isWrite := Bool(false)
io.rspOrdered.bits.metaData := UInt(0)
// create a "repeated" version of the head of the busy queue -- each repeat
// corresponds to one burst beat
val repBitWidth = 1 + log2Up(p.maxBurst)
val busyRep = Module(new StreamRepeatElem(mreq.getWidth(), repBitWidth)).io
val bytesInBeat = UInt(p.mrp.dataWidth/8) // TODO correct for sub-word reads?
busyRep.inElem.valid := busyReqs.deq.valid
busyRep.inRepCnt.valid := busyReqs.deq.valid
busyRep.inElem.bits := busyReqs.deq.bits.toBits
busyRep.inRepCnt.bits := busyReqs.deq.bits.numBytes / bytesInBeat
busyReqs.deq.ready := busyRep.inElem.ready
val busyRepHead = mreq.fromBits(busyRep.out.bits)
storage.outSel := busyRepHead.channelID - UInt(p.chanIDBase)
// join the storage.out and busyRep.out streams
io.rspOrdered.valid := storage.out.valid & busyRep.out.valid
storage.out.ready := io.rspOrdered.ready & busyRep.out.valid
busyRep.out.ready := io.rspOrdered.ready & storage.out.valid
// the head-of-line ID will be recycled when we are done with it
freeReqID.idIn.valid := Bool(false)
freeReqID.idIn.bits := busyRepHead.channelID
// restore the original request's channel ID with lookup
io.rspOrdered.bits.channelID := origReqID(busyRepHead.channelID)
val regBeatCounter = Reg(init = UInt(0, repBitWidth))
when(busyRep.out.valid & busyRep.out.ready) {
regBeatCounter := regBeatCounter + UInt(1)
when(regBeatCounter === (busyRepHead.numBytes / bytesInBeat) - UInt(1)) {
regBeatCounter := UInt(0)
freeReqID.idIn.valid := Bool(true) // always room in the ID pool
}
}
}
// a queue for storing the available request IDs, plus a little initializer
// to initially fill it with the range of available IDs --
// essentially the "pool of available request IDs". supports auto on-reset
// initialization (just fills with max # IDs) as well as manual re-init
// to limit the # of IDs in the pool further (requester becomes less aggressive)
// NOTE: make sure all IDs have been returned to the pool before doing
// manual re-initialization, weird things will happen otherwise
class ReqIDQueue(idWidth: Int, maxEntries: Int, startID: Int) extends Module {
val idElem = UInt(width = idWidth)
val io = new Bundle {
val doInit = Bool(INPUT) // re-initialize queue
val initCount = UInt(INPUT, width = 8) // # IDs to initializes
val idIn = Decoupled(idElem).flip // recycled IDs into the pool
val idOut = Decoupled(idElem) // available IDs from the pool
}
val initGen = Module(new SequenceGenerator(idWidth)).io
// initialize contents once upon reset, and when requested afterwards
val regFirstInit = Reg(init = Bool(true)) // distinguish reset & manual init
val regDoInit = Reg(init = Bool(true))
when(io.doInit) { regDoInit := Bool(true) } // trigger manual reinit
when(regDoInit & initGen.finished) {
regDoInit := Bool(false)
regFirstInit := Bool(false)
}
// clear queue contents prior to manual reinit
val resetQueue = reset | (io.doInit & !regDoInit)
val idQ = Module(new Queue(idElem, maxEntries, _reset=resetQueue)).io
idQ.deq <> io.idOut
initGen.start := regDoInit
// on-reset init fills the queue with max # elements
initGen.count := Mux(regFirstInit, UInt(maxEntries), io.initCount)
initGen.step := UInt(1)
initGen.init := UInt(startID)
val idSources = Seq(io.idIn, initGen.seq)
DecoupledInputMux(regDoInit, idSources) <> idQ.enq
}
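// Instantiation sketch (illustrative parameter values): a pool of sixteen 4-bit
// IDs starting at 0 would be Module(new ReqIDQueue(4, 16, 0)).io, with idOut
// handing out fresh IDs and idIn recycling completed ones back into the pool.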
// BRAM-based reqID queue, suitable for larger ID pools. does not support
// reinitialization with a smaller pool of elements
class ReqIDQueueBRAM(idWidth: Int, maxEntries: Int, startID: Int) extends Module {
val idElem = UInt(width = idWidth)
val io = new Bundle {
val idIn = Decoupled(idElem).flip // recycled IDs into the pool
val idOut = Decoupled(idElem) // available IDs from the pool
}
val initGen = Module(new SequenceGenerator(idWidth)).io
// initialize contents once upon reset, and when requested afterwards
val regDoInit = Reg(init = Bool(true))
when(regDoInit & initGen.finished) {
regDoInit := Bool(false)
}
val idQ = Module(new BRAMQueue(idElem, maxEntries)).io
idQ.deq <> io.idOut
initGen.start := regDoInit
// on-reset init fills the queue with max # elements
initGen.count := UInt(maxEntries)
initGen.step := UInt(1)
initGen.init := UInt(startID)
val idSources = Seq(io.idIn, initGen.seq)
DecoupledInputMux(regDoInit, idSources) <> idQ.enq
}
| maltanar/fpga-tidbits | src/main/scala/fpgatidbits/dma/ReadOrderCache.scala | Scala | bsd-2-clause | 7,583 |
package com.github.raphaelpanta
object PrintFizzBuzzList extends App {
  // Note: an object extending App must not override main (App.main is final in
  // Scala 2.11+); the program body lives directly in the object instead.
  val numbers = List.range(1, 101)
  val fizzBuzz = new FizzBuzz
  val result = fizzBuzz fizzBuzzList numbers
  result.foreach(println)
} | raphaelpanta/FizzBuzzScala | src/com/github/raphaelpanta/PrintFizzBuzzList.scala | Scala | mit | 305 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package operator.user
package join
import org.objectweb.asm.Type
import org.objectweb.asm.signature.SignatureVisitor
import com.asakusafw.lang.compiler.model.graph.UserOperator
import com.asakusafw.spark.compiler.spi.{ OperatorCompiler, OperatorType }
import com.asakusafw.spark.runtime.fragment.user.join.ShuffledMasterCheckOperatorFragment
import com.asakusafw.spark.tools.asm._
import com.asakusafw.vocabulary.operator.{ MasterCheck => MasterCheckOp }
class ShuffledMasterCheckOperatorCompiler extends UserOperatorCompiler {
override def support(
operator: UserOperator)(
implicit context: OperatorCompiler.Context): Boolean = {
operator.annotationDesc.resolveClass == classOf[MasterCheckOp]
}
override def operatorType: OperatorType = OperatorType.CoGroupType
override def compile(
operator: UserOperator)(
implicit context: OperatorCompiler.Context): Type = {
assert(support(operator),
s"The operator type is not supported: ${operator.annotationDesc.resolveClass.getSimpleName}"
+ s" [${operator}]")
assert(operator.inputs.size >= 2,
"The size of inputs should be greater than or equals to 2: " +
s"${operator.inputs.size} [${operator}]")
assert(
operator.outputs.forall(output =>
output.dataModelType == operator.inputs(MasterCheckOp.ID_INPUT_TRANSACTION).dataModelType),
s"All of output types should be the same as the transaction type: ${
operator.outputs.map(_.dataModelType).mkString("(", ",", ")")
} [${operator}]")
val builder = new ShuffledMasterCheckOperatorFragmentClassBuilder(operator)
context.addClass(builder)
}
}
private class ShuffledMasterCheckOperatorFragmentClassBuilder(
operator: UserOperator)(
implicit context: OperatorCompiler.Context)
extends JoinOperatorFragmentClassBuilder(
classOf[IndexedSeq[Iterator[_]]].asType,
operator,
operator.inputs(MasterCheckOp.ID_INPUT_MASTER),
operator.inputs(MasterCheckOp.ID_INPUT_TRANSACTION))(
Option(
new ClassSignatureBuilder()
.newSuperclass {
_.newClassType(classOf[ShuffledMasterCheckOperatorFragment[_, _]].asType) {
_.newTypeArgument(
SignatureVisitor.INSTANCEOF,
operator.inputs(MasterCheckOp.ID_INPUT_MASTER).dataModelType)
.newTypeArgument(
SignatureVisitor.INSTANCEOF,
operator.inputs(MasterCheckOp.ID_INPUT_TRANSACTION).dataModelType)
}
}),
classOf[ShuffledMasterCheckOperatorFragment[_, _]].asType)
with ShuffledJoin
with MasterCheck {
override def defCtor()(implicit mb: MethodBuilder): Unit = {
val thisVar :: broadcastsVar :: fragmentVars = mb.argVars
thisVar.push().invokeInit(
superType,
fragmentVars(MasterCheckOp.ID_OUTPUT_MISSED).push(),
fragmentVars(MasterCheckOp.ID_OUTPUT_FOUND).push())
}
}
| ueshin/asakusafw-spark | compiler/src/main/scala/com/asakusafw/spark/compiler/operator/user/join/ShuffledMasterCheckOperatorCompiler.scala | Scala | apache-2.0 | 3,553 |
/*
* Copyright 2013 - 2020 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.phantom.streams.suites.iteratee
import com.outworkers.phantom.dsl._
import com.outworkers.phantom.streams._
import com.outworkers.phantom.tables.{JodaRow, TestDatabase}
import com.outworkers.util.samplers._
import org.joda.time.{DateTime, DateTimeZone}
import org.scalameter.api.{Gen => MeterGen, gen => _, _}
import org.scalatest.time.SpanSugar._
import scala.concurrent.{Await, Future}
class IterateeBenchmarkPerformanceTest extends Bench.LocalTime with TestDatabase.connector.Connector {
TestDatabase.primitivesJoda.createSchema()
implicit object JodaTimeSampler extends Sample[DateTime] {
override def sample: DateTime = DateTime.now(DateTimeZone.UTC)
}
val limit = 50
val sampleGenLimit = 30
val fs = for {
step <- 1 to 3
rows = Iterator.fill(limit)(gen[JodaRow])
batch = rows.foldLeft(Batch.unlogged)((b, row) => {
b.add(TestDatabase.primitivesJoda.store(row))
})
w = batch.future()
f = w map (_ => println(s"step $step was completed successfully"))
r = Await.result(f, 20 seconds)
} yield f map (_ => r)
Await.ready(Future.sequence(fs), 20 seconds)
val sizes: MeterGen[Int] = MeterGen.range("size")(limit, sampleGenLimit, limit)
performance of "Enumerator" in {
measure method "enumerator" in {
using(sizes) in {
size => Await.ready(TestDatabase.primitivesJoda.select.limit(size).fetchEnumerator run Iteratee.forEach { r => }, 10 seconds)
}
}
}
}
| outworkers/phantom | phantom-streams/src/test/scala/com/outworkers/phantom/streams/suites/iteratee/IterateeBenchmarkPerformanceTest.scala | Scala | apache-2.0 | 2,082 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package docs.home.scaladsl.serialization.v1
import com.lightbend.lagom.scaladsl.playjson.JsonMigration
import com.lightbend.lagom.scaladsl.playjson.JsonMigrations
import com.lightbend.lagom.scaladsl.playjson.JsonSerializerRegistry
import com.lightbend.lagom.scaladsl.playjson.JsonSerializer
import scala.collection.immutable.Seq
//#rename-class
case class OrderAdded(shoppingCartId: String)
//#rename-class
| rcavalcanti/lagom | docs/manual/scala/guide/cluster/code/docs/home/scaladsl/serialization/v1/OrderAdded.scala | Scala | apache-2.0 | 488 |
package com.github.pheymann.rrt.io
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.model.headers.RawHeader
import akka.http.scaladsl.model._
import akka.stream.Materializer
import com.github.pheymann.rrt.{RequestData, ServiceConfig, TestConfig}
import scala.concurrent.Future
object RestService {
private final val HttpPrefix = "http://"
private[io] def createHeaders(headers: List[(String, String)]): List[HttpHeader] = {
headers.map {case (key, value) => RawHeader(key, value)}
}
private[io] def buildUriStr(host: String, port: Int, uri: String): String = {
val tmpUri = s"$host:$port$uri"
if (host.contains(HttpPrefix))
tmpUri
else
HttpPrefix + tmpUri
}
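  // For example (illustrative values): buildUriStr("localhost", 8080, "/health")
  // yields "http://localhost:8080/health", while a host that already carries
  // the prefix, e.g. "http://service", is passed through without re-prefixing.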
private def requestService(method: HttpMethod,
data: RequestData,
headers: List[(String, String)],
config: ServiceConfig)
(implicit system: ActorSystem, materializer: Materializer): Future[HttpResponse] = {
import config._
import data._
val completeUri = buildUriStr(host, port, uri)
Http().singleRequest(HttpRequest(
method,
Uri(completeUri).withQuery(Query(params)),
createHeaders(headers),
entity = bodyOpt.fold(HttpEntity.Empty)(body => HttpEntity(data.contentTypeOpt.getOrElse(ContentTypes.`text/plain(UTF-8)`), body))
))
}
def requestFromActual(method: HttpMethod,
data: RequestData,
config: TestConfig)
(implicit system: ActorSystem, materializer: Materializer): Future[HttpResponse] = {
requestService(method, data, config.headers, config.actual)
}
def requestFromExpected(method: HttpMethod,
data: RequestData,
config: TestConfig)
(implicit system: ActorSystem, materializer: Materializer): Future[HttpResponse] = {
requestService(method, data, config.headers, config.expected)
}
}
| pheymann/rest-refactoring-test | core/src/main/scala/com/github/pheymann/rrt/io/RestService.scala | Scala | mit | 2,107 |
package fpinscala.errorhandling
import scala.{
Option => _,
Some => _,
Either => _,
_
} // hide std library `Option`, `Some` and `Either`, since we are writing our own in this chapter
sealed trait Option[+A] {
def map[B](f: A => B): Option[B] = ???
def getOrElse[B >: A](default: => B): B = ???
def flatMap[B](f: A => Option[B]): Option[B] = ???
def orElse[B >: A](ob: => Option[B]): Option[B] = ???
def filter(f: A => Boolean): Option[A] = ???
}
case class Some[+A](get: A) extends Option[A]
case object None extends Option[Nothing]
object Option {
def failingFn(i: Int): Int = {
val y: Int = throw new Exception("fail!") // `val y: Int = ...` declares `y` as having type `Int`, and sets it equal to the right hand side of the `=`.
try {
val x = 42 + 5
x + y
} catch { case e: Exception => 43 } // A `catch` block is just a pattern matching block like the ones we've seen. `case e: Exception` is a pattern that matches any `Exception`, and it binds this value to the identifier `e`. The match returns the value 43.
}
def failingFn2(i: Int): Int = {
try {
val x = 42 + 5
x + ((throw new Exception("fail!")): Int) // A thrown Exception can be given any type; here we're annotating it with the type `Int`
} catch { case e: Exception => 43 }
}
def mean(xs: Seq[Double]): Option[Double] =
if (xs.isEmpty) None
else Some(xs.sum / xs.length)
def variance(xs: Seq[Double]): Option[Double] = ???
def map2[A, B, C](a: Option[A], b: Option[B])(f: (A, B) => C): Option[C] = ???
def sequence[A](a: List[Option[A]]): Option[List[A]] = ???
def traverse[A, B](a: List[A])(f: A => Option[B]): Option[List[B]] = ???
}
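// Hedged sketch (not part of the exercise set): one standard way to complete
// `variance` in terms of the `mean` defined above, written with an explicit
// match so it does not depend on the still-unimplemented `flatMap` stub.
object OptionSketch {
  def variance(xs: Seq[Double]): Option[Double] =
    Option.mean(xs) match {
      case Some(m) => Option.mean(xs.map(x => math.pow(x - m, 2)))
      case None    => None
    }
}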
| antonioj-mattos/fpinscala | exercises/src/main/scala/fpinscala/errorhandling/Option.scala | Scala | mit | 1,702 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.benchmark
import java.io.{File, FileOutputStream, OutputStream}
/**
 * A base class for generating benchmark results to a file.
* For JDK9+, JDK major version number is added to the file names to distinguish the results.
*/
abstract class BenchmarkBase {
var output: Option[OutputStream] = None
/**
* Main process of the whole benchmark.
* Implementations of this method are supposed to use the wrapper method `runBenchmark`
* for each benchmark scenario.
*/
def runBenchmarkSuite(mainArgs: Array[String]): Unit
final def runBenchmark(benchmarkName: String)(func: => Any): Unit = {
val separator = "=" * 96
    val testHeader = (separator + '\n' + benchmarkName + '\n' + separator + '\n' + '\n').getBytes
output.foreach(_.write(testHeader))
func
    output.foreach(_.write('\n'))
}
def main(args: Array[String]): Unit = {
val regenerateBenchmarkFiles: Boolean = System.getenv("SPARK_GENERATE_BENCHMARK_FILES") == "1"
if (regenerateBenchmarkFiles) {
      val version = System.getProperty("java.version").split("\\D+")(0).toInt
val jdkString = if (version > 8) s"-jdk$version" else ""
val resultFileName =
s"${this.getClass.getSimpleName.replace("$", "")}$jdkString$suffix-results.txt"
val file = new File(s"benchmarks/$resultFileName")
if (!file.exists()) {
file.createNewFile()
}
output = Some(new FileOutputStream(file))
}
runBenchmarkSuite(args)
output.foreach { o =>
if (o != null) {
o.close()
}
}
afterAll()
}
def suffix: String = ""
/**
* Any shutdown code to ensure a clean shutdown
*/
def afterAll(): Unit = {}
}
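// Hedged sketch (not part of Spark): a minimal concrete suite showing the
// intended wiring: scenarios go through `runBenchmark` so their headers and
// results reach the optional output file managed by `main` above.
object ExampleBenchmark extends BenchmarkBase {
  override def runBenchmarkSuite(mainArgs: Array[String]): Unit = {
    runBenchmark("sum of 1 to 1000000") {
      (1 to 1000000).sum
    }
  }
}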
| witgo/spark | core/src/test/scala/org/apache/spark/benchmark/BenchmarkBase.scala | Scala | apache-2.0 | 2,508 |
package universe
import rescala.reactives.Signals
import universe.Globals.engine._
import scala.util.Random
/**
* A World object unites a space (Board object), time (Time object), and a random object
* It also defines all repetitive actions, such as spawning new Animals and Plants
*/
class World(val width: Int = 100, val height: Int = 100) {
implicit val world = this
val board = new Board(width, height)
val time = new Time
val randomness = new Random(1)
val statusString: Signal[String] = Signals.lift(board.animalsAlive, board.animalsBorn) { (a, b) =>
s"Animals alive: $a Total born: $b"
}
var updates: List[() => Unit] = Nil
def status = statusString.now
def tick() = {
time.tick(())
board.removeDead()
val pc = board.elements.par
pc.tasksupport = Globals.taskSupport
pc.foreach { case (pos, be) => be.doStep(pos) }
}
/** batch spawns n Animals and m Plants */
def batchSpawn(nAnimals: Int, mPlants: Int): Unit = {
for (_ <- 1 to nAnimals) spawn(newAnimal)
for (_ <- 1 to mPlants) spawn(new Plant)
}
/** returns an animal at random */
def newAnimal: Animal = newAnimal(randomness.nextBoolean(), randomness.nextBoolean())
def newAnimal(isHerbivore: Boolean, isMale: Boolean): Animal = {
if (isHerbivore) {
if (isMale) new MaleHerbivore else new FemaleHerbivore
}
else {
if (isMale) new MaleCarnivore else new FemaleCarnivore
}
}
/** spawns the given Board element at a free random position in the world */
def spawn(element: BoardElement): Unit = {
spawn(element, board.randomFreePosition(randomness))
}
// each day, spawn a new plant
time.day.changed += { _ => //#HDL //#IF
plan(this spawn new Plant)
}
//each week, spawn a new animal
time.week.changed += { _ => //#HDL //#IF
plan(this spawn newAnimal)
}
/** spawns the given board element at the given position */
def spawn(element: BoardElement, pos: Pos) = board.add(element, pos)
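  // Deferred mutations: `plan` queues a thunk (`f _` eta-expands the by-name
  // argument into a () => Unit), and `runPlan` later drains the queue in
  // parallel on the shared task support.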
def plan(f: => Unit) = synchronized(updates ::= f _)
def runPlan() = {
val pc = updates.par
pc.tasksupport = Globals.taskSupport
pc.foreach { _.apply() }
updates = Nil
}
}
| volkc/REScala | Examples/Universe/src/main/scala/universe/World.scala | Scala | apache-2.0 | 2,179 |
/***********************************************************************
* Copyright (c) 2013-2016 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
*************************************************************************/
package org.locationtech.geomesa.kafka08
import java.util.Properties
import kafka.utils.TestUtils
import org.locationtech.geomesa.kafka.TestKafkaUtils
class TestKafkaUtils08 extends TestKafkaUtils {
override def createBrokerConfig(nodeId: Int, zkConnect: String): Properties = TestUtils.createBrokerConfig(nodeId)
override def choosePort: Int = TestUtils.choosePort()
}
| mdzimmerman/geomesa | geomesa-kafka/geomesa-kafka-08-utils/src/test/scala/org/locationtech/geomesa/kafka08/TestKafkaUtils08.scala | Scala | apache-2.0 | 839 |
/*
 * Optimus
 *
 * The mathematical programming library for Scala.
 *
 */
package optimus.algebra
/**
* A constraint has the form (expression RELATION expression). RELATION can be
* one of the {<=, =, >=}.
*
* @param lhs left hand side expression
* @param operator relation operator
* @param rhs right hand side expression
*/
case class Constraint(lhs: Expression, operator: ConstraintRelation, rhs: Expression) {
override def toString: String = s"$lhs $operator $rhs"
/**
* @param obj an object to compare
* @return true in case this object has identical constant
* and terms as the obj argument; false otherwise.
*/
override def equals(obj: Any): Boolean = obj match {
case that: Constraint =>
// Move terms in the left hand side of the expression in order to properly check equality.
val (a, b) = (lhs - rhs, that.lhs - that.rhs)
operator == that.operator && (a == b || -a == b || a == -b || -a == -b)
case _ => false
}
}
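// Hedged usage sketch (hypothetical names): given expressions `x` and `y`
// built elsewhere in the library,
//
//   Constraint(x + y, ConstraintRelation.LE, 10)
//
// would model `x + y <= 10`. Because `equals` moves all terms to the left
// hand side, it compares equal to the constraint built from the shifted
// form `x + y - 10 <= 0`.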
| vagm/Optimus | core/src/main/scala/optimus/algebra/Constraint.scala | Scala | lgpl-3.0 | 1,686 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.integration.torch
import com.intel.analytics.bigdl.nn.MV
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.{RandomGenerator, T, Table}
import scala.collection.mutable
@com.intel.analytics.bigdl.tags.Serial
class MVSpec extends TorchSpec {
def randn(): Double = RandomGenerator.RNG.uniform(-10, 10)
"A MV" should "generate correct output with no transform and no batch" in {
torchCheck()
val input1 = Tensor[Double](3, 3).apply1(x => randn())
val input2 = Tensor[Double](3).apply1(x => randn())
val input = T(input1, input2)
val gradOutput = Tensor[Double](3)
gradOutput.apply1(x => randn())
val module = new MV[Double]()
val start = System.nanoTime()
var output = Tensor[Double]()
var gradInput = T()
for (i <- 1 to 5) {
output = module.forward(input)
gradInput = module.backward(input, gradOutput)
}
val end = System.nanoTime()
val scalaTime = end - start
val code = "module = nn.MV()\\n" +
"for i = 1,5,1 do\\n" +
"output = module:forward(input)\\n " +
"gradInput = module:backward(input, gradOutput)\\n" +
"end"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput"))
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Table]
luaOutput should be (output)
gradInput should equal (luaGradInput)
println("Test case : MV, Torch : " +
luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
}
"A MV" should "generate correct output with transform and no batch" in {
torchCheck()
val input1 = Tensor[Double](3, 3).apply1(x => randn())
val input2 = Tensor[Double](3).apply1(x => randn())
val input = T(input1, input2)
val gradOutput = Tensor[Double](3)
gradOutput.apply1(x => randn())
val module = new MV[Double](true)
val start = System.nanoTime()
val output = module.forward(input)
val gradInput = module.backward(input, gradOutput)
val end = System.nanoTime()
val scalaTime = end - start
val code = "module = nn.MV(true)\\n" +
"output = module:forward(input)\\n " +
"gradInput = module:backward(input, gradOutput)"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput"))
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Table]
luaOutput should be (output)
gradInput should equal (luaGradInput)
println("Test case : MV, Torch : " +
luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
}
"A MV" should "generate correct output with no transform and batch" in {
torchCheck()
val input1 = Tensor[Double](3, 3, 3).apply1(x => randn())
val input2 = Tensor[Double](3, 3).apply1(x => randn())
val input = T(input1, input2)
val gradOutput = Tensor[Double](3, 3)
gradOutput.apply1(x => randn())
val module = new MV[Double]()
val start = System.nanoTime()
val output = module.forward(input)
val gradInput = module.backward(input, gradOutput)
val end = System.nanoTime()
val scalaTime = end - start
val code = "module = nn.MV()\\n" +
"output = module:forward(input)\\n " +
"gradInput = module:backward(input, gradOutput)"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput"))
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Table]
luaOutput should be (output)
gradInput should equal (luaGradInput)
println("Test case : MV, Torch : " +
luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
}
"A MV" should "generate correct output with transform and batch" in {
torchCheck()
val input1 = Tensor[Double](3, 3, 3).apply1(x => randn())
val input2 = Tensor[Double](3, 3).apply1(x => randn())
val input = T(input1, input2)
val gradOutput = Tensor[Double](3, 3)
gradOutput.apply1(x => randn())
val module = new MV[Double](true)
val start = System.nanoTime()
val output = module.forward(input)
val gradInput = module.backward(input, gradOutput)
val end = System.nanoTime()
val scalaTime = end - start
val code = "module = nn.MV(true)\\n" +
"output = module:forward(input)\\n " +
"gradInput = module:backward(input, gradOutput)"
val (luaTime, torchResult) = TH.run(code, Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput"))
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Table]
luaOutput should be (output)
gradInput should equal (luaGradInput)
println("Test case : MV, Torch : " +
luaTime + " s, Scala : " + scalaTime / 1e9 + " s")
}
}
| zhangxiaoli73/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/integration/torch/MVSpec.scala | Scala | apache-2.0 | 5,692 |
/*
* UDPChannelImpl.scala
* (ScalaOSC)
*
* Copyright (c) 2008-2021 Hanns Holger Rutz. All rights reserved.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.osc
package impl
import java.net.InetSocketAddress
private[osc] trait UDPChannelImpl extends NetChannelImpl with UDP.Channel {
override protected def config: UDP.Config
final def transport: Transport = config.transport
final def localSocketAddress: InetSocketAddress = {
val so = channel.socket()
new InetSocketAddress(so.getLocalAddress, so.getLocalPort)
}
final def remoteSocketAddress: InetSocketAddress = {
val so = channel.socket()
new InetSocketAddress(so.getInetAddress, so.getPort)
}
}
| Sciss/ScalaOSC | shared/src/main/scala/de/sciss/osc/impl/UDPChannelImpl.scala | Scala | lgpl-2.1 | 833 |
package org.powlab.jeye.decode.graph
import scala.collection.mutable.ArrayBuffer
import org.powlab.jeye.decode.RuntimeOpcode
import org.powlab.jeye.decode.graph.OpcodeNodes._
import org.powlab.jeye.core.Exception
import org.powlab.jeye.core.Exception._
import org.junit.Assert._
import org.junit.Test
import org.powlab.jeye.decode.TestUtils._
import org.powlab.jeye.core.Opcodes._
/**
 * Node descriptions.
 * Requirements: ReadOnly
 */
class OpcodeNodesTest {
@Test
def testScanOpcodeNodes {
    // 1. stress test -> null
try {
      scanOpcodeNodes(null, node => {fail("The handler must not be called")})
} catch {
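      // "Область" is Russian for "area": the framework's error messages are
      // localized, so the assertion below matches the original string rather
      // than a translation.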
case ex: Throwable => assertTrue(ex.getMessage().contains("Область: " + Exception.NODE_AREA))
}
// 2. single
scanOpcodeNodes(NODE_NOP, node => {assertEquals(NODE_NOP, node)})
// 3. group
val opcodes = ArrayBuffer[OpcodeNode](NODE_NOP)
val group = new GroupOpcodeNode(opcodes, false)
val expected = ArrayBuffer[OpcodeNode](NODE_NOP, group)
val actual = new ArrayBuffer[OpcodeNode]
scanOpcodeNodes(group, node => {actual += node})
assertEquals(expected, actual)
}
} | powlab/jeye | src/test/scala/org/powlab/jeye/decode/graph/OpcodeNodesTest.scala | Scala | apache-2.0 | 1,210 |
package scaldi
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scaldi.Injectable.inject
class InjectedMacroSpec extends AnyWordSpec with Matchers {
"`injected` macro" should {
"support basic injection" in {
implicit val inj = new Module {
bind[DepSimple] to injected[DepSimple]
binding to "Test"
bind[Server] to new HttpServer("localhost", 80)
}
inject[DepSimple] should equal(new DepSimple("Test", new HttpServer("localhost", 80)))
}
"support multiple argument lists" in {
implicit val inj = new Module {
bind[DepMultiArgList] to injected[DepMultiArgList]
binding to "Test"
bind[Server] to new HttpServer("localhost", 80)
bind[Long] to 100L
bind[List[Int]] to List(1, 2, 3)
}
inject[DepMultiArgList] should equal(
new DepMultiArgList("Test", new HttpServer("localhost", 80))(100L)(100L, List(1, 2, 3))
)
}
"support default arguments in the first argument list" in {
implicit val inj = new Module {
bind[DepWithDefaults] to injected[DepWithDefaults]
bind[Server] to new HttpServer("localhost", 80)
}
inject[DepWithDefaults] should equal(new DepWithDefaults(d2 = new HttpServer("localhost", 80)))
}
"support overrides" in {
implicit val inj = new Module {
bind[DepWithDefaults] to injected[DepWithDefaults](
Symbol("d1") -> Dep1("Override"),
Symbol("d3") -> Dep2("Another override")
)
bind[Server] to new HttpServer("localhost", 80)
binding to Dep1("Defined in module")
}
inject[DepWithDefaults] should equal(
new DepWithDefaults(Dep1("Override"), new HttpServer("localhost", 80), Dep2("Another override"))
)
}
"inject implicit injector" in {
implicit val inj = new ImplicitArgumentsModule
inject[ImplicitArguments] should equal(new ImplicitArguments(dep1 = Dep1("foo"), Dep2("foo"), Dep3("foo")))
}
}
}
class ImplicitArgumentsModule extends Module {
bind to injected[ImplicitArguments](Symbol("dep1") -> inject[Dep1](identified by Symbol("foo")))
bind[Dep1] identifiedBy Symbol("foo") to Dep1("foo")
bind[Dep1] identifiedBy Symbol("bar") to Dep1("bar")
binding to Dep2("foo")
binding to Dep3("foo")
}
case class ImplicitArguments(dep1: Dep1, dep2: Dep2, dep3: Dep3)(implicit inj: Injector)
class DepSimple(a: String, s: Server) extends Debug(Symbol("a") -> a, Symbol("s") -> s)
class DepWithDefaults(d1: Dep1 = Dep1("Default Value"), d2: Server, d3: Dep2 = Dep2("123"))
extends Debug(Symbol("d1") -> d1, Symbol("d2") -> d2, Symbol("d3") -> d3)
class DepMultiArgList(a: String, s: Server)(l: Long)(l1: Long, c: List[Int])
extends Debug(Symbol("a") -> a, Symbol("s") -> s, Symbol("l") -> l, Symbol("l1") -> l1, Symbol("c") -> c)
class Debug(val props: (Symbol, Any)*) {
  // hashCode is not overridden because only equals is used in the tests
override def equals(obj: scala.Any): Boolean =
obj.getClass == this.getClass && obj.asInstanceOf[Debug].props == this.props
override def toString: String = props.toString()
}
| scaldi/scaldi | src/test/scala/scaldi/InjectedMacroSpec.scala | Scala | apache-2.0 | 3,201 |
package com.progfun.worksheet
object TestApp extends App {
val words = Array("a", "ab", "abc")
val count = 10
val cnt = words.map {
word => (word, count)
}
cnt.foreach(println)
}
| lispmind/progfun | src/main/scala/com/progfun/worksheet/TestApp.scala | Scala | mit | 195 |
package org.hammerlab.guacamole.loci.args
import org.hammerlab.args4s.StringOptionHandler
import org.kohsuke.args4j.{ Option => Args4jOption }
trait ForceCallLociArgs {
@Args4jOption(
name = "--force-call-loci",
usage = "Always call the given sites",
handler = classOf[StringOptionHandler]
)
var forceCallLociStrOpt: Option[String] = None
@Args4jOption(
name = "--force-call-loci-file",
usage = "Always call the given sites",
handler = classOf[StringOptionHandler]
)
var forceCallLociFileOpt: Option[String] = None
}
| hammerlab/guacamole | src/main/scala/org/hammerlab/guacamole/loci/args/ForceCallLociArgs.scala | Scala | apache-2.0 | 556 |
package reopp.common.choco.dataconnectors
import reopp.common.choco._
import reopp.common.Utils._
/**
* Created with IntelliJ IDEA.
* User: jose
* Date: 19/06/12
* Time: 21:04
* To change this template use File | Settings | File Templates.
*/
class ChoIMerger(x:String,y:String,z:String,uid:Int) extends ChoDataConnector(List(x,y),uid) {
useData = true
useCC3 = false
val (xv,yv,zv) = (Var(flowVar(x,uid)) , Var(flowVar(y,uid)), Var(flowVar(z,uid)))
val constraints = ChoConstraints(List(
(zv <-> (xv or yv)),
(xv and yv) --> VarEq(dataVar(x,uid),dataVar(z,uid)) or VarEq(dataVar(y,uid),dataVar(z,uid)),
(Neg(xv) and yv) --> VarEq(dataVar(y,uid),dataVar(z,uid)),
(xv and Neg(yv)) --> VarEq(dataVar(x,uid),dataVar(z,uid))
))
def getConstraints = constraints
  // suggests which ends must also have dataflow if "end" has dataflow
def guessRequirements(end: String) = end match {
case `x` => Set(z)
case `y` => Set(z)
case `z` => Set(x) // priority to x
case _ => Set()
}
} | joseproenca/ip-constraints | code/src/main/scala/reopp/common/choco/dataconnectors/ChoIMerger.scala | Scala | mit | 1,034 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.ui
import java.util.concurrent.ConcurrentLinkedQueue
import java.util.{LinkedHashMap, Map => JMap, Properties}
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.{HashMap, Queue}
import org.apache.spark.scheduler._
import org.apache.spark.streaming.StreamingContext
import org.apache.spark.streaming.event.{Event, EventSource}
import org.apache.spark.streaming.scheduler._
private[streaming] class StreamingJobProgressListener(ssc: StreamingContext)
extends SparkListener with StreamingListener {
private val waitingBatchUIData = new HashMap[Long, BatchUIData]
private val runningBatchUIData = new HashMap[Long, BatchUIData]
private val completedBatchUIData = new Queue[BatchUIData]
private val batchUIDataLimit = ssc.conf.getInt("spark.streaming.ui.retainedBatches", 1000)
private var totalCompletedBatches = 0L
private var totalReceivedRecords = 0L
private var totalProcessedRecords = 0L
private val receiverInfos = new HashMap[Int, ReceiverInfo]
private val eventSourceInfos = new mutable.HashMap[EventSource, EventSourceUIData]
// Because onJobStart and onBatchXXX messages are processed in different threads,
// we may not be able to get the corresponding BatchUIData when receiving onJobStart. So here we
// cannot use a map of (Long, BatchUIData).
private[ui] val batchEventToOutputOpIdSparkJobIdPair =
new LinkedHashMap[Long, ConcurrentLinkedQueue[OutputOpIdAndSparkJobId]] {
override def removeEldestEntry(
p1: JMap.Entry[Long, ConcurrentLinkedQueue[OutputOpIdAndSparkJobId]]): Boolean = {
// If a lot of "onBatchCompleted"s happen before "onJobStart" (image if
// SparkContext.listenerBus is very slow), "batchTimeToOutputOpIdToSparkJobIds"
// may add some information for a removed batch when processing "onJobStart". It will be a
// memory leak.
//
// To avoid the memory leak, we control the size of "batchTimeToOutputOpIdToSparkJobIds" and
// evict the eldest one.
//
// Note: if "onJobStart" happens before "onBatchSubmitted", the size of
// "batchTimeToOutputOpIdToSparkJobIds" may be greater than the number of the retained
// batches temporarily, so here we use "10" to handle such case. This is not a perfect
// solution, but at least it can handle most of cases.
size() >
waitingBatchUIData.size + runningBatchUIData.size + completedBatchUIData.size + 10
}
}
val batchDuration = ssc.graph.batchDuration.milliseconds
override def onReceiverStarted(receiverStarted: StreamingListenerReceiverStarted) {
synchronized {
receiverInfos(receiverStarted.receiverInfo.streamId) = receiverStarted.receiverInfo
}
}
override def onReceiverError(receiverError: StreamingListenerReceiverError) {
synchronized {
receiverInfos(receiverError.receiverInfo.streamId) = receiverError.receiverInfo
}
}
override def onReceiverStopped(receiverStopped: StreamingListenerReceiverStopped) {
synchronized {
receiverInfos(receiverStopped.receiverInfo.streamId) = receiverStopped.receiverInfo
}
}
override def onEventSourceStarted(eventSourceStarted: StreamingListenerEventSourceStarted) {
synchronized {
val eventSource = eventSourceStarted.eventSource
eventSourceInfos(eventSource) = eventSourceInfos.getOrElse(eventSource,
EventSourceUIData.from(eventSource)).setStarted()
}
}
override def onEventSourceStopped(eventSourceStopped: StreamingListenerEventSourceStopped) {
synchronized {
val eventSource = eventSourceStopped.eventSource
eventSourceInfos(eventSource) = eventSourceInfos.getOrElse(eventSource,
EventSourceUIData.from(eventSource)).setStopped()
}
}
override def onBatchSubmitted(batchSubmitted: StreamingListenerBatchSubmitted): Unit = {
synchronized {
val batchEvent = batchSubmitted.batchInfo.batchEvent
waitingBatchUIData(batchEvent.instanceId) =
BatchUIData(batchSubmitted.batchInfo)
val eventSource = batchEvent.eventSource
eventSourceInfos.get(eventSource).foreach { eventSourceUIData =>
eventSourceUIData.incrementBatches()
eventSourceUIData.setFirstBatchEvent(batchEvent)
eventSourceUIData.setLastBatchEvent(batchEvent)
}
}
}
override def onBatchStarted(batchStarted: StreamingListenerBatchStarted): Unit = synchronized {
val batchUIData = BatchUIData(batchStarted.batchInfo)
runningBatchUIData(batchStarted.batchInfo.batchEvent.instanceId) = batchUIData
waitingBatchUIData.remove(batchStarted.batchInfo.batchEvent.instanceId)
totalReceivedRecords += batchUIData.numRecords
}
override def onBatchCompleted(batchCompleted: StreamingListenerBatchCompleted): Unit = {
synchronized {
waitingBatchUIData.remove(batchCompleted.batchInfo.batchEvent.instanceId)
runningBatchUIData.remove(batchCompleted.batchInfo.batchEvent.instanceId)
val batchUIData = BatchUIData(batchCompleted.batchInfo)
completedBatchUIData.enqueue(batchUIData)
if (completedBatchUIData.size > batchUIDataLimit) {
val removedBatch = completedBatchUIData.dequeue()
batchEventToOutputOpIdSparkJobIdPair.remove(removedBatch.batchEvent.instanceId)
}
totalCompletedBatches += 1L
totalProcessedRecords += batchUIData.numRecords
}
}
override def onOutputOperationStarted(
outputOperationStarted: StreamingListenerOutputOperationStarted): Unit = synchronized {
// This method is called after onBatchStarted
runningBatchUIData(outputOperationStarted.outputOperationInfo.batchEvent.instanceId).
updateOutputOperationInfo(outputOperationStarted.outputOperationInfo)
}
override def onOutputOperationCompleted(
outputOperationCompleted: StreamingListenerOutputOperationCompleted): Unit = synchronized {
// This method is called before onBatchCompleted
runningBatchUIData(outputOperationCompleted.outputOperationInfo.batchEvent.instanceId).
updateOutputOperationInfo(outputOperationCompleted.outputOperationInfo)
}
override def onJobStart(jobStart: SparkListenerJobStart): Unit = synchronized {
getBatchEventIdAndOutputOpId(jobStart.properties).foreach { case (batchEventId, outputOpId) =>
var outputOpIdToSparkJobIds = batchEventToOutputOpIdSparkJobIdPair.get(batchEventId)
if (outputOpIdToSparkJobIds == null) {
outputOpIdToSparkJobIds = new ConcurrentLinkedQueue[OutputOpIdAndSparkJobId]()
batchEventToOutputOpIdSparkJobIdPair.put(batchEventId, outputOpIdToSparkJobIds)
}
outputOpIdToSparkJobIds.add(OutputOpIdAndSparkJobId(outputOpId, jobStart.jobId))
}
}
private def getBatchEventIdAndOutputOpId(properties: Properties): Option[(Long, Int)] = {
val batchEventId = properties.getProperty(JobScheduler.BATCH_EVENT_PROPERTY_KEY)
if (batchEventId == null) {
// Not submitted from JobScheduler
None
} else {
val outputOpId = properties.getProperty(JobScheduler.OUTPUT_OP_ID_PROPERTY_KEY)
assert(outputOpId != null)
Some(batchEventId.toLong -> outputOpId.toInt)
}
}
def numReceivers: Int = synchronized {
receiverInfos.size
}
def numActiveReceivers: Int = synchronized {
receiverInfos.count(_._2.active)
}
def numInactiveReceivers: Int = {
ssc.graph.getReceiverInputStreams().length - numActiveReceivers
}
def numEventSources: Int = synchronized {
eventSourceInfos.size
}
def numTotalCompletedBatches: Long = synchronized {
totalCompletedBatches
}
def numTotalReceivedRecords: Long = synchronized {
totalReceivedRecords
}
def numTotalProcessedRecords: Long = synchronized {
totalProcessedRecords
}
def numUnprocessedBatches: Long = synchronized {
waitingBatchUIData.size + runningBatchUIData.size
}
def waitingBatches: Seq[BatchUIData] = synchronized {
waitingBatchUIData.values.toSeq
}
def runningBatches: Seq[BatchUIData] = synchronized {
runningBatchUIData.values.toSeq
}
def retainedCompletedBatches: Seq[BatchUIData] = synchronized {
completedBatchUIData.toSeq
}
def streamName(streamId: Int): Option[String] = {
ssc.graph.getInputStreamName(streamId)
}
/**
* Return all InputDStream Ids
*/
def streamIds: Seq[Int] = ssc.graph.getInputStreams().map(_.id)
/**
* Return all of the record rates for each InputDStream in each batch. The key of the return value
* is the stream id, and the value is a sequence of batch time with its record rate.
*/
def receivedRecordRateWithBatchTime: Map[Int, Seq[(Long, Double)]] = synchronized {
val _retainedBatches = retainedBatches
val latestBatches = _retainedBatches.map { batchUIData =>
(batchUIData.batchEvent.time.milliseconds, batchUIData.streamIdToInputInfo.mapValues(_.numRecords))
}
streamIds.map { streamId =>
val recordRates = latestBatches.map {
case (batchTime, streamIdToNumRecords) =>
val numRecords = streamIdToNumRecords.getOrElse(streamId, 0L)
(batchTime, numRecords * 1000.0 / batchDuration)
}
(streamId, recordRates)
}.toMap
}
def lastReceivedBatchRecords: Map[Int, Long] = synchronized {
val lastReceivedBlockInfoOption =
lastReceivedBatch.map(_.streamIdToInputInfo.mapValues(_.numRecords))
lastReceivedBlockInfoOption.map { lastReceivedBlockInfo =>
streamIds.map { streamId =>
(streamId, lastReceivedBlockInfo.getOrElse(streamId, 0L))
}.toMap
}.getOrElse {
streamIds.map(streamId => (streamId, 0L)).toMap
}
}
def receiverInfo(receiverId: Int): Option[ReceiverInfo] = synchronized {
receiverInfos.get(receiverId)
}
def allEventSourceInfo(): Seq[EventSourceUIData] = synchronized {
eventSourceInfos.values.toSeq
}
def lastCompletedBatch: Option[BatchUIData] = synchronized {
completedBatchUIData.sortBy(_.batchEvent)(Event.ordering).lastOption
}
def lastReceivedBatch: Option[BatchUIData] = synchronized {
retainedBatches.lastOption
}
def retainedBatches: Seq[BatchUIData] = synchronized {
(waitingBatchUIData.values.toSeq ++
runningBatchUIData.values.toSeq ++ completedBatchUIData).sortBy(_.batchEvent)(Event.ordering)
}
def getBatchUIData(batchEventInstanceId: Long): Option[BatchUIData] = synchronized {
val batchUIData = waitingBatchUIData.get(batchEventInstanceId).orElse {
runningBatchUIData.get(batchEventInstanceId).orElse {
completedBatchUIData.find(batch => batch.batchEvent.instanceId == batchEventInstanceId)
}
}
batchUIData.foreach { _batchUIData =>
// We use an Iterable rather than explicitly converting to a seq so that updates
// will propagate
val outputOpIdToSparkJobIds: Iterable[OutputOpIdAndSparkJobId] =
Option(batchEventToOutputOpIdSparkJobIdPair.get(batchEventInstanceId))
.map(_.asScala).getOrElse(Seq.empty)
_batchUIData.outputOpIdSparkJobIdPairs = outputOpIdToSparkJobIds
}
batchUIData
}
}
private[streaming] object StreamingJobProgressListener {
type SparkJobId = Int
type OutputOpId = Int
}
| mashin-io/rich-spark | streaming/src/main/scala/org/apache/spark/streaming/ui/StreamingJobProgressListener.scala | Scala | apache-2.0 | 12,071 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.gihyo.spark.ch06
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{StreamingContext, Seconds}
import org.apache.spark.streaming.dstream.InputDStream
object gihyo_6_2_1_Sample {
def main(args: Array[String]) {
if (args.length != 2) {
      throw new IllegalArgumentException("Invalid arguments")
}
val targetHost = args(0)
val targetHostPort = args(1).toInt
val conf = new SparkConf().setAppName("NetworkWordCount")
val ssc = new StreamingContext(conf, Seconds(5))
val lines = ssc.socketTextStream(targetHost, targetHostPort)
    run(lines)
    ssc.start()
    ssc.awaitTermination()
}
def run(stream: InputDStream[String]) {
val words = stream.flatMap(_.split(" "))
val pairs = words.map(word => (word, 1))
val wordCounts = pairs.reduceByKey(_ + _)
wordCounts.print
}
}
| yu-iskw/gihyo-spark-book-example | src/main/scala/jp/gihyo/spark/ch06/gihyo_6_2_1_Sample.scala | Scala | apache-2.0 | 1,687 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.nodes.dataset
import org.apache.calcite.plan._
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.TableScan
import org.apache.calcite.rel.metadata.RelMetadataQuery
import org.apache.flink.api.java.DataSet
import org.apache.flink.table.api.{BatchQueryConfig, BatchTableEnvironment}
import org.apache.flink.table.plan.schema.{DataSetTable, RowSchema}
import org.apache.flink.types.Row
/**
* Flink RelNode which matches along with DataSource.
* It ensures that types without deterministic field order (e.g. POJOs) are not part of
* the plan translation.
*/
class DataSetScan(
cluster: RelOptCluster,
traitSet: RelTraitSet,
table: RelOptTable,
rowRelDataType: RelDataType)
extends TableScan(cluster, traitSet, table)
with BatchScan {
val dataSetTable: DataSetTable[Any] = getTable.unwrap(classOf[DataSetTable[Any]])
override def deriveRowType(): RelDataType = rowRelDataType
override def computeSelfCost(planner: RelOptPlanner, metadata: RelMetadataQuery): RelOptCost = {
val rowCnt = metadata.getRowCount(this)
planner.getCostFactory.makeCost(rowCnt, rowCnt, 0)
}
override def copy(traitSet: RelTraitSet, inputs: java.util.List[RelNode]): RelNode = {
new DataSetScan(
cluster,
traitSet,
getTable,
getRowType
)
}
override def translateToPlan(
tableEnv: BatchTableEnvironment,
queryConfig: BatchQueryConfig): DataSet[Row] = {
val schema = new RowSchema(rowRelDataType)
val inputDataSet: DataSet[Any] = dataSetTable.dataSet
val fieldIdxs = dataSetTable.fieldIndexes
val config = tableEnv.getConfig
convertToInternalRow(schema, inputDataSet, fieldIdxs, config, None)
}
}
| ueshin/apache-flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/nodes/dataset/DataSetScan.scala | Scala | apache-2.0 | 2,602 |
package unfiltered.util
import scala.annotation.tailrec
/** Unfiltered's base server trait, something plans can be added to */
trait Server { self =>
// ServerBuilder is concretely defined in the final case classes,
// and we can always return `this` for ServerBuilder
type ServerBuilder >: self.type <: Server
}
trait PlanServer[T] extends Server { self =>
type ServerBuilder >: self.type <: PlanServer[T]
def plan(plan: T): ServerBuilder = makePlan(plan)
def makePlan(plan: => T): ServerBuilder
}
/** Describes a server's host and port bindings. */
trait PortBindingInfo {
def host: String
def port: Int
def scheme: String
def url = s"$scheme://$host:$port"
}
trait HttpPortBinding {
val scheme = "http"
}
trait HttpsPortBinding {
val scheme = "https"
}
trait StartableServer extends Server {
def start(): ServerBuilder
def stop(): ServerBuilder
def destroy(): ServerBuilder
/** network interface/host name and port bound for a server */
def portBindings: Iterable[PortBindingInfo]
}
trait RunnableServer extends StartableServer { self =>
/** Calls run with no afterStart or afterStop functions */
def run(): Unit = {
run { _ => () }
}
/** Starts the server then takes an action */
def run(afterStart: ServerBuilder => Unit): Unit = {
run(afterStart, { _ => () })
}
/** Starts the server, calls afterStart. then waits. The waiting behavior
* depends on whether the current thread is "main"; if not "main" it
* assumes this is an interactive session with sbt and waits for any input,
* then calls stop(), afterStop(...), and finally destroy(). If the
* current thread is "main", it waits indefinitely and performs stop()
* and afterStop(...) in a shutdown hook.
*/
def run(afterStart: ServerBuilder => Unit, afterStop: ServerBuilder => Unit): Unit = {
Thread.currentThread.getName match {
case "main" =>
Runtime.getRuntime.addShutdownHook(new Thread {
override def run(): Unit = {
RunnableServer.this.stop()
afterStop(RunnableServer.this)
}
})
start()
afterStart(RunnableServer.this)
val lock = new AnyRef
lock.synchronized { lock.wait() }
case _ =>
start()
afterStart(RunnableServer.this)
println("Embedded server listening at")
for (binding <- portBindings)
println(s" ${binding.url}")
println("Press any key to stop.")
@tailrec
def doWait(): Unit = {
try { Thread.sleep(1000) } catch { case _: InterruptedException => () }
if(System.in.available() <= 0)
doWait()
}
doWait()
stop()
afterStop(RunnableServer.this)
destroy()
}
}
}
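/** Hedged sketch (not part of the library): a do-nothing server satisfying
 *  `RunnableServer`, enough to exercise the run()/stop() lifecycle described
 *  above without binding any real sockets.
 */
object NoopServer extends RunnableServer {
  type ServerBuilder = NoopServer.type
  def start() = { println("starting"); this }
  def stop() = { println("stopping"); this }
  def destroy() = this
  def portBindings = Seq(
    new PortBindingInfo with HttpPortBinding {
      def host = "localhost"
      def port = 8080
    })
}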
| xuwei-k/unfiltered | util/src/main/scala/servers.scala | Scala | mit | 2,765 |
object Main extends App{
def func(x: Int, y: Int):Int = x + y
def func2 = "World"
println("Hello World")
}
| maedaunderscore/sbt-sample | src/main/scala/Main.scala | Scala | mit | 114 |
/*
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package graph
import scala.collection.JavaConversions._
import org.objectweb.asm.Type
import com.asakusafw.lang.compiler.model.graph.ExternalInput
import com.asakusafw.lang.compiler.planning.SubPlan
import com.asakusafw.spark.compiler.planning.SubPlanInfo
import com.asakusafw.spark.compiler.spi.NodeCompiler
class DirectInputCompiler extends NodeCompiler {
override def support(
subplan: SubPlan)(
implicit context: NodeCompiler.Context): Boolean = {
if (context.options.useInputDirect) {
val subPlanInfo = subplan.getAttribute(classOf[SubPlanInfo])
val primaryOperator = subPlanInfo.getPrimaryOperator
if (primaryOperator.isInstanceOf[ExternalInput]) {
val operator = primaryOperator.asInstanceOf[ExternalInput]
Option(operator.getInfo).flatMap { info =>
context.getInputFormatInfo(operator.getName, info)
}.isDefined
} else {
false
}
} else {
false
}
}
override def instantiator: Instantiator = InputInstantiator
override def compile(
subplan: SubPlan)(
implicit context: NodeCompiler.Context): Type = {
assert(support(subplan), s"The subplan is not supported: ${subplan}")
val subPlanInfo = subplan.getAttribute(classOf[SubPlanInfo])
val primaryOperator = subPlanInfo.getPrimaryOperator
assert(primaryOperator.isInstanceOf[ExternalInput],
s"The primary operator should be external input: ${primaryOperator} [${subplan}]")
val operator = primaryOperator.asInstanceOf[ExternalInput]
val inputFormatInfo = context.getInputFormatInfo(operator.getName, operator.getInfo).get
val builder =
new DirectInputClassBuilder(
operator,
inputFormatInfo.getFormatClass.asType,
inputFormatInfo.getKeyClass.asType,
inputFormatInfo.getValueClass.asType,
inputFormatInfo.getExtraConfiguration.toMap)(
subplan.label,
subplan.getOutputs.toSeq) with CacheOnce
context.addClass(builder)
}
}
| asakusafw/asakusafw-spark | compiler/src/main/scala/com/asakusafw/spark/compiler/graph/DirectInputCompiler.scala | Scala | apache-2.0 | 2,653 |
package models
import play.api.libs.json.Json
case class DigitalInternetResponse(city: String, UHD3: String)
object DigitalInternet {
implicit val responseFormatter = Json.format[DigitalInternetResponse]
} | CamilleTh/goc_backend | app/models/DigitalInternetResponse.scala | Scala | mit | 210 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.{LinkedBlockingQueue, TimeUnit}
import kafka.common.{AppInfo, QueueFullException}
import kafka.common.security.LoginManager
import kafka.metrics._
import kafka.producer.async.{DefaultEventHandler, EventHandler, ProducerSendThread}
import kafka.serializer.Encoder
import kafka.utils._
import org.apache.kafka.common.protocol.SecurityProtocol
import org.apache.kafka.common.security.JaasUtils
import org.apache.kafka.common.config.SaslConfigs
class Producer[K,V](val config: ProducerConfig,
private val eventHandler: EventHandler[K,V]) // only for unit testing
extends Logging {
private val hasShutdown = new AtomicBoolean(false)
private val queue = new LinkedBlockingQueue[KeyedMessage[K,V]](config.queueBufferingMaxMessages)
private var sync: Boolean = true
private var producerSendThread: ProducerSendThread[K,V] = null
private val lock = new Object()
private val protocol = SecurityProtocol.valueOf(config.securityProtocol)
if(CoreUtils.isSaslProtocol(protocol)) {
val saslConfigs = new java.util.HashMap[String, Any]()
saslConfigs.put(SaslConfigs.SASL_KERBEROS_KINIT_CMD, config.saslKerberosKinitCmd)
saslConfigs.put(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_JITTER, config.saslKerberosTicketRenewJitter.toDouble)
saslConfigs.put(SaslConfigs.SASL_KERBEROS_TICKET_RENEW_WINDOW_FACTOR, config.saslKerberosTicketRenewWindowFactor.toDouble)
saslConfigs.put(SaslConfigs.SASL_KERBEROS_MIN_TIME_BEFORE_RELOGIN, config.saslKerberosMinTimeBeforeRelogin.toLong)
LoginManager.init(JaasUtils.LOGIN_CONTEXT_CLIENT, saslConfigs)
}
config.producerType match {
case "sync" =>
case "async" =>
sync = false
producerSendThread = new ProducerSendThread[K,V]("ProducerSendThread-" + config.clientId,
queue,
eventHandler,
config.queueBufferingMaxMs,
config.batchNumMessages,
config.clientId)
producerSendThread.start()
}
private val producerTopicStats = ProducerTopicStatsRegistry.getProducerTopicStats(config.clientId)
KafkaMetricsReporter.startReporters(config.props)
AppInfo.registerInfo()
def this(config: ProducerConfig) =
this(config,
new DefaultEventHandler[K,V](config,
CoreUtils.createObject[Partitioner](config.partitionerClass, config.props),
CoreUtils.createObject[Encoder[V]](config.serializerClass, config.props),
CoreUtils.createObject[Encoder[K]](config.keySerializerClass, config.props),
new ProducerPool(config)))
/**
* Sends the data, partitioned by key to the topic using either the
* synchronous or the asynchronous producer
* @param messages the producer data object that encapsulates the topic, key and message data
*/
def send(messages: KeyedMessage[K,V]*) {
lock synchronized {
if (hasShutdown.get)
throw new ProducerClosedException
recordStats(messages)
sync match {
case true => eventHandler.handle(messages)
case false => asyncSend(messages)
}
}
}
private def recordStats(messages: Seq[KeyedMessage[K,V]]) {
for (message <- messages) {
producerTopicStats.getProducerTopicStats(message.topic).messageRate.mark()
producerTopicStats.getProducerAllTopicsStats.messageRate.mark()
}
}
private def asyncSend(messages: Seq[KeyedMessage[K,V]]) {
for (message <- messages) {
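      // queue.enqueue.timeout.ms semantics: 0 drops the message immediately
      // when the queue is full, a negative value blocks until space frees
      // up, and a positive value waits at most that many milliseconds.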
val added = config.queueEnqueueTimeoutMs match {
case 0 =>
queue.offer(message)
case _ =>
try {
config.queueEnqueueTimeoutMs < 0 match {
case true =>
queue.put(message)
true
case _ =>
queue.offer(message, config.queueEnqueueTimeoutMs, TimeUnit.MILLISECONDS)
}
}
catch {
case e: InterruptedException =>
false
}
}
if(!added) {
producerTopicStats.getProducerTopicStats(message.topic).droppedMessageRate.mark()
producerTopicStats.getProducerAllTopicsStats.droppedMessageRate.mark()
throw new QueueFullException("Event queue is full of unsent messages, could not send event: " + message.toString)
      } else {
trace("Added to send queue an event: " + message.toString)
trace("Remaining queue size: " + queue.remainingCapacity)
}
}
}
/**
* Close API to close the producer pool connections to all Kafka brokers. Also closes
* the zookeeper client connection if one exists
*/
def close() = {
lock synchronized {
val canShutdown = hasShutdown.compareAndSet(false, true)
if(canShutdown) {
info("Shutting down producer")
val startTime = System.nanoTime()
KafkaMetricsGroup.removeAllProducerMetrics(config.clientId)
if (producerSendThread != null)
producerSendThread.shutdown
eventHandler.close
LoginManager.shutdown
info("Producer shutdown completed in " + (System.nanoTime() - startTime) / 1000000 + " ms")
}
}
}
}
| OpenPOWER-BigData/HDP-kafka | core/src/main/scala/kafka/producer/Producer.scala | Scala | apache-2.0 | 6,300 |
/*
Copyright 2016-17, Hasso-Plattner-Institut fuer Softwaresystemtechnik GmbH
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.hpi.ingestion.deduplication.models
import java.util.UUID
import com.datastax.driver.core.utils.UUIDs
/**
* Contains statistical information about a block generated by a blocking scheme.
* @param jobid TimeUUID used as primary key identifying the job
* @param schemetag Blocking Scheme tag used as primary key identifying the blocking scheme
* @param data contains the statistical data about each block generates by this scheme
* @param comment contains extra information about the data
* @param pairscompleteness pairs completeness measure of this blocking scheme (% of duplicates that can be found with
 * this blocking scheme).
* @param blockcount total number of blocks created
* @param comparisoncount total number of comparisons done with this blocking scheme
* @param xaxis label of the x-axis
* @param yaxis label of the y-axis
*/
case class BlockEvaluation(
jobid: UUID = UUIDs.timeBased(),
schemetag: String,
data: Set[BlockStats] = Set[BlockStats](),
comment: Option[String] = None,
pairscompleteness: Double,
blockcount: Int,
comparisoncount: BigInt,
xaxis: Option[String] = Option("block keys"),
yaxis: Option[String] = Option("# comparisons")
)
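/**
 * Hedged sketch (illustrative values only): how a deduplication job might
 * record one evaluation row for a blocking scheme.
 */
object BlockEvaluationExample {
  val sample = BlockEvaluation(
    schemetag = "geo_coords_scheme",
    pairscompleteness = 0.93,
    blockcount = 1200,
    comparisoncount = BigInt(4500000))
}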
| bpn1/ingestion | src/main/scala/de/hpi/ingestion/deduplication/models/BlockEvaluation.scala | Scala | apache-2.0 | 1,832 |
/**
* Licensed to the Minutemen Group under one or more contributor license
* agreements. See the COPYRIGHT file distributed with this work for
* additional information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package silhouette.services
import silhouette.{ Identity, LoginInfo }
import scala.concurrent.Future
/**
* A trait that provides the means to retrieve identities for the Silhouette module.
*/
trait IdentityService[T <: Identity] {
/**
* Retrieves an identity that matches the specified login info.
*
* @param loginInfo The login info to retrieve an identity.
* @return The retrieved identity or None if no identity could be retrieved for the given login info.
*/
def retrieve(loginInfo: LoginInfo): Future[Option[T]]
}
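/**
 * Hedged sketch (not part of the module): an in-memory implementation for
 * tests, assuming `Identity` is a plain marker trait; `User` is hypothetical.
 */
object InMemoryExample {
  case class User(name: String) extends Identity

  class InMemoryIdentityService(users: Map[LoginInfo, User])
    extends IdentityService[User] {
    override def retrieve(loginInfo: LoginInfo): Future[Option[User]] =
      Future.successful(users.get(loginInfo))
  }
}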
| datalek/silhouette | silhouette/src/main/scala/silhouette/services/IdentityService.scala | Scala | apache-2.0 | 1,309 |
package com.outr.arango
import com.outr.arango.api.model.{CollectionInfo, PostAPICollection, PostAPICollectionOpts}
import com.outr.arango.api.{APICollection, APICollectionCollectionName, APICollectionCollectionNameLoad, APICollectionCollectionNameTruncate, APICollectionCollectionNameUnload, APIWalTail, WALOperations}
import com.outr.arango.model.ArangoResponse
import io.youi.client.HttpClient
import profig.JsonUtil
import scala.concurrent.{ExecutionContext, Future}
class ArangoCollection(client: HttpClient, dbName: String, collectionName: String) {
def create(distributeShardsLike: Option[String] = None,
doCompact: Option[Boolean] = None,
indexBuckets: Option[Long] = None,
isSystem: Option[Boolean] = None,
isVolatile: Option[Boolean] = None,
journalSize: Option[Long] = None,
keyOptions: Option[PostAPICollectionOpts] = None,
numberOfShards: Long = 1L,
replicationFactor: Long = 1L,
shardKeys: List[String] = List("_key"),
shardingStrategy: Option[String] = None,
smartJoinAttribute: Option[String] = None,
`type`: CollectionType = CollectionType.Document,
waitForSync: Option[Boolean] = None,
waitForSyncReplication: Option[Int] = None,
enforceReplicationFactor: Option[Int] = None)
(implicit ec: ExecutionContext): Future[CollectionInfo] = {
val collectionType = `type` match {
case CollectionType.Document => 2L
case CollectionType.Edge => 3L
}
APICollection.post(
client = client,
body = PostAPICollection(
name = collectionName,
distributeShardsLike = distributeShardsLike,
doCompact = doCompact,
indexBuckets = indexBuckets,
isSystem = isSystem,
isVolatile = isVolatile,
journalSize = journalSize,
keyOptions = keyOptions,
numberOfShards = numberOfShards,
replicationFactor = replicationFactor,
shardKeys = shardKeys,
shardingStrategy = shardingStrategy,
smartJoinAttribute = smartJoinAttribute,
`type` = Some(collectionType),
waitForSync = waitForSync
),
waitForSyncReplication = waitForSyncReplication,
enforceReplicationFactor = enforceReplicationFactor
)
}
lazy val index: ArangoIndex = new ArangoIndex(client, dbName, collectionName)
lazy val document: ArangoDocument = new ArangoDocument(client, dbName, collectionName)
def unload()(implicit ec: ExecutionContext): Future[CollectionLoad] = {
APICollectionCollectionNameUnload.put(client, collectionName).map(JsonUtil.fromJson[CollectionLoad](_))
}
def load()(implicit ec: ExecutionContext): Future[CollectionLoad] = {
APICollectionCollectionNameLoad.put(client, collectionName).map(JsonUtil.fromJson[CollectionLoad](_))
}
def truncate()(implicit ec: ExecutionContext): Future[TruncateCollectionResponse] = {
APICollectionCollectionNameTruncate.put(client, collectionName).map(JsonUtil.fromJson[TruncateCollectionResponse](_))
}
def drop(isSystem: Boolean = false)(implicit ec: ExecutionContext): Future[Boolean] = APICollectionCollectionName
.delete(client, collectionName, isSystem = Some(isSystem))
.map(JsonUtil.fromJson[ArangoResponse[Option[Boolean]]](_))
.map(!_.error)
} | outr/arangodb-scala | driver/src/main/scala/com/outr/arango/ArangoCollection.scala | Scala | mit | 3,388 |
package com.fsist.subscala
import scala.language.higherKinds
/** Restrictions on Scala syntax features */
sealed trait Syntax
object Syntax {
sealed trait +[A <: Syntax, B <: Syntax] <: Syntax
sealed trait -[A <: Syntax, B <: Syntax] <: Syntax
sealed trait If <: Syntax
sealed trait While <: Syntax
sealed trait Apply <: Syntax
sealed trait Def <: Syntax
sealed trait AbstractClass <: Syntax
sealed trait ConcreteClass <: Syntax
sealed trait Class extends (AbstractClass + ConcreteClass)
sealed trait Trait <: Syntax
sealed trait Object <: Syntax
sealed trait Import <: Syntax
sealed trait Val <: Syntax
sealed trait Var <: Syntax
sealed trait Lazy <: Syntax
sealed trait LazyVal extends (Lazy + Val)
sealed trait LazyVar extends (Lazy + Var)
sealed trait All extends (If + While + Apply + Def + Class + Trait + Object + Import + Val + Var + Lazy)
}
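// A sketch (not part of the original file) of how the type-level combinators compose:
// `+` unions feature sets and `-` subtracts them, so restrictions read as plain types.
object SyntaxExamples {
  import Syntax._
  type NoMutation = All - (Var + Lazy)
  type ExpressionsOnly = If + Apply + Val
}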
| fsist/subscala | src/main/scala/com/fsist/subscala/Syntax.scala | Scala | apache-2.0 | 899 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn.ops
import com.intel.analytics.bigdl.nn.Utils
import com.intel.analytics.bigdl.tensor.{DoubleType, FloatType, Tensor}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Table
import scala.reflect.ClassTag
/**
* Computes the grayscale dilation of 4-D `input` and 3-D `filter` tensors.
*
* This layer takes a Table of two tensors as inputs, namely `input` and `filter`.
* The `input` tensor has shape `[batch, in_height, in_width, depth]` and the `filter`
* tensor has shape `[filter_height, filter_width, depth]`, i.e., each input channel is
 * processed independently of the others with its own structuring function. The `output` tensor
* has shape `[batch, out_height, out_width, depth]`. The spatial dimensions of the output
* tensor depend on the `padding` algorithm. We currently only support the "NHWC" DataFormat.
*
* In detail, the grayscale morphological 2-D dilation is the max-sum correlation
*
* output[b, y, x, c] =
* max_{dy, dx} input[b,
* strides[1] * y + rates[1] * dy,
* strides[2] * x + rates[2] * dx,
* c] +
* filter[dy, dx, c]
*
* Max-pooling is a special case when the filter has size equal to the pooling kernel size and
* contains all zeros.
*
* Note on duality: The dilation of `input` by the `filter` is equal to the negation of the
* erosion of `-input` by the reflected `filter`.
*
*/
class Dilation2D[T: ClassTag, D: ClassTag](val strides: Array[Int],
val rates: Array[Int],
val padding: String)
(implicit ev: TensorNumeric[T], ev2: TensorNumeric[D])
extends Operation[Table, Tensor[D], T] {
output = Tensor[D]()
require(strides.length == 4, s"strides must have a length of 4, but got ${strides.length}")
  require(rates.length == 4, s"rates must have a length of 4, but got ${rates.length}")
require(padding.toLowerCase() == "same" || padding.toLowerCase() == "valid",
s"padding must be one of same or valid, but got $padding")
private def getOutputSize(inputSize: Int, filterSize: Int, stride: Int, padding: String) = {
padding.toLowerCase() match {
case "valid" =>
val outputSize = (inputSize - filterSize + stride) / stride
(outputSize, 0, 0)
case "same" =>
val outputSize = (inputSize + stride - 1) / stride
val paddingNeeded = math.max(0, (outputSize - 1) * stride + filterSize - inputSize)
val padBefore = paddingNeeded / 2
val padAfter = paddingNeeded - padBefore
(outputSize, padBefore, padAfter)
}
}
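  // Worked example: inputSize = 7, filterSize = 3, stride = 2 gives
  // "valid" -> ((7 - 3 + 2) / 2, 0, 0) = (3, 0, 0), while "same" gives outputSize = 4,
  // paddingNeeded = max(0, 3 * 2 + 3 - 7) = 2, and hence (4, 1, 1).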
private def dilationFloat(input: Tensor[Float], filter: Tensor[Float], output: Tensor[Float],
strideRows: Int, strideCols: Int,
rateRows: Int, rateCols: Int) = {
val batch = input.size(1)
val inputRows = input.size(2)
val inputCols = input.size(3)
val depth = input.size(4)
val filterRows = filter.size(1)
val filterCols = filter.size(2)
val filterRowsEff = filterRows + (filterRows - 1) * (rateRows - 1)
val filterColsEff = filterCols + (filterCols - 1) * (rateCols - 1)
val (outputRows, padTop, _) =
getOutputSize(inputRows, filterRowsEff, strideRows, padding)
val (outputCols, padLeft, _) =
getOutputSize(inputCols, filterColsEff, strideCols, padding)
output.resize(Array(batch, outputRows, outputCols, depth))
val inputData = input.storage().array()
val inputDataOffset = input.storageOffset() - 1
val filterData = filter.storage().array()
val filterDataOffset = filter.storageOffset() - 1
val outputData = output.storage().array()
val outputDataOffset = output.storageOffset() - 1
var b = 0
while(b < batch) {
var hOut = 0
while (hOut < outputRows) {
val hBeg = hOut * strideRows - padTop
var wOut = 0
while (wOut < outputCols) {
val wBeg = wOut * strideCols - padLeft
var d = 0
while (d < depth) {
var curVal: Float = Float.MinValue
var h = 0
while(h < filterRows) {
val hIn = hBeg + h * rateRows
if (hIn >= 0 && hIn < inputRows) {
var w = 0
while (w < filterCols) {
val wIn = wBeg + w * rateCols
if (wIn >= 0 && wIn < inputCols) {
val inputIndex = ((b * inputRows + hIn) * inputCols + wIn) * depth + d
val inputValue = inputData(inputDataOffset + inputIndex)
val filterIndex = (h * filterCols + w) * depth + d
val filterValue = filterData(filterDataOffset + filterIndex)
val value = inputValue + filterValue
if (value > curVal) {
curVal = value
}
}
w += 1
}
}
h += 1
}
val outputIndex = ((b * outputRows + hOut) * outputCols + wOut) * depth + d
outputData(outputDataOffset + outputIndex) = curVal
d += 1
}
wOut += 1
}
hOut += 1
}
b += 1
}
}
private def dilationDouble(input: Tensor[Double], filter: Tensor[Double], output: Tensor[Double],
strideRows: Int, strideCols: Int,
rateRows: Int, rateCols: Int) = {
val batch = input.size(1)
val inputRows = input.size(2)
val inputCols = input.size(3)
val depth = input.size(4)
val filterRows = filter.size(1)
val filterCols = filter.size(2)
val filterRowsEff = filterRows + (filterRows - 1) * (rateRows - 1)
val filterColsEff = filterCols + (filterCols - 1) * (rateCols - 1)
val (outputRows, padTop, _) =
getOutputSize(inputRows, filterRowsEff, strideRows, padding)
val (outputCols, padLeft, _) =
getOutputSize(inputCols, filterColsEff, strideCols, padding)
output.resize(Array(batch, outputRows, outputCols, depth))
val inputData = input.storage().array()
val inputDataOffset = input.storageOffset() - 1
val filterData = filter.storage().array()
val filterDataOffset = filter.storageOffset() - 1
val outputData = output.storage().array()
val outputDataOffset = output.storageOffset() - 1
var b = 0
while(b < batch) {
var hOut = 0
while (hOut < outputRows) {
val hBeg = hOut * strideRows - padTop
var wOut = 0
while (wOut < outputCols) {
val wBeg = wOut * strideCols - padLeft
var d = 0
while (d < depth) {
var curVal: Double = Double.MinValue
var h = 0
while(h < filterRows) {
val hIn = hBeg + h * rateRows
if (hIn >= 0 && hIn < inputRows) {
var w = 0
while (w < filterCols) {
val wIn = wBeg + w * rateCols
if (wIn >= 0 && wIn < inputCols) {
val inputIndex = ((b * inputRows + hIn) * inputCols + wIn) * depth + d
val inputValue = inputData(inputDataOffset + inputIndex)
val filterIndex = (h * filterCols + w) * depth + d
val filterValue = filterData(filterDataOffset + filterIndex)
val value = inputValue + filterValue
if (value > curVal) {
curVal = value
}
}
w += 1
}
}
h += 1
}
val outputIndex = ((b * outputRows + hOut) * outputCols + wOut) * depth + d
outputData(outputDataOffset + outputIndex) = curVal
d += 1
}
wOut += 1
}
hOut += 1
}
b += 1
}
}
override def updateOutput(inputs: Table): Tensor[D] = {
val input = inputs[Tensor[D]](1)
val filter = inputs[Tensor[D]](2)
require(input.dim() == 4, "input must have 4 dims")
require(filter.dim() == 3, "filter must have 3 dims")
val strideRows = strides(1)
val strideCols = strides(2)
val rateRows = rates(1)
val rateCols = rates(2)
if (ev2.getType() == FloatType) {
val inputTensor = input.asInstanceOf[Tensor[Float]]
val filterTensor = filter.asInstanceOf[Tensor[Float]]
val outputTensor = output.asInstanceOf[Tensor[Float]]
dilationFloat(inputTensor, filterTensor, outputTensor,
strideRows, strideCols, rateRows, rateCols)
} else if (ev2.getType() == DoubleType) {
val inputTensor = input.asInstanceOf[Tensor[Double]]
val filterTensor = filter.asInstanceOf[Tensor[Double]]
val outputTensor = output.asInstanceOf[Tensor[Double]]
dilationDouble(inputTensor, filterTensor, outputTensor,
strideRows, strideCols, rateRows, rateCols)
} else {
throw new IllegalArgumentException(s"does not support datatype ${ev2.getType()}")
}
output
}
override def getClassTagNumerics() : (Array[ClassTag[_]], Array[TensorNumeric[_]]) = {
(Array(scala.reflect.classTag[T], scala.reflect.classTag[D]), Array(ev, ev2))
}
}
object Dilation2D {
def apply[T: ClassTag, D: ClassTag](strides: Array[Int], rates: Array[Int], padding: String)
(implicit ev: TensorNumeric[T], ev2: TensorNumeric[D]): Dilation2D[T, D] =
new Dilation2D(strides, rates, padding)
}
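// A construction sketch (hypothetical values, not part of BigDL): strides and rates are
// NHWC-ordered, so the batch and depth entries must remain 1.
private[bigdl] object Dilation2DSketch {
  val op: Dilation2D[Float, Float] =
    Dilation2D[Float, Float](strides = Array(1, 2, 2, 1), rates = Array(1, 1, 1, 1), padding = "same")
}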
private[bigdl] class Dilation2DBackpropFilter[T: ClassTag, D: ClassTag](
strides: Array[Int],
rates: Array[Int],
padding: String)(implicit ev: TensorNumeric[T], ev2: TensorNumeric[D])
extends Operation[Table, Tensor[D], T]{
output = Tensor[D]()
private def dilation2DBackpropFilterFloat(
input: Tensor[Float],
filter: Tensor[Float],
outBackprop: Tensor[Float],
filterBackprop: Tensor[Float],
strideRows: Int, strideCols: Int,
rateRows: Int, rateCols: Int) = {
val batch = input.size(1)
val inputRows = input.size(2)
val inputCols = input.size(3)
val depth = input.size(4)
val filterRows = filter.size(1)
val filterCols = filter.size(2)
val filterRowsEff = filterRows + (filterRows - 1) * (rateRows - 1)
val filterColsEff = filterCols + (filterCols - 1) * (rateCols - 1)
val (outputRows, padTop, _) =
Utils.getOutputSize(inputRows, filterRowsEff, strideRows, padding)
val (outputCols, padLeft, _) =
Utils.getOutputSize(inputCols, filterColsEff, strideCols, padding)
filterBackprop.resizeAs(filter)
val inputData = input.storage().array()
val inputDataOffset = input.storageOffset() - 1
val filterData = filter.storage().array()
val filterDataOffset = filter.storageOffset() - 1
val outBackpropData = outBackprop.storage().array()
val outBackpropDataOffset = outBackprop.storageOffset() - 1
val filterBackpropData = filterBackprop.storage().array()
val filterBackpropDataOffset = filterBackprop.storageOffset() - 1
var b = 0
while (b < batch) {
var h_out = 0
while (h_out < outputRows) {
val h_beg = h_out * strideRows - padTop
var w_out = 0
while (w_out < outputCols) {
val w_beg = w_out * strideCols - padLeft
var d = 0
while (d < depth) {
var cur_val = Float.MinValue
var h_max = 0
var w_max = 0
var h = 0
while (h < filterRows) {
val h_in = h_beg + h * rateRows
if (h_in >= 0 && h_in < inputRows) {
var w = 0
while (w < filterCols) {
val w_in = w_beg + w * rateCols
if (w_in >= 0 && w_in < inputCols) {
val inputIndex = ((b * inputRows + h_in) * inputCols + w_in) * depth + d
val inputValue = inputData(inputDataOffset + inputIndex)
val filterIndex = (h * filterCols + w) * depth + d
val filterValue = filterData(filterDataOffset + filterIndex)
val value: Float = inputValue + filterValue
if (value > cur_val) {
cur_val = value
h_max = h
w_max = w
}
}
w += 1
}
}
h += 1
}
val filterBackPropIndex =
(h_max * filterCols + w_max) * depth + d
val outputBackPropIndex =
((b * outputRows + h_out) * outputCols + w_out) * depth + d
filterBackpropData(filterBackpropDataOffset + filterBackPropIndex) +=
outBackpropData(outBackpropDataOffset + outputBackPropIndex)
d += 1
}
w_out += 1
}
h_out += 1
}
b += 1
}
}
private def dilation2DBackpropFilterDouble(input: Tensor[Double],
filter: Tensor[Double],
outBackprop: Tensor[Double],
filterBackprop: Tensor[Double],
strideRows: Int, strideCols: Int,
rateRows: Int, rateCols: Int) = {
val batch = input.size(1)
val inputRows = input.size(2)
val inputCols = input.size(3)
val depth = input.size(4)
val filterRows = filter.size(1)
val filterCols = filter.size(2)
val filterRowsEff = filterRows + (filterRows - 1) * (rateRows - 1)
val filterColsEff = filterCols + (filterCols - 1) * (rateCols - 1)
val (outputRows, padTop, _) =
Utils.getOutputSize(inputRows, filterRowsEff, strideRows, padding)
val (outputCols, padLeft, _) =
Utils.getOutputSize(inputCols, filterColsEff, strideCols, padding)
filterBackprop.resizeAs(filter)
val inputData = input.storage().array()
val inputDataOffset = input.storageOffset() - 1
val filterData = filter.storage().array()
val filterDataOffset = filter.storageOffset() - 1
val outBackpropData = outBackprop.storage().array()
val outBackpropDataOffset = outBackprop.storageOffset() - 1
val filterBackpropData = filterBackprop.storage().array()
val filterBackpropDataOffset = filterBackprop.storageOffset() - 1
var b = 0
while (b < batch) {
var h_out = 0
while (h_out < outputRows) {
val h_beg = h_out * strideRows - padTop
var w_out = 0
while (w_out < outputCols) {
val w_beg = w_out * strideCols - padLeft
var d = 0
while (d < depth) {
var cur_val = Double.MinValue
var h_max = 0
var w_max = 0
var h = 0
while (h < filterRows) {
val h_in = h_beg + h * rateRows
if (h_in >= 0 && h_in < inputRows) {
var w = 0
while (w < filterCols) {
val w_in = w_beg + w * rateCols
if (w_in >= 0 && w_in < inputCols) {
val inputIndex = ((b * inputRows + h_in) * inputCols + w_in) * depth + d
val inputValue = inputData(inputDataOffset + inputIndex)
val filterIndex = (h * filterCols + w) * depth + d
val filterValue = filterData(filterDataOffset + filterIndex)
val value: Double = inputValue + filterValue
if (value > cur_val) {
cur_val = value
h_max = h
w_max = w
}
}
w += 1
}
}
h += 1
}
val filterBackPropIndex =
(h_max * filterCols + w_max) * depth + d
val outputBackPropIndex =
((b * outputRows + h_out) * outputCols + w_out) * depth + d
filterBackpropData(filterBackpropDataOffset + filterBackPropIndex) +=
outBackpropData(outBackpropDataOffset + outputBackPropIndex)
d += 1
}
w_out += 1
}
h_out += 1
}
b += 1
}
}
override def updateOutput(inputs: Table): Tensor[D] = {
val input = inputs[Tensor[D]](1)
val filter = inputs[Tensor[D]](2)
val outBackprop = inputs[Tensor[D]](3)
require(input.dim() == 4, "input must have 4 dims")
require(filter.dim() == 3, "filter must have 3 dims")
val strideRows = strides(1)
val strideCols = strides(2)
val rateRows = rates(1)
val rateCols = rates(2)
if (ev2.getType() == FloatType) {
val inputTensor = input.asInstanceOf[Tensor[Float]]
val filterTensor = filter.asInstanceOf[Tensor[Float]]
val outBackpropTensor = outBackprop.asInstanceOf[Tensor[Float]]
val outputTensor = output.asInstanceOf[Tensor[Float]]
dilation2DBackpropFilterFloat(inputTensor, filterTensor, outBackpropTensor, outputTensor,
strideRows, strideCols, rateRows, rateCols)
} else if (ev2.getType() == DoubleType) {
val inputTensor = input.asInstanceOf[Tensor[Double]]
val filterTensor = filter.asInstanceOf[Tensor[Double]]
      val outBackpropTensor = outBackprop.asInstanceOf[Tensor[Double]]
val outputTensor = output.asInstanceOf[Tensor[Double]]
dilation2DBackpropFilterDouble(inputTensor, filterTensor, outBackpropTensor, outputTensor,
strideRows, strideCols, rateRows, rateCols)
} else {
throw new IllegalArgumentException(s"does not support datatype ${ev2.getType()}")
}
output
}
override def getClassTagNumerics() : (Array[ClassTag[_]], Array[TensorNumeric[_]]) = {
(Array(scala.reflect.classTag[T], scala.reflect.classTag[D]), Array(ev, ev2))
}
}
private[bigdl] object Dilation2DBackpropFilter {
def apply[T: ClassTag, D: ClassTag](strides: Array[Int], rates: Array[Int], padding: String)
(implicit ev: TensorNumeric[T], ev2: TensorNumeric[D]): Dilation2DBackpropFilter[T, D] =
new Dilation2DBackpropFilter(strides, rates, padding)
}
private[bigdl] class Dilation2DBackpropInput[T: ClassTag, D: ClassTag](strides: Array[Int],
rates: Array[Int],
padding: String)
(implicit ev: TensorNumeric[T], ev2: TensorNumeric[D])
extends Operation[Table, Tensor[D], T]{
  output = Tensor[D]()
private def dilationBackpropInputFloat(input: Tensor[Float],
filter: Tensor[Float],
outBackprop: Tensor[Float],
inputBackprop: Tensor[Float],
strideRows: Int, strideCols: Int,
rateRows: Int, rateCols: Int) = {
val batch = input.size(1)
val inputRows = input.size(2)
val inputCols = input.size(3)
val depth = input.size(4)
val filterRows = filter.size(1)
val filterCols = filter.size(2)
val filterRowsEff = filterRows + (filterRows - 1) * (rateRows - 1)
val filterColsEff = filterCols + (filterCols - 1) * (rateCols - 1)
val (outputRows, padTop, _) =
Utils.getOutputSize(inputRows, filterRowsEff, strideRows, padding)
val (outputCols, padLeft, _) =
Utils.getOutputSize(inputCols, filterColsEff, strideCols, padding)
inputBackprop.resizeAs(input)
val inputData = input.storage().array()
val inputDataOffset = input.storageOffset() - 1
val filterData = filter.storage().array()
val filterDataOffset = filter.storageOffset() - 1
val outBackpropData = outBackprop.storage().array()
val outBackpropDataOffset = outBackprop.storageOffset() - 1
val inputBackpropData = inputBackprop.storage().array()
val inputBackpropDataOffset = inputBackprop.storageOffset() - 1
var b = 0
while (b < batch) {
var h_out = 0
while (h_out < outputRows) {
val h_beg = h_out * strideRows - padTop
var w_out = 0
while (w_out < outputCols) {
val w_beg = w_out * strideCols - padLeft
var d = 0
while (d < depth) {
var cur_val = Float.MinValue
var h_in_max = if (h_beg < 0) 0 else h_beg
var w_in_max = if (w_beg < 0) 0 else w_beg
var h = 0
while (h < filterRows) {
val h_in = h_beg + h * rateRows
if (h_in >= 0 && h_in < inputRows) {
var w = 0
while (w < filterCols) {
val w_in = w_beg + w * rateCols
if (w_in >= 0 && w_in < inputCols) {
val inputIndex = ((b * inputRows + h_in) * inputCols + w_in) * depth + d
val inputValue = inputData(inputDataOffset + inputIndex)
val filterIndex = (h * filterCols + w) * depth + d
val filterValue = filterData(filterDataOffset + filterIndex)
val value: Float = inputValue + filterValue
if (value > cur_val) {
cur_val = value
h_in_max = h_in
w_in_max = w_in
}
}
w += 1
}
}
h += 1
}
val inputBackPropIndex =
((b * inputRows + h_in_max) * inputCols + w_in_max) * depth + d
val outputBackPropIndex =
((b * outputRows + h_out) * outputCols + w_out) * depth + d
inputBackpropData(inputBackpropDataOffset + inputBackPropIndex) +=
outBackpropData(outBackpropDataOffset + outputBackPropIndex)
d += 1
}
w_out += 1
}
h_out += 1
}
b += 1
}
}
private def dilationBackpropInputDouble(input: Tensor[Double],
filter: Tensor[Double],
outBackprop: Tensor[Double],
inputBackprop: Tensor[Double],
strideRows: Int, strideCols: Int,
rateRows: Int, rateCols: Int) = {
val batch = input.size(1)
val inputRows = input.size(2)
val inputCols = input.size(3)
val depth = input.size(4)
val filterRows = filter.size(1)
val filterCols = filter.size(2)
    val filterRowsEff = filterRows + (filterRows - 1) * (rateRows - 1)
    val filterColsEff = filterCols + (filterCols - 1) * (rateCols - 1)
    val (outputRows, padTop, _) =
      Utils.getOutputSize(inputRows, filterRowsEff, strideRows, padding)
    val (outputCols, padLeft, _) =
      Utils.getOutputSize(inputCols, filterColsEff, strideCols, padding)
inputBackprop.resizeAs(input)
val inputData = input.storage().array()
val inputDataOffset = input.storageOffset() - 1
val filterData = filter.storage().array()
val filterDataOffset = filter.storageOffset() - 1
val outBackpropData = outBackprop.storage().array()
val outBackpropDataOffset = outBackprop.storageOffset() - 1
val inputBackpropData = inputBackprop.storage().array()
val inputBackpropDataOffset = inputBackprop.storageOffset() - 1
var b = 0
while (b < batch) {
var h_out = 0
while (h_out < outputRows) {
val h_beg = h_out * strideRows - padTop
var w_out = 0
while (w_out < outputCols) {
val w_beg = w_out * strideCols - padLeft
var d = 0
while (d < depth) {
var cur_val = Double.MinValue
var h_in_max = if (h_beg < 0) 0 else h_beg
var w_in_max = if (w_beg < 0) 0 else w_beg
var h = 0
while (h < filterRows) {
val h_in = h_beg + h * rateRows
if (h_in >= 0 && h_in < inputRows) {
var w = 0
while (w < filterCols) {
val w_in = w_beg + w * rateCols
if (w_in >= 0 && w_in < inputCols) {
val inputIndex = ((b * inputRows + h_in) * inputCols + w_in) * depth + d
val inputValue = inputData(inputDataOffset + inputIndex)
val filterIndex = (h * filterCols + w) * depth + d
val filterValue = filterData(filterDataOffset + filterIndex)
val value: Double = inputValue + filterValue
if (value > cur_val) {
cur_val = value
h_in_max = h_in
w_in_max = w_in
}
}
w += 1
}
}
h += 1
}
val inputBackPropIndex =
((b * inputRows + h_in_max) * inputCols + w_in_max) * depth + d
val outputBackPropIndex =
((b * outputRows + h_out) * outputCols + w_out) * depth + d
inputBackpropData(inputBackpropDataOffset + inputBackPropIndex) +=
outBackpropData(outBackpropDataOffset + outputBackPropIndex)
d += 1
}
w_out += 1
}
h_out += 1
}
b += 1
}
}
override def updateOutput(inputs: Table): Tensor[D] = {
val input = inputs[Tensor[D]](1)
val filter = inputs[Tensor[D]](2)
val outBackprop = inputs[Tensor[D]](3)
require(input.dim() == 4, "input must have 4 dims")
require(filter.dim() == 3, "filter must have 3 dims")
val strideRows = strides(1)
val strideCols = strides(2)
val rateRows = rates(1)
val rateCols = rates(2)
if (ev2.getType() == FloatType) {
val inputTensor = input.asInstanceOf[Tensor[Float]]
val filterTensor = filter.asInstanceOf[Tensor[Float]]
val outBackpropTensor = outBackprop.asInstanceOf[Tensor[Float]]
val outputTensor = output.asInstanceOf[Tensor[Float]]
dilationBackpropInputFloat(inputTensor, filterTensor, outBackpropTensor, outputTensor,
strideRows, strideCols, rateRows, rateCols)
} else if (ev2.getType() == DoubleType) {
val inputTensor = input.asInstanceOf[Tensor[Double]]
val filterTensor = filter.asInstanceOf[Tensor[Double]]
      val outBackpropTensor = outBackprop.asInstanceOf[Tensor[Double]]
val outputTensor = output.asInstanceOf[Tensor[Double]]
dilationBackpropInputDouble(inputTensor, filterTensor, outBackpropTensor, outputTensor,
strideRows, strideCols, rateRows, rateCols)
} else {
throw new IllegalArgumentException(s"does not support datatype ${ev2.getType()}")
}
output
}
override def getClassTagNumerics() : (Array[ClassTag[_]], Array[TensorNumeric[_]]) = {
(Array(scala.reflect.classTag[T], scala.reflect.classTag[D]), Array(ev, ev2))
}
}
private[bigdl] object Dilation2DBackpropInput {
def apply[T: ClassTag, D: ClassTag](strides: Array[Int], rates: Array[Int], padding: String)
(implicit ev: TensorNumeric[T], ev2: TensorNumeric[D]): Dilation2DBackpropInput[T, D] =
new Dilation2DBackpropInput(strides, rates, padding)
}
| yiheng/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/ops/Dilation2D.scala | Scala | apache-2.0 | 27,078 |
// Copyright 2014 Commonwealth Bank of Australia
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cba.omnia.edge
package source.psv
import com.twitter.scalding.{TypedPsv => STypedPsv, _}
import cascading.tuple.Fields
object TypedPsv {
@deprecated("Use TypedPsv in Scalding directly", "3.1.0")
def apply[T : Manifest : TupleConverter : TupleSetter](paths: Seq[String]): FixedPathTypedDelimited[T] = {
val f = Dsl.intFields(0 until implicitly[TupleConverter[T]].arity)
apply[T](paths, f)
}
@deprecated("Use TypedPsv in Scalding directly", "3.1.0")
def apply[T : Manifest : TupleConverter : TupleSetter](path: String): FixedPathTypedDelimited[T] =
apply[T](Seq(path))
@deprecated("Use TypedPsv in Scalding directly", "3.1.0")
def apply[T : Manifest : TupleConverter : TupleSetter](path: String, f: Fields): FixedPathTypedDelimited[T] =
apply[T](Seq(path), f)
@deprecated("Use TypedPsv in Scalding directly", "3.1.0")
def apply[T : Manifest : TupleConverter : TupleSetter](paths: Seq[String], f: Fields): FixedPathTypedDelimited[T] =
STypedPsv[T](paths, f)
}
| CommBank/edge | src/main/scala/com/cba/omnia/edge/source/psv/TypedPsv.scala | Scala | apache-2.0 | 1,639 |
package io.github.andrebeat.bytes.benchmark
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{Input, Output}
import com.google.caliper.{Param, SimpleBenchmark}
import io.github.andrebeat.bytes._
import java.io._
import org.objenesis.strategy.StdInstantiatorStrategy
import scala.util.Random
class WriteBenchmark extends SimpleBenchmark {
val foo = Foo(1, 42, '?', 13231, 9023.0f, 55554434L, 321.0)
val bytes: Bytes = HeapBytes(1024)
// Java Serialization
var javaBaos = new ByteArrayOutputStream(100)
var javaOos = new ObjectOutputStream(javaBaos)
// Kryo Serialization
val kryo = new Kryo()
kryo.setRegistrationRequired(false)
kryo.setInstantiatorStrategy(new StdInstantiatorStrategy())
var kryoBaos = new ByteArrayOutputStream(100)
var kryoOos = new Output(kryoBaos)
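  // Caliper passes each `time*` method a repetition count; looping inside the method
  // keeps harness overhead out of the measured time.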
def timeBytes(reps: Int) = {
var i = 0
while (i < reps) {
Write(bytes, 0, foo)
i += 1
}
}
def timeJava(reps: Int) = {
var i = 0
while (i < reps) {
javaBaos = new ByteArrayOutputStream(100)
javaOos = new ObjectOutputStream(javaBaos)
javaOos.writeObject(foo)
javaOos.close()
i += 1
}
}
def timeKryo(reps: Int) = {
var i = 0
while (i < reps) {
kryoBaos = new ByteArrayOutputStream(100)
kryoOos = new Output(kryoBaos)
kryo.writeObject(kryoOos, foo)
kryoOos.close()
i += 1
}
}
}
| andrebeat/bytes | benchmark/src/main/scala/io/github/andrebeat/bytes/benchmark/WriteBenchmark.scala | Scala | mit | 1,419 |
package com.monovore.coast.samza.safe
import com.monovore.coast.samza.Path
import com.monovore.coast.samza.safe.Checkpoint._
import com.monovore.coast.wire.{Protocol, Serializer}
case class Checkpoint(
inputStreams: Map[(String, Int), InputState],
mergeOffset: Long,
outputStreams: Map[Path, Long]
)
object Checkpoint {
case class InputState(offset: Long, qualifiers: Map[Seq[Byte], Long])
object InputState {
val default = InputState(0L, Map.empty)
}
// FIXME: real serialization format for this. (JSON?)
val format: Serializer[Checkpoint] = Serializer.fromJavaSerialization[Checkpoint]
val keyFormat: Serializer[Unit] = Protocol.simple.unit
} | bkirwi/coast | samza/src/main/scala/com/monovore/coast/samza/safe/Checkpoint.scala | Scala | apache-2.0 | 673 |
package controllers.school
import controllers.helpers.CRUDController
import models.school
class Topic extends CRUDController[school.Topics, school.Topic] {
/**
* @inheritdoc
*/
def resourceCollection =
school.Topics
}
| Xanho/xanho-api | app/controllers/school/Topic.scala | Scala | apache-2.0 | 238 |
package de.htwg.zeta.parser
import scala.util.parsing.combinator.Parsers
trait UniteParsers extends Parsers {
override type Elem = Char
def include[T, UP <: UniteParsers](other: UP#Parser[T]) = new UniteParser[T, UP](other)
protected class UniteParser[T, UP <: UniteParsers] private[UniteParsers](other: UP#Parser[T]) extends Parser[T] {
override def apply(in: Input): ParseResult[T] = {
val result: UP#ParseResult[T] = other(in)
result match {
case e: UP#Error => Error(e.msg, e.next)
case f: UP#Failure => Failure(f.msg, f.next)
case s: UP#Success[T] => Success(s.get, s.next)
}
}
}
}
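// A sketch (not part of the original file): `include` lifts a rule from one
// UniteParsers grammar into another, bridging the path-dependent result types.
object NumberParsers extends UniteParsers {
  def number: Parser[Int] = rep1(elem("digit", _.isDigit)) ^^ (_.mkString.toInt)
}
object TaggedNumberParsers extends UniteParsers {
  // Reuses `number` from the unrelated grammar above.
  def taggedNumber: Parser[Int] = elem('#') ~> include(NumberParsers.number)
}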
| Zeta-Project/zeta | api/parser/src/main/scala/de/htwg/zeta/parser/UniteParsers.scala | Scala | bsd-2-clause | 650 |
package org.decaf.httplist
sealed trait RequestPartial
sealed trait HttpVerb extends RequestPartial
case object Get extends HttpVerb
case object Put extends HttpVerb
case object Post extends HttpVerb
case object Delete extends HttpVerb
case class Url(url: String) extends HttpVerb
object Url {
def empty = Url("")
}
final case class Header(key: String, value: String) extends RequestPartial
final case class PostParam(key: String, value: String) extends RequestPartial
final case class QueryParam(key: String, value: String) extends RequestPartial
case class HttpRequest(
verb: HttpVerb,
url: Url = Url.empty,
headers: Set[Header] = Set.empty,
queryParams: Set[QueryParam] = Set.empty,
postParams: Set[PostParam] = Set.empty
) {
def add(verb: HttpVerb = this.verb,
url: Url = this.url,
headers: Set[Header] = this.headers,
queryParams: Set[QueryParam] = this.queryParams,
postParams: Set[PostParam] = this.postParams) =
this.copy(
verb = verb,
url = url,
headers = headers,
queryParams = queryParams,
postParams = postParams
)
}
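// A sketch (not part of the original file): building a request incrementally with
// the copy-based `add` helper.
object HttpRequestExample {
  val searchRequest: HttpRequest =
    HttpRequest(verb = Get, url = Url("https://example.com/search"))
      .add(queryParams = Set(QueryParam("q", "scala")))
      .add(headers = Set(Header("Accept", "application/json")))
}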
| adamdecaf/httplist | src/main/scala/http.scala | Scala | apache-2.0 | 1,126 |
/**
* Copyright (C) 2017 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.builder
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.xforms._
import org.scalajs.jquery.JQueryEventObject
import scala.scalajs.js.Dynamic.{global ⇒ g}
import scala.scalajs.js.timers._
object DialogItemset {
locally {
val LabelInputSelector = ".xforms-dialog.fb-dialog-itemsets .fb-itemset-label-input"
def suggestValueFromLabel(label: String): Option[String] =
      label.trimAllToOpt map (_.replaceAll("""\s+""", "-").toLowerCase)
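    // e.g. suggestValueFromLabel("  Red Wine ") == Some("red-wine"); a blank label yields None.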
// Automatically set a corresponding value when the user changes a label
$(g.window.document).on(
"change.orbeon",
LabelInputSelector,
(event: JQueryEventObject) ⇒ {
val labelXFormsInput = $(event.target).closest(".fb-itemset-label-input")(0)
val valueXFormsInput = $(labelXFormsInput).closest(".fr-grid-tr").find(".fb-itemset-value-input")(0)
if (DocumentAPI.getValue(valueXFormsInput).toOption exists (_.isBlank)) {
// If user pressed tab, after `change` on the label input, there is a `focus` on the value input,
// which stores the value as a server value, so if we set the value before the `focus`, the value
// isn't sent, hence the deferring.
DocumentAPI.getValue(labelXFormsInput).toOption flatMap suggestValueFromLabel foreach { suggestedValue ⇒
setTimeout(1) {
DocumentAPI.setValue(valueXFormsInput, suggestedValue)
}
}
}
}
)
}
}
| brunobuzzi/orbeon-forms | form-builder/js/src/main/scala/org/orbeon/builder/DialogItemset.scala | Scala | lgpl-2.1 | 2,148 |
/*
* Copyright 2014 Databricks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.databricks.spark.avro
import java.io.{IOException, File}
import java.nio.ByteBuffer
import java.util
import scala.collection.immutable.HashSet
import scala.collection.mutable.ArrayBuffer
import scala.util.Random
import com.google.common.io.Files
import org.apache.spark.sql.SQLContext
private[avro] object TestUtils {
/**
* This function checks that all records in a file match the original
* record.
*/
def checkReloadMatchesSaved(sqlContext: SQLContext, testFile: String, avroDir: String) = {
def convertToString(elem: Any): String = {
elem match {
case null => "NULL" // HashSets can't have null in them, so we use a string instead
case arrayBuf: ArrayBuffer[Any] => arrayBuf.toArray.deep.mkString(" ")
case arrayByte: Array[Byte] => arrayByte.deep.mkString(" ")
case other => other.toString
}
}
val originalEntries = sqlContext.read.avro(testFile).collect()
val newEntries = sqlContext.read.avro(avroDir).collect()
assert(originalEntries.length == newEntries.length)
val origEntrySet = Array.fill(originalEntries(0).size)(new HashSet[Any]())
for (origEntry <- originalEntries) {
var idx = 0
for (origElement <- origEntry.toSeq) {
origEntrySet(idx) += convertToString(origElement)
idx += 1
}
}
for (newEntry <- newEntries) {
var idx = 0
for (newElement <- newEntry.toSeq) {
assert(origEntrySet(idx).contains(convertToString(newElement)))
idx += 1
}
}
}
def withTempDir(f: File => Unit): Unit = {
val dir = Files.createTempDir()
dir.delete()
try f(dir) finally deleteRecursively(dir)
}
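  // Usage sketch: TestUtils.withTempDir { dir => ... } always deletes the directory,
  // even when the body throws.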
/**
* This function deletes a file or a directory with everything that's in it. This function is
* copied from Spark with minor modifications made to it. See original source at:
* github.com/apache/spark/blob/master/core/src/main/scala/org/apache/spark/util/Utils.scala
*/
def deleteRecursively(file: File) {
def listFilesSafely(file: File): Seq[File] = {
if (file.exists()) {
val files = file.listFiles()
if (files == null) {
throw new IOException("Failed to list files for dir: " + file)
}
files
} else {
List()
}
}
if (file != null) {
try {
if (file.isDirectory) {
var savedIOException: IOException = null
for (child <- listFilesSafely(file)) {
try {
deleteRecursively(child)
} catch {
// In case of multiple exceptions, only last one will be thrown
case ioe: IOException => savedIOException = ioe
}
}
if (savedIOException != null) {
throw savedIOException
}
}
} finally {
if (!file.delete()) {
// Delete can also fail if the file simply did not exist
if (file.exists()) {
throw new IOException("Failed to delete: " + file.getAbsolutePath)
}
}
}
}
}
/**
* This function generates a random map(string, int) of a given size.
*/
private[avro] def generateRandomMap(rand: Random, size: Int): java.util.Map[String, Int] = {
val jMap = new util.HashMap[String, Int]()
for (i <- 0 until size) {
jMap.put(rand.nextString(5), i)
}
jMap
}
/**
* This function generates a random array of booleans of a given size.
*/
private[avro] def generateRandomArray(rand: Random, size: Int): util.ArrayList[Boolean] = {
val vec = new util.ArrayList[Boolean]()
for (i <- 0 until size) {
vec.add(rand.nextBoolean())
}
vec
}
/**
* This function generates a random ByteBuffer of a given size.
*/
private[avro] def generateRandomByteBuffer(rand: Random, size: Int): ByteBuffer = {
val bb = ByteBuffer.allocate(size)
val arrayOfBytes = new Array[Byte](size)
rand.nextBytes(arrayOfBytes)
bb.put(arrayOfBytes)
}
}
| fvlankvelt/spark-avro | src/test/scala/com/databricks/spark/avro/TestUtils.scala | Scala | apache-2.0 | 4,612 |
package eu.brosbit.opos.model
import _root_.net.liftweb.mongodb._
import org.bson.types.ObjectId
import _root_.net.liftweb.json.JsonDSL._
object ThemesPlan extends MongoDocumentMeta[ThemesPlan] {
override def collectionName = "themesplan"
override def formats = super.formats + new ObjectIdSerializer + new DateSerializer
def create = ThemesPlan(ObjectId.get, Nil, "", "", "", 0L)
}
case class ThemesPlan(var _id: ObjectId, var classes: List[String],
var subjectStr: String, var urlLink: String,
var teacherName: String, var teacherId: Long)
extends MongoDocument[ThemesPlan] {
def meta = ThemesPlan
def isValid = this.urlLink.length > 20
}
| mikolajs/osp | src/main/scala/eu/brosbit/opos/model/ThemesPlan.scala | Scala | agpl-3.0 | 702 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.component.metrics.messages
import com.webtrends.harness.component.metrics.metrictype.Metric
trait MetricMessage {
def metric: Metric
}
case class RemoveMetric(val metric: Metric) extends MetricMessage
| Webtrends/wookiee-metrics | src/main/scala/com/webtrends/harness/component/metrics/messages/MetricMessage.scala | Scala | apache-2.0 | 975 |
/*
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package com.typesafe.coffeescript
import com.typesafe.jse.{Rhino, CommonNode, Node, Engine}
import com.typesafe.jse.Engine.{ExecuteJs, JsExecutionResult}
import java.io.{InputStream, File}
import java.util.concurrent.TimeUnit
import _root_.sbt.IO
import scala.collection.immutable
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import spray.json._
final case class CompileArgs(
coffeeScriptInputFile: File,
javaScriptOutputFile: File,
sourceMapOpts: Option[SourceMapOptions],
bare: Boolean,
literate: Boolean
)
/**
 * @param sourceMapOutputFile The file to write the source map to.
 * @param sourceMapRef A reference to the source map, to be embedded in the generated
 *        JavaScript (e.g. as a sourceMappingURL comment); can be absolute or relative.
 * @param javaScriptFileName The name of the generated JavaScript file, as recorded in the source map.
 * @param coffeeScriptRootRef The root reference from which the CoffeeScript source paths are resolved.
 * @param coffeeScriptPathRefs References to the individual CoffeeScript source files, relative to the root.
 */
final case class SourceMapOptions(
sourceMapOutputFile: File,
sourceMapRef: String,
javaScriptFileName: String,
coffeeScriptRootRef: String,
coffeeScriptPathRefs: List[String]
)
sealed trait CompileResult
final case object CompileSuccess extends CompileResult
final case class GenericError(message: String) extends CompileResult
final case class CodeError(
message: String,
lineContent: String,
lineNumber: Int,
lineOffset: Int
) extends CompileResult
final case class CoffeeScriptCompilerException(message: String) extends Exception(message)
object CoffeeScriptCompiler {
def shellContent[A](f: InputStream => A): A = {
val resource = "com/typesafe/coffeescript/driver.js"
val classLoader = CoffeeScriptCompiler.getClass.getClassLoader
val is = classLoader.getResourceAsStream(resource)
try f(is) finally is.close()
}
def withShellFileCopiedTo(file: File): CoffeeScriptCompiler = {
shellContent(is => IO.transfer(is, file))
new CoffeeScriptCompiler(file)
}
object JsonConversion {
import DefaultJsonProtocol._
def toJsonSeq(args: Seq[CompileArgs]): JsArray = {
JsArray(args.map(toJson).to[List])
}
def toJson(args: CompileArgs): JsObject = {
import args._
JsObject(
"coffeeScriptInputFile" -> JsString(coffeeScriptInputFile.getPath),
"javaScriptOutputFile" -> JsString(javaScriptOutputFile.getPath),
"sourceMapOpts" -> sourceMapOpts.fold[JsValue](JsNull)(toJson(_: SourceMapOptions)),
"bare" -> JsBoolean(bare),
"literate" -> JsBoolean(literate)
)
}
def toJson(opts: SourceMapOptions): JsObject = {
import opts._
JsObject(
"sourceMapOutputFile" -> JsString(sourceMapOutputFile.getPath),
"sourceMapRef" -> JsString(sourceMapRef),
"javaScriptFileName" -> JsString(javaScriptFileName),
"coffeeScriptRootRef" -> JsString(coffeeScriptRootRef),
"coffeeScriptPathRefs" -> JsArray(coffeeScriptPathRefs.map(JsString.apply))
)
}
def fromJson(json: JsObject): CompileResult = {
json.fields("result").asInstanceOf[JsString].value match {
case "CompileSuccess" =>
CompileSuccess
case "CodeError" =>
val message = json.fields("message").asInstanceOf[JsString].value
val lineCode = json.fields("lineContent").asInstanceOf[JsString].value
val lineNumber = json.fields("lineNumber").asInstanceOf[JsNumber].value.intValue
val lineOffset = json.fields("lineOffset").asInstanceOf[JsNumber].value.intValue
CodeError(message, lineCode, lineNumber, lineOffset)
case "GenericError" =>
GenericError(json.fields("message").asInstanceOf[JsString].value)
case _ =>
throw CoffeeScriptCompilerException(s"Unknown JSON result running CoffeeScript driver: $json")
}
}
def fromJsonSeq(json: JsArray): Seq[CompileResult] = {
json.elements.map(v => fromJson(v.asInstanceOf[JsObject]))
}
}
}
class CoffeeScriptCompiler(shellFile: File) {
def compileFile(jsExecutor: JsExecutor, compileArgs: CompileArgs)(implicit ec: ExecutionContext): Future[CompileResult] = {
compileBatch(jsExecutor, Seq(compileArgs)).map { resultsSeq =>
assert(resultsSeq.length == 1)
val result = resultsSeq(0)
result
}
}
def compileBatch(jsExecutor: JsExecutor, compileArgs: Seq[CompileArgs])(implicit ec: ExecutionContext): Future[Seq[CompileResult]] = {
import CoffeeScriptCompiler.JsonConversion
val arg = JsonConversion.toJsonSeq(compileArgs).compactPrint
jsExecutor.executeJs(shellFile, immutable.Seq(arg)).map {
case JsExecutionResult(0, stdoutBytes, stderrBytes) if stderrBytes.length == 0 =>
val jsonResult = (new String(stdoutBytes.toArray, "utf-8")).asJson.asInstanceOf[JsArray]
JsonConversion.fromJsonSeq(jsonResult)
case result =>
val exitValue = result.exitValue
val stdout = new String(result.output.toArray, "utf-8")
val stderr = new String(result.error.toArray, "utf-8")
throw CoffeeScriptCompilerException(s"Unexpected result running CoffeeScript driver: exit value: $exitValue, stdout: $stdout, stderr: $stderr")
}
}
}
| richdougherty/sbt-coffeescript-plugin | src/main/scala/com/typesafe/coffeescript/CoffeeScriptCompiler.scala | Scala | apache-2.0 | 5,123 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.uuid
import java.util.UUID
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class TimeSortedUuidGeneratorTest extends Specification {
val time = 1435598908099L // System.currentTimeMillis()
"TimeSortedUuidGenerator" should {
"create uuids with correct formats" >> {
val id = TimeSortedUuidGenerator.createUuid(time).toString
id.substring(0, 18) mustEqual "000014e4-05ce-4ac3"
val uuid = UUID.fromString(id)
uuid.version() mustEqual 4
uuid.variant() mustEqual 2
}
"create uuids with time as the msb" >> {
val ids = Seq(time - 1, time, time + 1, time + 1000)
.map(TimeSortedUuidGenerator.createUuid).map(_.toString)
ids.sorted mustEqual ids
}
}
}
| locationtech/geomesa | geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/uuid/TimeSortedUuidGeneratorTest.scala | Scala | apache-2.0 | 1,319 |
package pl.newicom.dddd.process
import akka.actor.{ActorPath, ActorSystem}
import org.joda.time.DateTime.now
import org.joda.time.{DateTime, Period}
import pl.newicom.dddd.aggregate._
import pl.newicom.dddd.delivery.protocol.DeliveryHandler
import pl.newicom.dddd.messaging.{Message, MetaData, MetaDataPropagationPolicy}
import pl.newicom.dddd.messaging.command.CommandMessage
import pl.newicom.dddd.office.OfficeFactory._
import pl.newicom.dddd.office.{CommandHandlerResolver, Office, RemoteOfficeId}
import pl.newicom.dddd.scheduling.ScheduleEvent
trait SagaCollaboration {
this: SagaBase =>
protected def processCollaborators: List[RemoteOfficeId[_]]
protected def deliverMsg(target: ActorPath, msg: Message): Unit = {
deliver(target)(msg.withDeliveryId(_))
}
protected def deliverCommand(target: ActorPath, command: Command): Unit = {
deliverMsg(target, CommandMessage(command).withMetaData(
MetaDataPropagationPolicy.onCommandSentByPM(currentEventMsg.metadata, MetaData.empty))
)
}
protected def schedule(event: DomainEvent, deadline: DateTime, correlationId: EntityId = sagaId): Unit = {
val command = ScheduleEvent("global", officePath, deadline, event)
handlerOf(command) deliver {
CommandMessage(command)
.withCorrelationId(correlationId)
.withTag(officeId.id)
}
}
//
// DSL helpers
//
def ⟶[C <: Command](command: C): Unit =
handlerOf(command) deliver command
def ⟵(event: DomainEvent): ToBeScheduled = schedule(event)
implicit def deliveryHandler: DeliveryHandler = {
(ap: ActorPath, msg: Any) => msg match {
case c: Command => deliverCommand(ap, c)
case m: Message => deliverMsg(ap, m)
}
}.tupled
def schedule(event: DomainEvent) = new ToBeScheduled(event)
class ToBeScheduled(event: DomainEvent) {
def on(dateTime: DateTime): Unit = schedule(event, dateTime)
def at(dateTime: DateTime): Unit = on(dateTime)
def in(period: Period): Unit = on(now.plus(period))
def asap(): Unit = on(now)
}
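  // Usage sketch (hypothetical event type):
  //   schedule(PaymentOverdue(invoiceId)) in Period.days(14)
  // re-delivers the event to this saga via the scheduling office two weeks from now.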
//
// Private members
//
private implicit val as: ActorSystem = context.system
private val officeIdResolver = new CommandHandlerResolver(processCollaborators)
private def handlerOf(command: Command)(implicit as: ActorSystem): Office = office(officeIdResolver(command))
}
| AndreyLadniy/akka-ddd | akka-ddd-core/src/main/scala/pl/newicom/dddd/process/SagaCollaboration.scala | Scala | mit | 2,336 |
package domain
import play.api.libs.json.Json
case class User(id: String, username: String, full_name: String, profile_picture: String)
object User {
implicit val format = Json.format[User]
}
| sofiaaacole/socialDashboard | app/domain/User.scala | Scala | mit | 195 |
package com.sksamuel.elastic4s.searches.queries
import com.sksamuel.elastic4s.DefinitionAttributes.{DefinitionAttributeBoost, DefinitionAttributeRewrite}
import com.sksamuel.elastic4s.searches.QueryDefinition
import org.elasticsearch.index.query.QueryBuilders
case class WildcardQueryDefinition(field: String, query: Any)
extends QueryDefinition with MultiTermQueryDefinition
with DefinitionAttributeRewrite
with DefinitionAttributeBoost {
val builder = QueryBuilders.wildcardQuery(field, query.toString)
val _builder = builder
def queryName(queryName: String): this.type = {
builder.queryName(queryName)
this
}
}
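// A sketch (not part of elastic4s): `?` matches exactly one character and `*` any run,
// mirroring Elasticsearch wildcard query semantics.
object WildcardQueryExample {
  val byUserPattern = WildcardQueryDefinition("user", "k?mch*").queryName("users-by-pattern")
}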
| ulric260/elastic4s | elastic4s-core/src/main/scala/com/sksamuel/elastic4s/searches/queries/WildcardQueryDefinition.scala | Scala | apache-2.0 | 644 |
package org.liquidizer.model
import _root_.net.liftweb.mapper._
import _root_.net.liftweb.util._
import _root_.net.liftweb.common._
import org.liquidizer.lib.ssl._
/** Representation of a user of the Liquidizer system */
object User extends User
with LongKeyedMetaMapper[User]
with MetaMegaProtoUser[User] {
override def dbTableName = "users"
override def signupFields = nick :: email :: password :: Nil
override def fieldOrder = List(id, email, nick, profile, password, validated)
// comment this line out to require email validations
override def skipEmailValidation = true
override def logUserIn(user: User) {
    for {
      certs <- SSLClient.valid_certificates
      cert <- certs
      if Certificate.find(By(Certificate.id, SSLClient.certificate_id(cert))).isEmpty
    } Certificate.create.id(SSLClient.certificate_id(cert)).owner(user).save
super.logUserIn(user)
}
}
/** Representation of a user of the Liquidizer system */
class User extends LongKeyedMapper[User] with MegaProtoUser[User] {
def getSingleton = User
object nick extends MappedString(this,32)
object profile extends MappedText(this)
def getUserByNick(nick : String) : Option[User] = {
User.find(By(User.nick, nick))
}
override def toString() : String = nick.is
}
| liquidizer/liquidizer | src/main/scala/org/liquidizer/model/User.scala | Scala | mit | 1,376 |