/**
* Copyright (c) 2002-2014, OnPoint Digital, Inc. All rights reserved
*
* THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
* @author Alex Westphal 29/May/2014
* @version 29/May/2014
*/
package timez.syntax.time
import java.time.YearMonth
import java.time.temporal.TemporalField
trait YearMonthOps extends Ops[YearMonth] {
def &(dayOfMonth: Int) = self.atDay(dayOfMonth)
def ~(dayOfMonth: Int) = self.atDay(dayOfMonth)
def apply(field: TemporalField) = self.get(field)
def month = self.getMonth
def year = self.getYear
}
trait YearMonthSyntax {
implicit def ToYearMonthOps(ym: YearMonth) = new YearMonthOps {
override def self: YearMonth = ym
}
}
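// Usage sketch (hypothetical, assuming the library's `Ops` base trait exposes a
// `self` accessor and that this syntax trait is mixed in where needed):
//   import java.time.YearMonth
//   import java.time.temporal.ChronoField
//   val ym = YearMonth.of(2014, 5)
//   ym & 29               // LocalDate 2014-05-29, via atDay
//   ym(ChronoField.YEAR)  // 2014, via get
//   ym.month              // Month.MAY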
| phantomspectre/timez | src/main/scala/timez/syntax/time/YearMonthSyntax.scala | Scala | bsd-3-clause | 1,113 |
/*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.example.loadmodel
import java.nio.file.Paths
import com.intel.analytics.bigdl.models.inception.Inception_v1_NoAuxClassifier
import com.intel.analytics.bigdl.nn.Module
import com.intel.analytics.bigdl.optim.{Top1Accuracy, Top5Accuracy, Validator}
import com.intel.analytics.bigdl.utils.Engine
import org.apache.log4j.Logger
import org.apache.spark.SparkContext
import scopt.OptionParser
import scala.language.existentials
/**
 * ModelValidator provides an integrated example that loads models
 * and tests them over the ImageNet validation dataset
 * (running as a local Java program or as a standard Spark program).
*/
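// Sample invocation (hypothetical jar name and paths; the actual assembly jar and
// dataset location depend on your build and environment):
//   spark-submit --class com.intel.analytics.bigdl.example.loadmodel.ModelValidator \
//     bigdl-assembly.jar -t caffe -m inception --caffeDefPath deploy.prototxt \
//     --modelPath bvlc_googlenet.caffemodel -f /path/to/imagenet -b 32 --env spark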
object ModelValidator {
val logger = Logger.getLogger(getClass)
sealed trait ModelType
case object TorchModel extends ModelType
case object CaffeModel extends ModelType
case object BigDlModel extends ModelType
case class TestLocalParams(
folder: String = "./",
modelType: ModelType = null,
modelName: String = "",
caffeDefPath: Option[String] = None,
modelPath: String = "",
batchSize: Int = 32,
meanFile: Option[String] = None,
coreNumber: Int = Runtime.getRuntime().availableProcessors() / 2,
nodeNumber: Int = -1,
env: String = "local"
)
val testLocalParser = new OptionParser[TestLocalParams]("BigDL Image Classifier Example") {
head("BigDL Image Classifier Example")
opt[String]('f', "folder")
.text("where you put your local image files")
.action((x, c) => c.copy(folder = x))
opt[String]('m', "modelName")
.text("the model name you want to test")
.required()
.action((x, c) => c.copy(modelName = x.toLowerCase()))
opt[String]('t', "modelType")
.text("torch, caffe or bigdl")
.required()
.action((x, c) =>
x.toLowerCase() match {
case "torch" => c.copy(modelType = TorchModel)
case "caffe" => c.copy(modelType = CaffeModel)
case "bigdl" => c.copy(modelType = BigDlModel)
case _ =>
throw new IllegalArgumentException("only torch, caffe or bigdl supported")
}
)
opt[String]("caffeDefPath")
.text("caffe define path")
.action((x, c) => c.copy(caffeDefPath = Some(x)))
opt[String]("modelPath")
.text("model path")
.action((x, c) => c.copy(modelPath = x))
opt[Int]('b', "batchSize")
.text("batch size")
.action((x, c) => c.copy(batchSize = x))
opt[String]("meanFile")
.text("mean file")
.action((x, c) => c.copy(meanFile = Some(x)))
opt[Int]('c', "core")
.text("cores number to test the model")
.action((x, c) => c.copy(coreNumber = x))
opt[Int]('n', "node")
.text("node number to test the model")
.action((x, c) => c.copy(nodeNumber = x))
opt[String]("env")
.text("execution environment")
.validate(x => {
if (Set("local", "spark").contains(x.toLowerCase)) {
success
} else {
failure("env only support local|spark")
}
})
.action((x, c) => c.copy(env = x.toLowerCase()))
.required()
}
def main(args: Array[String]): Unit = {
testLocalParser.parse(args, TestLocalParams()).foreach(param => {
Engine.setCoreNumber(param.coreNumber)
val sc = Engine.init(param.nodeNumber, param.coreNumber, param.env == "spark")
.map(conf => {
conf.setAppName("BigDL Image Classifier Example")
.set("spark.akka.frameSize", 64.toString)
new SparkContext(conf)
})
val valPath = Paths.get(param.folder, "val")
val (model, validateDataSet) = param.modelType match {
case CaffeModel =>
param.modelName match {
case "alexnet" =>
(Module.loadCaffe[Float](AlexNet(1000),
param.caffeDefPath.get, param.modelPath),
AlexNetPreprocessor(valPath, param.batchSize, param.meanFile.get, sc))
case "inception" =>
(Module.loadCaffe[Float](Inception_v1_NoAuxClassifier(1000),
param.caffeDefPath.get, param.modelPath),
InceptionPreprocessor(valPath, param.batchSize, sc))
}
case TorchModel =>
param.modelName match {
case "resnet" =>
(Module.loadTorch[Float](param.modelPath),
ResNetPreprocessor(valPath, param.batchSize, sc))
}
        case _ => throw new IllegalArgumentException(s"${ param.modelType } is not " +
          s"supported in this example, please use alexnet/inception/resnet")
}
println(model)
val validator = Validator(model, validateDataSet)
val evaluator = Array(new Top1Accuracy[Float](), new Top5Accuracy[Float]())
val result = validator.test(evaluator)
result.foreach(r => {
logger.info(s"${ r._2 } is ${ r._1 }")
})
})
}
}
| SeaOfOcean/BigDL | dl/src/main/scala/com/intel/analytics/bigdl/example/loadmodel/ModelValidator.scala | Scala | apache-2.0 | 5,674 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler
import java.nio.ByteBuffer
import java.util.{Locale, Timer, TimerTask}
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicLong
import scala.collection.Set
import scala.collection.mutable.{ArrayBuffer, HashMap, HashSet}
import scala.util.Random
import org.apache.spark._
import org.apache.spark.TaskState.TaskState
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config
import org.apache.spark.scheduler.SchedulingMode.SchedulingMode
import org.apache.spark.scheduler.TaskLocality.TaskLocality
import org.apache.spark.storage.BlockManagerId
import org.apache.spark.util.{AccumulatorV2, ThreadUtils, Utils}
/**
* Schedules tasks for multiple types of clusters by acting through a SchedulerBackend.
* It can also work with a local setup by using a `LocalSchedulerBackend` and setting
* isLocal to true. It handles common logic, like determining a scheduling order across jobs, waking
* up to launch speculative tasks, etc.
*
* Clients should first call initialize() and start(), then submit task sets through the
 * submitTasks method.
*
* THREADING: [[SchedulerBackend]]s and task-submitting clients can call this class from multiple
* threads, so it needs locks in public API methods to maintain its state. In addition, some
* [[SchedulerBackend]]s synchronize on themselves when they want to send events here, and then
* acquire a lock on us, so we need to make sure that we don't try to lock the backend while
* we are holding a lock on ourselves.
*/
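// Typical wiring (sketch; in practice SparkContext performs these steps when it
// creates the scheduler, so this is illustrative rather than user-facing API):
//   val scheduler = new TaskSchedulerImpl(sc)
//   scheduler.initialize(backend)  // builds the FIFO/FAIR root pool
//   scheduler.start()              // starts the backend and the speculation thread
//   scheduler.submitTasks(taskSet) // called by the DAGScheduler per stage attempt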
private[spark] class TaskSchedulerImpl(
val sc: SparkContext,
val maxTaskFailures: Int,
isLocal: Boolean = false)
extends TaskScheduler with Logging {
import TaskSchedulerImpl._
def this(sc: SparkContext) = {
this(sc, sc.conf.get(config.MAX_TASK_FAILURES))
}
  // Lazily initializing blacklistTrackerOpt to avoid getting empty ExecutorAllocationClient,
// because ExecutorAllocationClient is created after this TaskSchedulerImpl.
private[scheduler] lazy val blacklistTrackerOpt = maybeCreateBlacklistTracker(sc)
val conf = sc.conf
// How often to check for speculative tasks
val SPECULATION_INTERVAL_MS = conf.getTimeAsMs("spark.speculation.interval", "100ms")
// Duplicate copies of a task will only be launched if the original copy has been running for
// at least this amount of time. This is to avoid the overhead of launching speculative copies
// of tasks that are very short.
val MIN_TIME_TO_SPECULATION = 100
private val speculationScheduler =
ThreadUtils.newDaemonSingleThreadScheduledExecutor("task-scheduler-speculation")
  // Threshold above which we warn the user that the initial TaskSet may be starved
val STARVATION_TIMEOUT_MS = conf.getTimeAsMs("spark.starvation.timeout", "15s")
// CPUs to request per task
val CPUS_PER_TASK = conf.getInt("spark.task.cpus", 1)
// TaskSetManagers are not thread safe, so any access to one should be synchronized
// on this class.
private val taskSetsByStageIdAndAttempt = new HashMap[Int, HashMap[Int, TaskSetManager]]
// Protected by `this`
private[scheduler] val taskIdToTaskSetManager = new HashMap[Long, TaskSetManager]
val taskIdToExecutorId = new HashMap[Long, String]
@volatile private var hasReceivedTask = false
@volatile private var hasLaunchedTask = false
private val starvationTimer = new Timer(true)
// Incrementing task IDs
val nextTaskId = new AtomicLong(0)
// IDs of the tasks running on each executor
private val executorIdToRunningTaskIds = new HashMap[String, HashSet[Long]]
def runningTasksByExecutors: Map[String, Int] = synchronized {
executorIdToRunningTaskIds.toMap.mapValues(_.size)
}
// The set of executors we have on each host; this is used to compute hostsAlive, which
// in turn is used to decide when we can attain data locality on a given host
protected val hostToExecutors = new HashMap[String, HashSet[String]]
protected val hostsByRack = new HashMap[String, HashSet[String]]
protected val executorIdToHost = new HashMap[String, String]
// Listener object to pass upcalls into
var dagScheduler: DAGScheduler = null
var backend: SchedulerBackend = null
val mapOutputTracker = SparkEnv.get.mapOutputTracker.asInstanceOf[MapOutputTrackerMaster]
private var schedulableBuilder: SchedulableBuilder = null
// default scheduler is FIFO
private val schedulingModeConf = conf.get(SCHEDULER_MODE_PROPERTY, SchedulingMode.FIFO.toString)
val schedulingMode: SchedulingMode =
try {
SchedulingMode.withName(schedulingModeConf.toUpperCase(Locale.ROOT))
} catch {
case e: java.util.NoSuchElementException =>
throw new SparkException(s"Unrecognized $SCHEDULER_MODE_PROPERTY: $schedulingModeConf")
}
val rootPool: Pool = new Pool("", schedulingMode, 0, 0)
// This is a var so that we can reset it for testing purposes.
private[spark] var taskResultGetter = new TaskResultGetter(sc.env, this)
override def setDAGScheduler(dagScheduler: DAGScheduler) {
this.dagScheduler = dagScheduler
}
def initialize(backend: SchedulerBackend) {
this.backend = backend
schedulableBuilder = {
schedulingMode match {
case SchedulingMode.FIFO =>
new FIFOSchedulableBuilder(rootPool)
case SchedulingMode.FAIR =>
new FairSchedulableBuilder(rootPool, conf)
case _ =>
throw new IllegalArgumentException(s"Unsupported $SCHEDULER_MODE_PROPERTY: " +
s"$schedulingMode")
}
}
schedulableBuilder.buildPools()
}
def newTaskId(): Long = nextTaskId.getAndIncrement()
override def start() {
backend.start()
if (!isLocal && conf.getBoolean("spark.speculation", false)) {
logInfo("Starting speculative execution thread")
speculationScheduler.scheduleWithFixedDelay(new Runnable {
override def run(): Unit = Utils.tryOrStopSparkContext(sc) {
checkSpeculatableTasks()
}
}, SPECULATION_INTERVAL_MS, SPECULATION_INTERVAL_MS, TimeUnit.MILLISECONDS)
}
}
override def postStartHook() {
waitBackendReady()
}
override def submitTasks(taskSet: TaskSet) {
val tasks = taskSet.tasks
logInfo("Adding task set " + taskSet.id + " with " + tasks.length + " tasks")
this.synchronized {
val manager = createTaskSetManager(taskSet, maxTaskFailures)
val stage = taskSet.stageId
val stageTaskSets =
taskSetsByStageIdAndAttempt.getOrElseUpdate(stage, new HashMap[Int, TaskSetManager])
stageTaskSets(taskSet.stageAttemptId) = manager
val conflictingTaskSet = stageTaskSets.exists { case (_, ts) =>
ts.taskSet != taskSet && !ts.isZombie
}
if (conflictingTaskSet) {
throw new IllegalStateException(s"more than one active taskSet for stage $stage:" +
s" ${stageTaskSets.toSeq.map{_._2.taskSet.id}.mkString(",")}")
}
schedulableBuilder.addTaskSetManager(manager, manager.taskSet.properties)
if (!isLocal && !hasReceivedTask) {
starvationTimer.scheduleAtFixedRate(new TimerTask() {
override def run() {
if (!hasLaunchedTask) {
logWarning("Initial job has not accepted any resources; " +
"check your cluster UI to ensure that workers are registered " +
"and have sufficient resources")
} else {
this.cancel()
}
}
}, STARVATION_TIMEOUT_MS, STARVATION_TIMEOUT_MS)
}
hasReceivedTask = true
}
backend.reviveOffers()
}
// Label as private[scheduler] to allow tests to swap in different task set managers if necessary
private[scheduler] def createTaskSetManager(
taskSet: TaskSet,
maxTaskFailures: Int): TaskSetManager = {
new TaskSetManager(this, taskSet, maxTaskFailures, blacklistTrackerOpt)
}
override def cancelTasks(stageId: Int, interruptThread: Boolean): Unit = synchronized {
logInfo("Cancelling stage " + stageId)
taskSetsByStageIdAndAttempt.get(stageId).foreach { attempts =>
attempts.foreach { case (_, tsm) =>
// There are two possible cases here:
// 1. The task set manager has been created and some tasks have been scheduled.
// In this case, send a kill signal to the executors to kill the task and then abort
// the stage.
        //    2. The task set manager has been created but no tasks have been scheduled. In this case,
// simply abort the stage.
tsm.runningTasksSet.foreach { tid =>
taskIdToExecutorId.get(tid).foreach(execId =>
backend.killTask(tid, execId, interruptThread, reason = "Stage cancelled"))
}
tsm.abort("Stage %s cancelled".format(stageId))
logInfo("Stage %d was cancelled".format(stageId))
}
}
}
override def killTaskAttempt(taskId: Long, interruptThread: Boolean, reason: String): Boolean = {
logInfo(s"Killing task $taskId: $reason")
val execId = taskIdToExecutorId.get(taskId)
if (execId.isDefined) {
backend.killTask(taskId, execId.get, interruptThread, reason)
true
} else {
logWarning(s"Could not kill task $taskId because no task with that ID was found.")
false
}
}
/**
* Called to indicate that all task attempts (including speculated tasks) associated with the
* given TaskSetManager have completed, so state associated with the TaskSetManager should be
* cleaned up.
*/
def taskSetFinished(manager: TaskSetManager): Unit = synchronized {
taskSetsByStageIdAndAttempt.get(manager.taskSet.stageId).foreach { taskSetsForStage =>
taskSetsForStage -= manager.taskSet.stageAttemptId
if (taskSetsForStage.isEmpty) {
taskSetsByStageIdAndAttempt -= manager.taskSet.stageId
}
}
manager.parent.removeSchedulable(manager)
logInfo(s"Removed TaskSet ${manager.taskSet.id}, whose tasks have all completed, from pool" +
s" ${manager.parent.name}")
}
private def resourceOfferSingleTaskSet(
taskSet: TaskSetManager,
maxLocality: TaskLocality,
shuffledOffers: Seq[WorkerOffer],
availableCpus: Array[Int],
tasks: IndexedSeq[ArrayBuffer[TaskDescription]]) : Boolean = {
var launchedTask = false
// nodes and executors that are blacklisted for the entire application have already been
// filtered out by this point
for (i <- 0 until shuffledOffers.size) {
val execId = shuffledOffers(i).executorId
val host = shuffledOffers(i).host
if (availableCpus(i) >= CPUS_PER_TASK) {
try {
for (task <- taskSet.resourceOffer(execId, host, maxLocality)) {
tasks(i) += task
val tid = task.taskId
taskIdToTaskSetManager(tid) = taskSet
taskIdToExecutorId(tid) = execId
executorIdToRunningTaskIds(execId).add(tid)
availableCpus(i) -= CPUS_PER_TASK
assert(availableCpus(i) >= 0)
launchedTask = true
}
} catch {
case e: TaskNotSerializableException =>
logError(s"Resource offer failed, task set ${taskSet.name} was not serializable")
// Do not offer resources for this task, but don't throw an error to allow other
// task sets to be submitted.
return launchedTask
}
}
}
return launchedTask
}
/**
* Called by cluster manager to offer resources on slaves. We respond by asking our active task
* sets for tasks in order of priority. We fill each node with tasks in a round-robin manner so
* that tasks are balanced across the cluster.
*/
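  // Illustration (assuming CPUS_PER_TASK = 1): given offers [A: 2 cores, B: 2 cores]
  // and a 3-task TaskSet, each pass of resourceOfferSingleTaskSet assigns at most one
  // task per executor, so the passes yield A, B and then A, rather than filling A first.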
def resourceOffers(offers: IndexedSeq[WorkerOffer]): Seq[Seq[TaskDescription]] = synchronized {
// Mark each slave as alive and remember its hostname
// Also track if new executor is added
var newExecAvail = false
for (o <- offers) {
if (!hostToExecutors.contains(o.host)) {
hostToExecutors(o.host) = new HashSet[String]()
}
if (!executorIdToRunningTaskIds.contains(o.executorId)) {
hostToExecutors(o.host) += o.executorId
executorAdded(o.executorId, o.host)
executorIdToHost(o.executorId) = o.host
executorIdToRunningTaskIds(o.executorId) = HashSet[Long]()
newExecAvail = true
}
for (rack <- getRackForHost(o.host)) {
hostsByRack.getOrElseUpdate(rack, new HashSet[String]()) += o.host
}
}
    // Before making any offers, remove from the blacklist any nodes whose blacklist period has expired. Do
// this here to avoid a separate thread and added synchronization overhead, and also because
// updating the blacklist is only relevant when task offers are being made.
blacklistTrackerOpt.foreach(_.applyBlacklistTimeout())
val filteredOffers = blacklistTrackerOpt.map { blacklistTracker =>
offers.filter { offer =>
!blacklistTracker.isNodeBlacklisted(offer.host) &&
!blacklistTracker.isExecutorBlacklisted(offer.executorId)
}
}.getOrElse(offers)
val shuffledOffers = shuffleOffers(filteredOffers)
// Build a list of tasks to assign to each worker.
val tasks = shuffledOffers.map(o => new ArrayBuffer[TaskDescription](o.cores / CPUS_PER_TASK))
val availableCpus = shuffledOffers.map(o => o.cores).toArray
val sortedTaskSets = rootPool.getSortedTaskSetQueue
for (taskSet <- sortedTaskSets) {
logDebug("parentName: %s, name: %s, runningTasks: %s".format(
taskSet.parent.name, taskSet.name, taskSet.runningTasks))
if (newExecAvail) {
taskSet.executorAdded()
}
}
// Take each TaskSet in our scheduling order, and then offer it each node in increasing order
// of locality levels so that it gets a chance to launch local tasks on all of them.
// NOTE: the preferredLocality order: PROCESS_LOCAL, NODE_LOCAL, NO_PREF, RACK_LOCAL, ANY
for (taskSet <- sortedTaskSets) {
var launchedAnyTask = false
var launchedTaskAtCurrentMaxLocality = false
for (currentMaxLocality <- taskSet.myLocalityLevels) {
do {
launchedTaskAtCurrentMaxLocality = resourceOfferSingleTaskSet(
taskSet, currentMaxLocality, shuffledOffers, availableCpus, tasks)
launchedAnyTask |= launchedTaskAtCurrentMaxLocality
} while (launchedTaskAtCurrentMaxLocality)
}
if (!launchedAnyTask) {
taskSet.abortIfCompletelyBlacklisted(hostToExecutors)
}
}
if (tasks.size > 0) {
hasLaunchedTask = true
}
return tasks
}
/**
* Shuffle offers around to avoid always placing tasks on the same workers. Exposed to allow
* overriding in tests, so it can be deterministic.
*/
protected def shuffleOffers(offers: IndexedSeq[WorkerOffer]): IndexedSeq[WorkerOffer] = {
Random.shuffle(offers)
}
def statusUpdate(tid: Long, state: TaskState, serializedData: ByteBuffer) {
var failedExecutor: Option[String] = None
var reason: Option[ExecutorLossReason] = None
synchronized {
try {
taskIdToTaskSetManager.get(tid) match {
case Some(taskSet) =>
if (state == TaskState.LOST) {
// TaskState.LOST is only used by the deprecated Mesos fine-grained scheduling mode,
// where each executor corresponds to a single task, so mark the executor as failed.
val execId = taskIdToExecutorId.getOrElse(tid, throw new IllegalStateException(
"taskIdToTaskSetManager.contains(tid) <=> taskIdToExecutorId.contains(tid)"))
if (executorIdToRunningTaskIds.contains(execId)) {
reason = Some(
SlaveLost(s"Task $tid was lost, so marking the executor as lost as well."))
removeExecutor(execId, reason.get)
failedExecutor = Some(execId)
}
}
if (TaskState.isFinished(state)) {
cleanupTaskState(tid)
taskSet.removeRunningTask(tid)
if (state == TaskState.FINISHED) {
taskResultGetter.enqueueSuccessfulTask(taskSet, tid, serializedData)
} else if (Set(TaskState.FAILED, TaskState.KILLED, TaskState.LOST).contains(state)) {
taskResultGetter.enqueueFailedTask(taskSet, tid, state, serializedData)
}
}
case None =>
logError(
("Ignoring update with state %s for TID %s because its task set is gone (this is " +
"likely the result of receiving duplicate task finished status updates) or its " +
"executor has been marked as failed.")
.format(state, tid))
}
} catch {
case e: Exception => logError("Exception in statusUpdate", e)
}
}
// Update the DAGScheduler without holding a lock on this, since that can deadlock
if (failedExecutor.isDefined) {
assert(reason.isDefined)
dagScheduler.executorLost(failedExecutor.get, reason.get)
backend.reviveOffers()
}
}
/**
* Update metrics for in-progress tasks and let the master know that the BlockManager is still
* alive. Return true if the driver knows about the given block manager. Otherwise, return false,
* indicating that the block manager should re-register.
*/
override def executorHeartbeatReceived(
execId: String,
accumUpdates: Array[(Long, Seq[AccumulatorV2[_, _]])],
blockManagerId: BlockManagerId): Boolean = {
// (taskId, stageId, stageAttemptId, accumUpdates)
val accumUpdatesWithTaskIds: Array[(Long, Int, Int, Seq[AccumulableInfo])] = synchronized {
accumUpdates.flatMap { case (id, updates) =>
val accInfos = updates.map(acc => acc.toInfo(Some(acc.value), None))
taskIdToTaskSetManager.get(id).map { taskSetMgr =>
(id, taskSetMgr.stageId, taskSetMgr.taskSet.stageAttemptId, accInfos)
}
}
}
dagScheduler.executorHeartbeatReceived(execId, accumUpdatesWithTaskIds, blockManagerId)
}
def handleTaskGettingResult(taskSetManager: TaskSetManager, tid: Long): Unit = synchronized {
taskSetManager.handleTaskGettingResult(tid)
}
def handleSuccessfulTask(
taskSetManager: TaskSetManager,
tid: Long,
taskResult: DirectTaskResult[_]): Unit = synchronized {
taskSetManager.handleSuccessfulTask(tid, taskResult)
}
def handleFailedTask(
taskSetManager: TaskSetManager,
tid: Long,
taskState: TaskState,
reason: TaskFailedReason): Unit = synchronized {
taskSetManager.handleFailedTask(tid, taskState, reason)
if (!taskSetManager.isZombie && !taskSetManager.someAttemptSucceeded(tid)) {
// Need to revive offers again now that the task set manager state has been updated to
// reflect failed tasks that need to be re-run.
backend.reviveOffers()
}
}
def error(message: String) {
synchronized {
if (taskSetsByStageIdAndAttempt.nonEmpty) {
// Have each task set throw a SparkException with the error
for {
attempts <- taskSetsByStageIdAndAttempt.values
manager <- attempts.values
} {
try {
manager.abort(message)
} catch {
case e: Exception => logError("Exception in error callback", e)
}
}
} else {
// No task sets are active but we still got an error. Just exit since this
// must mean the error is during registration.
// It might be good to do something smarter here in the future.
throw new SparkException(s"Exiting due to error from cluster scheduler: $message")
}
}
}
override def stop() {
speculationScheduler.shutdown()
if (backend != null) {
backend.stop()
}
if (taskResultGetter != null) {
taskResultGetter.stop()
}
starvationTimer.cancel()
}
override def defaultParallelism(): Int = backend.defaultParallelism()
// Check for speculatable tasks in all our active jobs.
def checkSpeculatableTasks() {
var shouldRevive = false
synchronized {
shouldRevive = rootPool.checkSpeculatableTasks(MIN_TIME_TO_SPECULATION)
}
if (shouldRevive) {
backend.reviveOffers()
}
}
override def executorLost(executorId: String, reason: ExecutorLossReason): Unit = {
var failedExecutor: Option[String] = None
synchronized {
if (executorIdToRunningTaskIds.contains(executorId)) {
val hostPort = executorIdToHost(executorId)
logExecutorLoss(executorId, hostPort, reason)
removeExecutor(executorId, reason)
failedExecutor = Some(executorId)
} else {
executorIdToHost.get(executorId) match {
case Some(hostPort) =>
// If the host mapping still exists, it means we don't know the loss reason for the
// executor. So call removeExecutor() to update tasks running on that executor when
// the real loss reason is finally known.
logExecutorLoss(executorId, hostPort, reason)
removeExecutor(executorId, reason)
case None =>
// We may get multiple executorLost() calls with different loss reasons. For example,
// one may be triggered by a dropped connection from the slave while another may be a
// report of executor termination from Mesos. We produce log messages for both so we
// eventually report the termination reason.
logError(s"Lost an executor $executorId (already removed): $reason")
}
}
}
// Call dagScheduler.executorLost without holding the lock on this to prevent deadlock
if (failedExecutor.isDefined) {
dagScheduler.executorLost(failedExecutor.get, reason)
backend.reviveOffers()
}
}
override def workerRemoved(workerId: String, host: String, message: String): Unit = {
logInfo(s"Handle removed worker $workerId: $message")
dagScheduler.workerRemoved(workerId, host, message)
}
private def logExecutorLoss(
executorId: String,
hostPort: String,
reason: ExecutorLossReason): Unit = reason match {
case LossReasonPending =>
logDebug(s"Executor $executorId on $hostPort lost, but reason not yet known.")
case ExecutorKilled =>
logInfo(s"Executor $executorId on $hostPort killed by driver.")
case _ =>
logError(s"Lost executor $executorId on $hostPort: $reason")
}
/**
* Cleans up the TaskScheduler's state for tracking the given task.
*/
private def cleanupTaskState(tid: Long): Unit = {
taskIdToTaskSetManager.remove(tid)
taskIdToExecutorId.remove(tid).foreach { executorId =>
executorIdToRunningTaskIds.get(executorId).foreach { _.remove(tid) }
}
}
/**
* Remove an executor from all our data structures and mark it as lost. If the executor's loss
* reason is not yet known, do not yet remove its association with its host nor update the status
* of any running tasks, since the loss reason defines whether we'll fail those tasks.
*/
private def removeExecutor(executorId: String, reason: ExecutorLossReason) {
// The tasks on the lost executor may not send any more status updates (because the executor
// has been lost), so they should be cleaned up here.
executorIdToRunningTaskIds.remove(executorId).foreach { taskIds =>
logDebug("Cleaning up TaskScheduler state for tasks " +
s"${taskIds.mkString("[", ",", "]")} on failed executor $executorId")
// We do not notify the TaskSetManager of the task failures because that will
// happen below in the rootPool.executorLost() call.
taskIds.foreach(cleanupTaskState)
}
val host = executorIdToHost(executorId)
val execs = hostToExecutors.getOrElse(host, new HashSet)
execs -= executorId
if (execs.isEmpty) {
hostToExecutors -= host
for (rack <- getRackForHost(host); hosts <- hostsByRack.get(rack)) {
hosts -= host
if (hosts.isEmpty) {
hostsByRack -= rack
}
}
}
if (reason != LossReasonPending) {
executorIdToHost -= executorId
rootPool.executorLost(executorId, host, reason)
}
blacklistTrackerOpt.foreach(_.handleRemovedExecutor(executorId))
}
def executorAdded(execId: String, host: String) {
dagScheduler.executorAdded(execId, host)
}
def getExecutorsAliveOnHost(host: String): Option[Set[String]] = synchronized {
hostToExecutors.get(host).map(_.toSet)
}
def hasExecutorsAliveOnHost(host: String): Boolean = synchronized {
hostToExecutors.contains(host)
}
def hasHostAliveOnRack(rack: String): Boolean = synchronized {
hostsByRack.contains(rack)
}
def isExecutorAlive(execId: String): Boolean = synchronized {
executorIdToRunningTaskIds.contains(execId)
}
def isExecutorBusy(execId: String): Boolean = synchronized {
executorIdToRunningTaskIds.get(execId).exists(_.nonEmpty)
}
/**
* Get a snapshot of the currently blacklisted nodes for the entire application. This is
* thread-safe -- it can be called without a lock on the TaskScheduler.
*/
def nodeBlacklist(): scala.collection.immutable.Set[String] = {
blacklistTrackerOpt.map(_.nodeBlacklist()).getOrElse(scala.collection.immutable.Set())
}
// By default, rack is unknown
def getRackForHost(value: String): Option[String] = None
private def waitBackendReady(): Unit = {
if (backend.isReady) {
return
}
while (!backend.isReady) {
// Might take a while for backend to be ready if it is waiting on resources.
if (sc.stopped.get) {
// For example: the master removes the application for some reason
throw new IllegalStateException("Spark context stopped while waiting for backend")
}
synchronized {
this.wait(100)
}
}
}
override def applicationId(): String = backend.applicationId()
override def applicationAttemptId(): Option[String] = backend.applicationAttemptId()
private[scheduler] def taskSetManagerForAttempt(
stageId: Int,
stageAttemptId: Int): Option[TaskSetManager] = {
for {
attempts <- taskSetsByStageIdAndAttempt.get(stageId)
manager <- attempts.get(stageAttemptId)
} yield {
manager
}
}
/**
 * Marks the task as completed in all TaskSetManagers for the given stage.
*
* After stage failure and retry, there may be multiple TaskSetManagers for the stage.
* If an earlier attempt of a stage completes a task, we should ensure that the later attempts
* do not also submit those same tasks. That also means that a task completion from an earlier
* attempt can lead to the entire stage getting marked as successful.
*/
private[scheduler] def markPartitionCompletedInAllTaskSets(stageId: Int, partitionId: Int) = {
taskSetsByStageIdAndAttempt.getOrElse(stageId, Map()).values.foreach { tsm =>
tsm.markPartitionCompleted(partitionId)
}
}
}
private[spark] object TaskSchedulerImpl {
val SCHEDULER_MODE_PROPERTY = "spark.scheduler.mode"
/**
* Used to balance containers across hosts.
*
* Accepts a map of hosts to resource offers for that host, and returns a prioritized list of
* resource offers representing the order in which the offers should be used. The resource
* offers are ordered such that we'll allocate one container on each host before allocating a
* second container on any host, and so on, in order to reduce the damage if a host fails.
*
* For example, given {@literal <h1, [o1, o2, o3]>}, {@literal <h2, [o4]>} and
* {@literal <h3, [o5, o6]>}, returns {@literal [o1, o5, o4, o2, o6, o3]}.
*/
def prioritizeContainers[K, T] (map: HashMap[K, ArrayBuffer[T]]): List[T] = {
val _keyList = new ArrayBuffer[K](map.size)
_keyList ++= map.keys
    // Order keyList by how many offers each host has, descending
val keyList = _keyList.sortWith(
(left, right) => map(left).size > map(right).size
)
val retval = new ArrayBuffer[T](keyList.size * 2)
var index = 0
var found = true
while (found) {
found = false
for (key <- keyList) {
val containerList: ArrayBuffer[T] = map.getOrElse(key, null)
assert(containerList != null)
// Get the index'th entry for this host - if present
if (index < containerList.size) {
retval += containerList.apply(index)
found = true
}
}
index += 1
}
retval.toList
}
private def maybeCreateBlacklistTracker(sc: SparkContext): Option[BlacklistTracker] = {
if (BlacklistTracker.isBlacklistEnabled(sc.conf)) {
val executorAllocClient: Option[ExecutorAllocationClient] = sc.schedulerBackend match {
case b: ExecutorAllocationClient => Some(b)
case _ => None
}
Some(new BlacklistTracker(sc, executorAllocClient))
} else {
None
}
}
}
| szhem/spark | core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala | Scala | apache-2.0 | 29,777 |
package ostinato.chess
import ostinato.core.{BoardSize, XY}
import scala.util.control.NoStackTrace
import scala.util.{Failure, Success, Try}
package object core {
implicit val chessBoardSize = BoardSize(8, 8)
object CastlingSide extends Enumeration {
type CastlingSide = Value
val Queenside, Kingside = Value
}
object SquareColor extends Enumeration {
type SquareColor = Value
val Light, Dark = Value
}
lazy val chessPlayers: List[ChessPlayer] =
List(WhiteChessPlayer, BlackChessPlayer)
lazy val castlingSides = List(CastlingSide.Queenside, CastlingSide.Kingside)
lazy val castlingFullyAvailable
: Map[(ChessPlayer, CastlingSide.Value), Boolean] = (for {
    chessPlayer ← chessPlayers
    castlingSide ← castlingSides
  } yield (chessPlayer, castlingSide) -> true).toMap
  lazy val castlingFullyUnavailable = castlingFullyAvailable map (kv ⇒
    (kv._1, false))
  lazy val castlingOnlyBlackAvailable = castlingFullyAvailable map {
    case ((p, s), v) ⇒ ((p, s), p == BlackChessPlayer)
  }
  lazy val castlingOnlyWhiteAvailable = castlingFullyAvailable map {
    case ((p, s), v) ⇒ ((p, s), p == WhiteChessPlayer)
  }
}
def fenCastling(
castlingAvailable: Map[(ChessPlayer, CastlingSide.Value), Boolean]) =
if (castlingAvailable == castlingFullyUnavailable)
"-"
else
List(
"K" -> castlingAvailable((WhiteChessPlayer, CastlingSide.Kingside)),
"Q" -> castlingAvailable((WhiteChessPlayer, CastlingSide.Queenside)),
"k" -> castlingAvailable((BlackChessPlayer, CastlingSide.Kingside)),
"q" -> castlingAvailable((BlackChessPlayer, CastlingSide.Queenside))
).filter(_._2).map(_._1).mkString
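  // e.g. fenCastling(castlingFullyAvailable) == "KQkq" and
  // fenCastling(castlingOnlyWhiteAvailable) == "KQ", matching FEN's castling field.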
object ChessXY {
lazy val chars = "abcdefgh"
def fromAn(string: String) = {
val s = string.filter(_ > ' ').toLowerCase
if (s.length == 2 && s.matches("""[a-h][1-8]"""))
Some(XY(chars.indexOf(s(0)), 7 - (s(1).asDigit - 1)))
else
None
}
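    // e.g. fromAn("e4") == Some(XY(4, 4)): file 'e' is x = 4, and rank 4 maps to
    // y = 7 - (4 - 1) = 4 because the internal grid's y axis grows towards white's side.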
}
object ChessGrid {
def fromGridString(s: String): Vector[Option[ChessPiece]] = {
charVector(s) map {
        case ('♜', i) ⇒ Some(♜(XY.fromI(i), BlackChessPlayer))
        case ('♞', i) ⇒ Some(♞(XY.fromI(i), BlackChessPlayer))
        case ('♝', i) ⇒ Some(♝(XY.fromI(i), BlackChessPlayer))
        case ('♛', i) ⇒ Some(♛(XY.fromI(i), BlackChessPlayer))
        case ('♚', i) ⇒ Some(♚(XY.fromI(i), BlackChessPlayer))
        case ('♟', i) ⇒ Some(♟(XY.fromI(i), BlackChessPlayer, 1))
        case ('♖', i) ⇒ Some(♜(XY.fromI(i), WhiteChessPlayer))
        case ('♘', i) ⇒ Some(♞(XY.fromI(i), WhiteChessPlayer))
        case ('♗', i) ⇒ Some(♝(XY.fromI(i), WhiteChessPlayer))
        case ('♕', i) ⇒ Some(♛(XY.fromI(i), WhiteChessPlayer))
        case ('♔', i) ⇒ Some(♚(XY.fromI(i), WhiteChessPlayer))
        case ('♙', i) ⇒ Some(♟(XY.fromI(i), WhiteChessPlayer, -1))
        case _ ⇒ None
}
}
    def charVector(s: String) = s.split('\n').mkString.zipWithIndex.toVector
}
implicit class ChessXY(pos: XY) {
def squareColor =
if ((pos.x + pos.y) % 2 == 0) SquareColor.Light else SquareColor.Dark
val toAn = AnPos(ChessXY.chars(pos.x), 8 - pos.y)
val toIccf = IccfPos(iccfConversions(toAn.x), toAn.y)
lazy val dnConversions =
Map('a' -> Set("QR", "R"),
'b' -> Set("QN", "N", "QKt", "Kt"),
'c' -> Set("QB", "B"),
'd' -> Set("Q"),
'e' -> Set("K"),
'f' -> Set("KB", "B"),
'g' -> Set("KN", "N", "KKt", "Kt"),
'h' -> Set("KR", "R"))
lazy val iccfConversions = Map('a' -> 1,
'b' -> 2,
'c' -> 3,
'd' -> 4,
'e' -> 5,
'f' -> 6,
'g' -> 7,
'h' -> 8)
def toDn(turn: ChessPlayer) = {
(toAn, turn) match {
        case (AnPos(x, y), WhiteChessPlayer) ⇒
          dnConversions(x) map (DnPos(_, y))
        case (AnPos(x, y), BlackChessPlayer) ⇒
          dnConversions(x) map (DnPos(_, 9 - y))
}
}
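    // e.g. XY(0, 7) is a1, so toDn(WhiteChessPlayer) yields Set(DnPos("QR", 1), DnPos("R", 1));
    // for BlackChessPlayer the rank is mirrored via 9 - y.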
}
case class AnPos(x: Char, y: Int) {
override def toString = s"$x$y"
}
case class DnPos(x: String, y: Int) {
override def toString = s"$x$y"
}
object IccfPos {
def fromString(s: String): Try[IccfPos] = {
Try {
(s(0).toString.toInt, s(1).toString.toInt)
} flatMap {
        case (x, y) ⇒
if (x >= 1 && x <= 8 && y >= 1 && y <= 8)
Success(IccfPos(x, y))
else
Failure(InvalidIccfPos(s))
} recoverWith {
        case _ ⇒
Failure(InvalidIccfPos(s))
}
}
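    // e.g. fromString("52") == Success(IccfPos(5, 2)) (the e2 square, i.e. XY(4, 6)),
    // while fromString("09") or fromString("ab") yields Failure(InvalidIccfPos(...)).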
}
case class IccfPos(x: Int, y: Int) {
override def toString = s"$x$y"
lazy val toXY = XY(x - 1, 8 - y)
}
object Fan {
def checkmate(winner: ChessPlayer) =
if (winner == WhiteChessPlayer) "1-0" else "0-1"
def check = "+"
def kingSideCastle = "0-0"
def queenSideCastle = "0-0-0"
def draw = "Β½βΒ½"
}
object OstinatoString {
val pattern =
"""\\s*([rnbqkpRNBQKP\\/\\d]+\\s+[wb]\\s+([KQkq]{1,4}|\\-)\\s*[\\-abcdefgh12345678]{1,2}\\s*[\\d]{1,2}\\s*[\\d]{1,2})\\s*([1-8 ]*)"""
def splitFenIccf(s: String): Option[(String, String)] =
      pattern.r.findFirstMatchIn(s).map { m ⇒
(m.group(1), m.group(3))
}
def calculateHistory(iccfString: String): Try[List[GameStep]] = {
val startBoard = ChessGame.defaultGame.board
val zero: (Try[List[GameStep]], ChessBoard) =
(Success(List(GameStep(None, startBoard))), startBoard)
val actionStrings = iccfString.split(" +").toList.filterNot(_.isEmpty)
actionStrings
.foldLeft(zero) {
          case ((Success(gameSteps), currentBoard), s) ⇒
            IccfNotation.parseActionString(s, currentBoard) match {
              case Success(gs @ GameStep(_, b)) ⇒ (Success(gs :: gameSteps), b)
              case Failure(e) ⇒
                (Failure(InvalidIccfHistoryException(e)), startBoard)
            }
          case ((Failure(e), _), _) ⇒
            (Failure(InvalidIccfHistoryException(e)), startBoard)
}
._1
}
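    // e.g. calculateHistory("5254 5755") would replay 1. e4 e5, assuming
    // IccfNotation.parseActionString accepts from-file/from-rank/to-file/to-rank
    // digit quadruplets; note the resulting GameStep list is built most-recent-first.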
}
case class GameStep(action: Option[ChessAction], board: ChessBoard)
// N.B. this optimisation can appear ugly, but it's the only reason this library is fast
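  // Conceptually it is an unrolled equivalent of (sketch):
  //   knightJumps.exists(isEnemyKnight) ||
  //   slidingRays.exists(ray ⇒ the first piece along `ray` is an enemy slider of the
  //     matching kind, treating adjacent kings and capturing pawns as length-1 rays)
  // The manual unrolling avoids closure and collection allocations on a very hot path.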
def posThreatenedBy(pos: XY,
player: ChessPlayer,
board: ChessBoard): Option[ChessPiece] = {
    def isEnemyKnight(pos: XY) = board.get(pos) match {
      case Some(Some(p)) if p.owner == player.enemy && p.isKnight ⇒ true
      case _ ⇒ false
    }
    def isEnemyQueenOrKingOrRook(pos: XY) = board.get(pos) match {
      case Some(Some(p))
          if p.owner == player.enemy && (p.isQueen || p.isKing || p.isRook) ⇒
        true
      case _ ⇒ false
    }
    def isEnemyQueenOrKingOrBishop(pos: XY) = board.get(pos) match {
      case Some(Some(p))
          if p.owner == player.enemy && (p.isQueen || p.isKing || p.isBishop) ⇒
        true
      case _ ⇒ false
    }
    def isEnemyQueenOrKingOrBishopOrPawn(pos: XY) = board.get(pos) match {
      case Some(Some(p))
          if p.owner == player.enemy && (p.isQueen || p.isKing || p.isBishop || p.isPawn) ⇒
        true
      case _ ⇒ false
    }
    def isEnemyQueenOrRook(pos: XY) = board.get(pos) match {
      case Some(Some(p))
          if p.owner == player.enemy && (p.isQueen || p.isRook) ⇒
        true
      case _ ⇒ false
    }
    def isEnemyQueenOrBishop(pos: XY) = board.get(pos) match {
      case Some(Some(p))
          if p.owner == player.enemy && (p.isQueen || p.isBishop) ⇒
        true
      case _ ⇒ false
    }
def isPiece(pos: XY) = board.isPiece(board.get(pos))
if (isEnemyKnight(pos + XY(-1, -2)))
board.get(pos + XY(-1, -2)).flatten
else if (isEnemyKnight(pos + XY(1, -2)))
board.get(pos + XY(1, -2)).flatten
else if (isEnemyKnight(pos + XY(-1, 2)))
board.get(pos + XY(-1, 2)).flatten
else if (isEnemyKnight(pos + XY(1, 2)))
board.get(pos + XY(1, 2)).flatten
else if (isEnemyKnight(pos + XY(-2, -1)))
board.get(pos + XY(-2, -1)).flatten
else if (isEnemyKnight(pos + XY(-2, 1)))
board.get(pos + XY(-2, 1)).flatten
else if (isEnemyKnight(pos + XY(2, -1)))
board.get(pos + XY(2, -1)).flatten
else if (isEnemyKnight(pos + XY(2, 1)))
board.get(pos + XY(2, 1)).flatten
else {
if (isEnemyQueenOrKingOrRook(pos + XY(0, -1))) {
return board.get(pos + XY(0, -1)).flatten
} else if ((pos + XY(0, -1)).exists && !isPiece(pos + XY(0, -1))) {
if (isEnemyQueenOrRook(pos + XY(0, -2))) {
return board.get(pos + XY(0, -2)).flatten
} else if ((pos + XY(0, -2)).exists && !isPiece(pos + XY(0, -2))) {
if (isEnemyQueenOrRook(pos + XY(0, -3))) {
return board.get(pos + XY(0, -3)).flatten
} else if ((pos + XY(0, -3)).exists && !isPiece(pos + XY(0, -3))) {
if (isEnemyQueenOrRook(pos + XY(0, -4))) {
return board.get(pos + XY(0, -4)).flatten
} else if ((pos + XY(0, -4)).exists && !isPiece(pos + XY(0, -4))) {
if (isEnemyQueenOrRook(pos + XY(0, -5))) {
return board.get(pos + XY(0, -5)).flatten
} else if ((pos + XY(0, -5)).exists && !isPiece(pos + XY(0, -5))) {
if (isEnemyQueenOrRook(pos + XY(0, -6))) {
return board.get(pos + XY(0, -6)).flatten
} else if ((pos + XY(0, -6)).exists && !isPiece(
pos + XY(0, -6))) {
if (isEnemyQueenOrRook(pos + XY(0, -7)))
return board.get(pos + XY(0, -7)).flatten
}
}
}
}
}
}
if (isEnemyQueenOrKingOrRook(pos + XY(0, 1))) {
return board.get(pos + XY(0, 1)).flatten
} else if ((pos + XY(0, 1)).exists && !isPiece(pos + XY(0, 1))) {
if (isEnemyQueenOrRook(pos + XY(0, 2))) {
return board.get(pos + XY(0, 2)).flatten
} else if ((pos + XY(0, 2)).exists && !isPiece(pos + XY(0, 2))) {
if (isEnemyQueenOrRook(pos + XY(0, 3))) {
return board.get(pos + XY(0, 3)).flatten
} else if ((pos + XY(0, 3)).exists && !isPiece(pos + XY(0, 3))) {
if (isEnemyQueenOrRook(pos + XY(0, 4))) {
return board.get(pos + XY(0, 4)).flatten
} else if ((pos + XY(0, 4)).exists && !isPiece(pos + XY(0, 4))) {
if (isEnemyQueenOrRook(pos + XY(0, 5))) {
return board.get(pos + XY(0, 5)).flatten
} else if ((pos + XY(0, 5)).exists && !isPiece(pos + XY(0, 5))) {
if (isEnemyQueenOrRook(pos + XY(0, 6))) {
return board.get(pos + XY(0, 6)).flatten
} else if ((pos + XY(0, 6)).exists && !isPiece(pos + XY(0, 6))) {
if (isEnemyQueenOrRook(pos + XY(0, 7)))
return board.get(pos + XY(0, 7)).flatten
}
}
}
}
}
}
if (isEnemyQueenOrKingOrBishopOrPawn(pos + XY(1, 1))) {
val enemyPos = pos + XY(1, 1)
val enemy = player.enemy
board.get(enemyPos) match {
        case Some(Some(p)) if !p.isPawn ⇒
          return board.get(enemyPos).flatten
        case Some(Some(♟(`enemyPos`, `enemy`, -1))) ⇒
          return board.get(enemyPos).flatten
        case _ ⇒
}
} else if ((pos + XY(1, 1)).exists && !isPiece(pos + XY(1, 1))) {
if (isEnemyQueenOrBishop(pos + XY(2, 2))) {
return board.get(pos + XY(2, 2)).flatten
} else if ((pos + XY(2, 2)).exists && !isPiece(pos + XY(2, 2))) {
if (isEnemyQueenOrBishop(pos + XY(3, 3))) {
return board.get(pos + XY(3, 3)).flatten
} else if ((pos + XY(3, 3)).exists && !isPiece(pos + XY(3, 3))) {
if (isEnemyQueenOrBishop(pos + XY(4, 4))) {
return board.get(pos + XY(4, 4)).flatten
} else if ((pos + XY(4, 4)).exists && !isPiece(pos + XY(4, 4))) {
if (isEnemyQueenOrBishop(pos + XY(5, 5))) {
return board.get(pos + XY(5, 5)).flatten
} else if ((pos + XY(5, 5)).exists && !isPiece(pos + XY(5, 5))) {
if (isEnemyQueenOrBishop(pos + XY(6, 6))) {
return board.get(pos + XY(6, 6)).flatten
} else if ((pos + XY(6, 6)).exists && !isPiece(pos + XY(6, 6))) {
if (isEnemyQueenOrBishop(pos + XY(7, 7)))
return board.get(pos + XY(7, 7)).flatten
}
}
}
}
}
}
if (isEnemyQueenOrKingOrBishopOrPawn(pos + XY(-1, 1))) {
val enemyPos = pos + XY(-1, 1)
val enemy = player.enemy
board.get(enemyPos) match {
        case Some(Some(p)) if !p.isPawn ⇒
          return board.get(enemyPos).flatten
        case Some(Some(♟(`enemyPos`, `enemy`, -1))) ⇒
          return board.get(enemyPos).flatten
        case _ ⇒
}
} else if ((pos + XY(-1, 1)).exists && !isPiece(pos + XY(-1, 1))) {
if (isEnemyQueenOrBishop(pos + XY(-2, 2))) {
return board.get(pos + XY(-2, 2)).flatten
} else if ((pos + XY(-2, 2)).exists && !isPiece(pos + XY(-2, 2))) {
if (isEnemyQueenOrBishop(pos + XY(-3, 3))) {
return board.get(pos + XY(-3, 3)).flatten
} else if ((pos + XY(-3, 3)).exists && !isPiece(pos + XY(-3, 3))) {
if (isEnemyQueenOrBishop(pos + XY(-4, 4))) {
return board.get(pos + XY(-4, 4)).flatten
} else if ((pos + XY(-4, 4)).exists && !isPiece(pos + XY(-4, 4))) {
if (isEnemyQueenOrBishop(pos + XY(-5, 5))) {
return board.get(pos + XY(-5, 5)).flatten
} else if ((pos + XY(-5, 5)).exists && !isPiece(pos + XY(-5, 5))) {
if (isEnemyQueenOrBishop(pos + XY(-6, 6))) {
return board.get(pos + XY(-6, 6)).flatten
} else if ((pos + XY(-6, 6)).exists && !isPiece(
pos + XY(-6, 6))) {
if (isEnemyQueenOrBishop(pos + XY(-7, 7)))
return board.get(pos + XY(-7, 7)).flatten
}
}
}
}
}
}
if (isEnemyQueenOrKingOrBishopOrPawn(pos + XY(-1, -1))) {
val enemyPos = pos + XY(-1, -1)
val enemy = player.enemy
board.get(enemyPos) match {
        case Some(Some(p)) if !p.isPawn ⇒
          return board.get(enemyPos).flatten
        case Some(Some(♟(`enemyPos`, `enemy`, 1))) ⇒
          return board.get(enemyPos).flatten
        case _ ⇒
}
} else if ((pos + XY(-1, -1)).exists && !isPiece(pos + XY(-1, -1))) {
if (isEnemyQueenOrBishop(pos + XY(-2, -2))) {
return board.get(pos + XY(-2, -2)).flatten
} else if ((pos + XY(-2, -2)).exists && !isPiece(pos + XY(-2, -2))) {
if (isEnemyQueenOrBishop(pos + XY(-3, -3))) {
return board.get(pos + XY(-3, -3)).flatten
} else if ((pos + XY(-3, -3)).exists && !isPiece(pos + XY(-3, -3))) {
if (isEnemyQueenOrBishop(pos + XY(-4, -4))) {
return board.get(pos + XY(-4, -4)).flatten
} else if ((pos + XY(-4, -4)).exists && !isPiece(pos + XY(-4, -4))) {
if (isEnemyQueenOrBishop(pos + XY(-5, -5))) {
return board.get(pos + XY(-5, -5)).flatten
} else if ((pos + XY(-5, -5)).exists && !isPiece(
pos + XY(-5, -5))) {
if (isEnemyQueenOrBishop(pos + XY(-6, -6))) {
return board.get(pos + XY(-6, -6)).flatten
} else if ((pos + XY(-6, -6)).exists && !isPiece(
pos + XY(-6, -6))) {
if (isEnemyQueenOrBishop(pos + XY(-7, -7)))
return board.get(pos + XY(-7, -7)).flatten
}
}
}
}
}
}
if (isEnemyQueenOrKingOrBishopOrPawn(pos + XY(1, -1))) {
val enemyPos = pos + XY(1, -1)
val enemy = player.enemy
board.get(enemyPos) match {
        case Some(Some(p)) if !p.isPawn ⇒
          return board.get(enemyPos).flatten
        case Some(Some(♟(`enemyPos`, `enemy`, 1))) ⇒
          return board.get(enemyPos).flatten
        case _ ⇒
}
} else if ((pos + XY(1, -1)).exists && !isPiece(pos + XY(1, -1))) {
if (isEnemyQueenOrBishop(pos + XY(2, -2))) {
return board.get(pos + XY(2, -2)).flatten
} else if ((pos + XY(2, -2)).exists && !isPiece(pos + XY(2, -2))) {
if (isEnemyQueenOrBishop(pos + XY(3, -3))) {
return board.get(pos + XY(3, -3)).flatten
} else if ((pos + XY(3, -3)).exists && !isPiece(pos + XY(3, -3))) {
if (isEnemyQueenOrBishop(pos + XY(4, -4))) {
return board.get(pos + XY(4, -4)).flatten
} else if ((pos + XY(4, -4)).exists && !isPiece(pos + XY(4, -4))) {
if (isEnemyQueenOrBishop(pos + XY(5, -5))) {
return board.get(pos + XY(5, -5)).flatten
} else if ((pos + XY(5, -5)).exists && !isPiece(pos + XY(5, -5))) {
if (isEnemyQueenOrBishop(pos + XY(6, -6))) {
return board.get(pos + XY(6, -6)).flatten
} else if ((pos + XY(6, -6)).exists && !isPiece(
pos + XY(6, -6))) {
if (isEnemyQueenOrBishop(pos + XY(7, -7)))
return board.get(pos + XY(7, -7)).flatten
}
}
}
}
}
}
if (isEnemyQueenOrKingOrRook(pos + XY(1, 0))) {
return board.get(pos + XY(1, 0)).flatten
} else if ((pos + XY(1, 0)).exists && !isPiece(pos + XY(1, 0))) {
if (isEnemyQueenOrRook(pos + XY(2, 0))) {
return board.get(pos + XY(2, 0)).flatten
} else if ((pos + XY(2, 0)).exists && !isPiece(pos + XY(2, 0))) {
if (isEnemyQueenOrRook(pos + XY(3, 0))) {
return board.get(pos + XY(3, 0)).flatten
} else if ((pos + XY(3, 0)).exists && !isPiece(pos + XY(3, 0))) {
if (isEnemyQueenOrRook(pos + XY(4, 0))) {
return board.get(pos + XY(4, 0)).flatten
} else if ((pos + XY(4, 0)).exists && !isPiece(pos + XY(4, 0))) {
if (isEnemyQueenOrRook(pos + XY(5, 0))) {
return board.get(pos + XY(5, 0)).flatten
} else if ((pos + XY(5, 0)).exists && !isPiece(pos + XY(5, 0))) {
if (isEnemyQueenOrRook(pos + XY(6, 0))) {
return board.get(pos + XY(6, 0)).flatten
} else if ((pos + XY(6, 0)).exists && !isPiece(pos + XY(6, 0))) {
if (isEnemyQueenOrRook(pos + XY(7, 0)))
return board.get(pos + XY(7, 0)).flatten
}
}
}
}
}
}
if (isEnemyQueenOrKingOrRook(pos + XY(-1, 0))) {
return board.get(pos + XY(-1, 0)).flatten
} else if ((pos + XY(-1, 0)).exists && !isPiece(pos + XY(-1, 0))) {
if (isEnemyQueenOrRook(pos + XY(-2, 0))) {
return board.get(pos + XY(-2, 0)).flatten
} else if ((pos + XY(-2, 0)).exists && !isPiece(pos + XY(-2, 0))) {
if (isEnemyQueenOrRook(pos + XY(-3, 0))) {
return board.get(pos + XY(-3, 0)).flatten
} else if ((pos + XY(-3, 0)).exists && !isPiece(pos + XY(-3, 0))) {
if (isEnemyQueenOrRook(pos + XY(-4, 0))) {
return board.get(pos + XY(-4, 0)).flatten
} else if ((pos + XY(-4, 0)).exists && !isPiece(pos + XY(-4, 0))) {
if (isEnemyQueenOrRook(pos + XY(-5, 0))) {
return board.get(pos + XY(-5, 0)).flatten
} else if ((pos + XY(-5, 0)).exists && !isPiece(pos + XY(-5, 0))) {
if (isEnemyQueenOrRook(pos + XY(-6, 0))) {
return board.get(pos + XY(-6, 0)).flatten
} else if ((pos + XY(-6, 0)).exists && !isPiece(
pos + XY(-6, 0))) {
if (isEnemyQueenOrRook(pos + XY(-7, 0)))
return board.get(pos + XY(-7, 0)).flatten
}
}
}
}
}
}
None
}
}
}
case class InvalidIccfPos(s: String)
extends RuntimeException(s"Invalid Iccf coordinates: [$s]")
with NoStackTrace
case class InvalidIccfHistoryException(e: Throwable)
extends RuntimeException(s"Invalid ICCF History given:", e)
with NoStackTrace
| MarianoGappa/ostinato | shared/src/main/scala/ostinato/chess/core/package.scala | Scala | mit | 20,936 |
final object Foo // error
| som-snytt/dotty | tests/neg-custom-args/fatal-warnings/i4936.scala | Scala | apache-2.0 | 26 |
package com.github.astonbitecode.zoocache
/**
 * Performs the actions needed (if any) to notify the caller
*/
private[astonbitecode] trait MessageNotifyable {
/**
* Do stuff once the message handling was successful
* @param path The path for which the operation was successful
* @return Boolean True if the success logic was indeed invoked, false otherwise. This is needed because the
* method needs to be idempotent. In reality the message should notify the user for success or failure only once.
* However, the method may be called many times.
*/
def success(path: String): Boolean
/**
* Do stuff once the message handling encountered an error
* @param path The path for which the operation had failed
* @return Boolean True if the failure logic was indeed invoked, false otherwise. This is needed because the
* method needs to be idempotent. In reality the message should notify the user for success or failure only once.
* However, the method may be called many times.
*/
def failure(path: String, error: Throwable): Boolean
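  // Sketch of an idempotent implementor (hypothetical; uses an AtomicBoolean so
  // only the first success/failure call notifies, per the contract above):
  //   class OneShotNotify(p: scala.concurrent.Promise[String]) extends MessageNotifyable {
  //     private val done = new java.util.concurrent.atomic.AtomicBoolean(false)
  //     override def success(path: String): Boolean =
  //       done.compareAndSet(false, true) && { p.success(path); true }
  //     override def failure(path: String, error: Throwable): Boolean =
  //       done.compareAndSet(false, true) && { p.failure(error); true }
  //   }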
}
| astonbitecode/scakka-zoo-cache | src/main/scala/com/github/astonbitecode/zoocache/MessageNotifyable.scala | Scala | mit | 1,075 |
package com.twitter.finagle.service
import com.twitter.finagle.{ServiceFactoryProxy, ServiceFactory}
import com.twitter.util.Updatable
/**
* An updatable service factory proxy.
*
* @param init The starting factory to proxy to
*/
private[finagle] class ServiceFactoryRef[Req, Rep](init: ServiceFactory[Req, Rep])
extends ServiceFactoryProxy[Req, Rep](init)
with Updatable[ServiceFactory[Req, Rep]] {
@volatile private[this] var cur: ServiceFactory[Req, Rep] = init
def update(newFactory: ServiceFactory[Req, Rep]): Unit = {
cur = newFactory
}
override def self: ServiceFactory[Req, Rep] = cur
}
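// Usage sketch (hypothetical factories): the proxy lets the underlying
// ServiceFactory be swapped atomically, e.g. when a client is reconfigured at runtime:
//   val ref = new ServiceFactoryRef(initialFactory)
//   ref()                   // dispatches through initialFactory
//   ref.update(replacement) // later dispatches go through `replacement`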
| luciferous/finagle | finagle-core/src/main/scala/com/twitter/finagle/service/ServiceFactoryRef.scala | Scala | apache-2.0 | 622 |
/**
* Copyright (C) 2012 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.state
import collection.JavaConverters._
import sbinary.Operations._
import XFormsOperations._
import XFormsProtocols._
import org.orbeon.oxf.util.URLRewriterUtils.PathMatcher
import org.orbeon.oxf.xforms._
import control.Controls.ControlsIterator
import org.orbeon.oxf.xml.dom4j.Dom4jUtils
import org.orbeon.oxf.xml.{TransformerUtils, SAXStore}
import org.dom4j.Element
import org.orbeon.oxf.xforms.control.{XFormsComponentControl, XFormsControl}
// Immutable representation of the dynamic state
case class DynamicState(
uuid : String,
sequence : Long,
deploymentType : Option[String],
requestContextPath : Option[String],
requestPath : Option[String],
requestHeaders : List[(String, List[String])],
requestParameters : List[(String, List[String])],
containerType : Option[String],
containerNamespace : Option[String],
pathMatchers : Seq[Byte],
focusedControl : Option[String],
pendingUploads : Seq[Byte],
annotatedTemplate : Option[Seq[Byte]],
lastAjaxResponse : Seq[Byte],
instances : Seq[Byte],
controls : Seq[Byte]
) {
// Decode individual bits
def decodePathMatchers = fromByteSeq[List[PathMatcher]](pathMatchers)
def decodePendingUploads = fromByteSeq[Set[String]](pendingUploads)
def decodeAnnotatedTemplate = annotatedTemplate map (AnnotatedTemplate(_))
def decodeLastAjaxResponse = fromByteSeq[Option[SAXStore]](lastAjaxResponse)
def decodeInstances = fromByteSeq[List[InstanceState]](instances)
def decodeControls = fromByteSeq[List[ControlState]](controls)
// For Java callers
def decodeDeploymentTypeJava = deploymentType.orNull
def decodeRequestContextPathJava = requestContextPath.orNull
def decodeRequestPathJava = requestPath.orNull
def decodeContainerTypeJava = containerType.orNull
def decodeContainerNamespaceJava = containerNamespace.orNull
def decodePathMatchersJava = decodePathMatchers.asJava
def decodeFocusedControlJava = focusedControl.orNull
def decodePendingUploadsJava = decodePendingUploads.asJava
def decodeAnnotatedTemplateJava = decodeAnnotatedTemplate.orNull
def decodeLastAjaxResponseJava = decodeLastAjaxResponse.orNull
def decodeInstancesJava = decodeInstances.asJava
def decodeControlsJava = decodeControls.asJava
  def decodeInstancesControls = InstancesControls(decodeInstances, decodeControls map (c ⇒ (c.effectiveId, c)) toMap)
// For tests only
def copyUpdateSequence(sequence: Int) = copy(sequence = sequence)
// Encode to a string representation
def encodeToString(compress: Boolean, isForceEncryption: Boolean): String =
XFormsUtils.encodeBytes(
toByteArray(this),
compress,
isForceEncryption
)
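  // e.g. dynamicState.encodeToString(compress = true, isForceEncryption = false)
  // yields the opaque string form of this state (assumption: whether it is kept
  // server-side or round-tripped to the client depends on the state-handling mode).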
// Encode to an XML representation (as of 2012-02-05, used only by unit tests)
def toXML = {
val document = Dom4jUtils.createDocument
val rootElement = document.addElement("dynamic-state")
// Add UUIDs
rootElement.addAttribute("uuid", uuid)
rootElement.addAttribute("sequence", sequence.toString)
// Add request information
rootElement.addAttribute("deployment-type", deploymentType.orNull)
rootElement.addAttribute("request-context-path", requestContextPath.orNull)
rootElement.addAttribute("request-path", requestPath.orNull)
rootElement.addAttribute("container-type", containerType.orNull)
rootElement.addAttribute("container-namespace", containerNamespace.orNull)
// Add upload information
if (decodePendingUploads.nonEmpty)
rootElement.addAttribute("pending-uploads", decodePendingUploads mkString " ")
// Serialize instances
locally {
val instanceStates = decodeInstances
if (instanceStates.nonEmpty) {
val instancesElement = rootElement.addElement("instances")
// Encode to an XML representation (as of 2012-02-05, used only by unit tests)
def instanceToXML(instanceState: InstanceState): Element = {
val instanceElement = Dom4jUtils.createElement("instance")
def att(name: String, value: String): Unit = instanceElement.addAttribute(name, value)
att("id", XFormsUtils.getStaticIdFromId(instanceState.effectiveId))
att("model-id", instanceState.modelEffectiveId)
if (instanceState.readonly) att("readonly", "true")
instanceState.cachingOrContent match {
          case Left(caching) ⇒ caching.writeAttributes(att)
          case Right(content) ⇒ instanceElement.addText(content)
}
instanceElement
}
      instanceStates foreach (instanceState ⇒ instancesElement.add(instanceToXML(instanceState)))
}
}
// Serialize controls
locally {
val controls = decodeControls
if (controls.nonEmpty) {
val controlsElement = rootElement.addElement("controls")
controls foreach {
        case ControlState(effectiveId, visited, keyValues) ⇒
val controlElement = controlsElement.addElement("control")
controlElement.addAttribute("effective-id", effectiveId)
if (visited)
controlElement.addAttribute("visited", "true")
          for ((k, v) ← keyValues)
controlElement.addAttribute(k, v)
}
}
}
// Template and Ajax response
Seq(("template", decodeAnnotatedTemplate map (_.saxStore)), ("response", decodeLastAjaxResponse)) collect {
      case (elementName, Some(saxStore)) ⇒
val templateElement = rootElement.addElement(elementName)
val document = TransformerUtils.saxStoreToDom4jDocument(saxStore)
templateElement.add(document.getRootElement.detach())
}
document
}
private def debug(): Unit = {
val bytes = toByteSeq(this)
println(" size: " + bytes.size)
println(" versionedPathMatchers: " + pathMatchers.size)
println(" pendingUploads: " + pendingUploads.size)
println(" instances: " + instances.size)
println(" controls: " + controls.size)
println(" annotatedTemplate: " + (annotatedTemplate map (_.size) getOrElse 1))
println(" lastAjaxResponse: " + lastAjaxResponse.size)
val decodedParts = Array(
decodePathMatchersJava.toArray,
decodeFocusedControlJava,
decodePendingUploadsJava,
decodeControlsJava,
decodeInstancesJava.toArray,
decodeAnnotatedTemplateJava,
decodeLastAjaxResponseJava
)
val deserialized = fromByteSeq[DynamicState](bytes)
assert(this == deserialized)
}
}
// Minimal immutable representation of a serialized control
case class ControlState(effectiveId: String, visited: Boolean, keyValues: Map[String, String]) {
def keyValuesJava = keyValues.asJava
}
// Minimal immutable representation of a serialized instance
// If there is caching information, don't include the actual content
case class InstanceState(
effectiveId: String,
modelEffectiveId: String,
cachingOrContent: InstanceCaching Either String,
readonly: Boolean,
modified: Boolean,
valid: Boolean) {
def this(instance: XFormsInstance) =
this(
instance.getEffectiveId,
instance.parent.getEffectiveId,
instance.instanceCaching.toLeft(instance.contentAsString),
instance.readonly,
instance.modified,
instance.valid)
}
case class InstancesControls(instances: List[InstanceState], controls: Map[String, ControlState]) {
def instancesJava = instances.asJava
}
object DynamicState {
// Create a DynamicState from a document
def apply(document: XFormsContainingDocument): DynamicState =
apply(document, Option(document.getControls.getCurrentControlTree.getRoot))
// Create a DynamicState from a control
def apply(document: XFormsContainingDocument, startOpt: Option[XFormsControl]): DynamicState = {
val startContainer = startOpt match {
case Some(componentControl: XFormsComponentControl) ⇒ componentControl.nestedContainer
case Some(other) ⇒ other.container
case None ⇒ document
}
// Serialize relevant controls that have data
//
// - Repeat, switch and dialog controls serialize state (and have for a long time). The state of all other
// controls is rebuilt from model data. This way we minimize the size of serialized controls. In the future,
// more information might be serialized.
// - VisitableTrait controls serialize state if `visited == true`
def controlsToSerialize = {
val iterator =
for {
start ← startOpt.toList
control ← ControlsIterator(start, includeSelf = false)
if control.isRelevant
controlState ← control.controlState
} yield
controlState
iterator.toList
}
// Create the dynamic state object. A snapshot of the state is taken, whereby mutable parts of the state, such
// as instances, controls, HTML template, Ajax response, are first serialized to Seq[Byte]. A couple of notes:
//
// 1. We could serialize everything right away to a Seq[Byte] instead of a DynamicState instance, but in the
// scenario where the state is put in cache, then retrieved a bit later without having been pushed to
// external storage, this would be a waste.
//
// 2. Along the same lines, content that is already (conceptually) immutable, namely pathMatchers,
// annotatedTemplate, and lastAjaxResponse, could be serialized to bytes lazily.
//
// 3. In the cases where there is a large number of large instances or templates, parallel serialization might
// be something to experiment with.
DynamicState(
document.getUUID,
document.getSequence,
Option(document.getDeploymentType) map (_.toString),
Option(document.getRequestContextPath),
Option(document.getRequestPath),
document.getRequestHeaders mapValues (_.toList) toList, // mapValues ok because of toList
document.getRequestParameters mapValues (_.toList) toList, // mapValues ok because of toList
Option(document.getContainerType),
Option(document.getContainerNamespace),
toByteSeq(document.getVersionedPathMatchers.asScala.toList),
Option(document.getControls.getFocusedControl) map (_.getEffectiveId),
toByteSeq(document.getPendingUploads.asScala.toSet),
document.getTemplate map (_.asByteSeq), // template returns its own serialization
toByteSeq(Option(document.getLastAjaxResponse)),
toByteSeq(startContainer.allModels flatMap (_.getInstances.asScala) filter (_.mustSerialize) map (new InstanceState(_)) toList),
toByteSeq(controlsToSerialize)
)
}
// Create a DynamicState from an encoded string representation
def apply(encoded: String): DynamicState = {
val bytes = XFormsUtils.decodeBytes(encoded)
fromByteArray[DynamicState](bytes)
}
// Encode the given document to a string representation
def encodeDocumentToString(document: XFormsContainingDocument, compress: Boolean, isForceEncryption: Boolean): String =
DynamicState(document).encodeToString(compress, isForceEncryption || document.isClientStateHandling)
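// Illustrative sketch, not part of the original source: the apply/encode pair
// above composes into a round trip. `doc` is an assumed XFormsContainingDocument.
//
//   val encoded = DynamicState.encodeDocumentToString(doc, compress = true, isForceEncryption = false)
//   val decoded = DynamicState(encoded) // XFormsUtils.decodeBytes + fromByteArray
//   assert(decoded.uuid == doc.getUUID)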
} | wesley1001/orbeon-forms | src/main/scala/org/orbeon/oxf/xforms/state/DynamicState.scala | Scala | lgpl-2.1 | 12,043 |
package org.scalajs.nio
import scala.scalajs.js
import js.Dynamic.{ global => g }
import org.scalajs.dom
class AdaptiveShortBuffer(cap: Int, lim: Int, pos: Int, mar: Int, mBuffer: dom.ArrayBuffer,
mBufferOffset: Int, mByteOrder: ByteOrder) extends NativeShortBuffer(cap, lim, pos, mar, mBuffer, mBufferOffset) {
protected val littleEndian: Boolean = mByteOrder == LittleEndian
override protected def iGet(index: Int): Short = {
this.dataView.getInt16(index * this.bytes_per_element, this.littleEndian).toShort
}
override protected def iSet(index: Int, value: Short): Unit = {
this.dataView.setInt16(index * this.bytes_per_element, value, this.littleEndian)
}
override def duplicate(): ShortBuffer = {
new AdaptiveShortBuffer(this.mCapacity, this.mLimit, this.mPosition, this.mMark,
this.mBuffer, this.mBufferOffset, mByteOrder)
}
override def slice(): ShortBuffer = {
new AdaptiveShortBuffer(this.remaining, this.remaining, 0, -1, this.mBuffer,
this.mBufferOffset + (this.mPosition * this.bytes_per_element), mByteOrder)
}
override def asReadOnlyBuffer(): ShortBuffer = {
new ReadOnlyShortBuffer(this.duplicate)
}
override def order(): ByteOrder = {
if (littleEndian)
LittleEndian
else
BigEndian
}
override val hasJsArray = order() == ByteOrder.nativeOrder
override def jsArray(): dom.Int16Array = if (!hasJsArray) throw new UnsupportedOperationException else super.jsArray
override def toString = "AdaptiveShortBuffer[pos=" + this.position + " lim=" + this.limit + " cap=" + this.capacity + "]"
}
object AdaptiveShortBuffer {
def allocate(capacity: Int): NativeShortBuffer = this.allocate(capacity, ByteOrder.nativeOrder)
def allocate(capacity: Int, byteOrder: ByteOrder): NativeShortBuffer = {
if (byteOrder == ByteOrder.nativeOrder) {
NativeShortBuffer.allocate(capacity)
} else {
val jsBuffer = js.Dynamic.newInstance(g.ArrayBuffer)(capacity * NativeShortBuffer.BYTES_PER_ELEMENT).asInstanceOf[dom.ArrayBuffer]
val shortBuffer = new AdaptiveShortBuffer(capacity, capacity, 0, -1, jsBuffer, 0, byteOrder)
shortBuffer
}
}
def wrap(array: Array[Short]): NativeShortBuffer = this.wrap(array, ByteOrder.nativeOrder)
def wrap(array: Array[Short], byteOrder: ByteOrder): NativeShortBuffer = wrap(array, 0, array.length, byteOrder)
def wrap(array: Array[Short], offset: Int, length: Int): NativeShortBuffer = this.wrap(array, offset, length, ByteOrder.nativeOrder)
def wrap(array: Array[Short], offset: Int, length: Int, byteOrder: ByteOrder): NativeShortBuffer = {
val shortBuffer = this.allocate(length, byteOrder)
var i = 0
while (i < length) {
shortBuffer.put(i, array(i + offset))
i += 1
}
shortBuffer
}
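// Illustrative sketch, not part of the original source: requesting a non-native
// byte order. Reads and writes both go through the DataView-backed iGet/iSet
// overrides above, so a value round-trips regardless of platform endianness.
//
//   val buf = AdaptiveShortBuffer.allocate(4, BigEndian)
//   buf.put(0, 0x1234.toShort)
//   assert(buf.get(0) == 0x1234.toShort)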
} | storm-enroute/macrogl | src/buffer/scala/org/scalajs/nio/AdaptiveShortBuffer.scala | Scala | bsd-3-clause | 2,784 |
package object tut {
import Zed.{IO, StateT}
type Tut[A] = StateT[IO, TutState, A]
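// Illustrative note, not part of the original source: StateT[IO, TutState, A]
// is conceptually TutState => IO[(TutState, A)], so a Tut[A] threads the
// interpreter state through a sequence of IO actions.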
}
| tpolecat/tut | modules/core/src/main/scala/tut/package.scala | Scala | mit | 90 |
/*
This file is part of Octetoscope.
Copyright (C) 2013-2014 Octetoscope contributors (see /AUTHORS.txt)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ru.corrigendum.octetoscope.core
import org.scalatest.FunSuite
import org.scalatest.MustMatchers._
class CommonConstraintsSuite extends FunSuite {
test("nonNegative") {
CommonConstraints.nonNegative[Double].check(5.0) mustBe true
CommonConstraints.nonNegative[Int].check(0) mustBe true
CommonConstraints.nonNegative[Long].check(-1L) mustBe false
}
test("positive") {
CommonConstraints.positive[Short].check(4) mustBe true
CommonConstraints.positive[Byte].check(0) mustBe false
CommonConstraints.positive[Float].check(-2.0f) mustBe false
}
test("equalTo") {
val c = CommonConstraints.equalTo(5, "FIVE")
c.check(5) mustBe true
c.check(3) mustBe false
c.shouldNote must (include ("5") and include ("FIVE"))
c.mustNote must (include ("5") and include ("FIVE"))
}
test("noMoreThan") {
val c = CommonConstraints.noMoreThan(-2, "MINUS TWO")
c.check(-5) mustBe true
c.check(-2) mustBe true
c.check(0) mustBe false
c.shouldNote must (include ("-2") and include ("MINUS TWO"))
c.mustNote must (include ("-2") and include ("MINUS TWO"))
}
test("lessThan") {
val c = CommonConstraints.lessThan(1, "ONE")
c.check(0) mustBe true
c.check(1) mustBe false
c.check(4) mustBe false
c.shouldNote must (include ("1") and include ("ONE"))
c.mustNote must (include ("1") and include ("ONE"))
}
test("divisibleBy") {
val c = CommonConstraints.divisibleBy(5)
c.check(0) mustBe true
c.check(1) mustBe false
c.check(-1) mustBe false
c.check(10) mustBe true
c.check(-10) mustBe true
c.shouldNote must include("5")
c.mustNote must include("5")
}
test("false") {
CommonConstraints.`false`.check(true) mustBe false
CommonConstraints.`false`.check(false) mustBe true
}
}
| SpecLad/Octetoscope | core/src/test/scala/ru/corrigendum/octetoscope/core/CommonConstraintsSuite.scala | Scala | gpl-3.0 | 2,560 |
/*
* Copyright (C) 2020 MapRoulette contributors (see CONTRIBUTORS.md).
* Licensed under the Apache License, Version 2.0 (see LICENSE).
*/
package org.maproulette.provider.osm.objects
import org.joda.time.DateTime
import org.maproulette.cache.CacheObject
import org.maproulette.provider.osm.OSMType
import org.maproulette.provider.osm.OSMType.OSMType
import scala.collection.mutable
import scala.xml.{Attribute, Elem, Null, Text}
/**
* A container of versioned objects: essentially the same OSM object, stored once per version.
*
* @author mcuthbert
*/
class VersionedObjects[T <: VersionedObject](override val id: Long, override val name: String)
extends CacheObject[Long] {
private val versions = mutable.Map[Int, T]()
def get(version: Int): Option[T] = this.versions.get(version)
def put(versionedObject: T): Unit = this.versions.put(versionedObject.version, versionedObject)
def getLatest: Option[T] = {
if (this.versions.isEmpty) {
None
} else {
Some(this.versions(this.versions.keys.max))
}
}
}
trait VersionedObject {
def name: String
def id: Long
def visible: Boolean
def version: Int
def changeset: Int
def timestamp: DateTime
def user: String
def uid: Long
def tags: Map[String, String]
def toChangeElement(changeSetId: Int): Elem
def getOSMType: OSMType
}
case class VersionedNode(
override val name: String,
override val id: Long,
override val visible: Boolean,
override val version: Int,
override val changeset: Int,
override val timestamp: DateTime,
override val user: String,
override val uid: Long,
override val tags: Map[String, String],
lat: Double,
lon: Double
) extends VersionedObject {
override def toChangeElement(changesetId: Int): Elem = {
<node>
{
for (tagKV <- tags)
yield <tag/> % Attribute("k", Text(tagKV._1), Attribute("v", Text(tagKV._2), Null))
}
</node> % Attribute(
"visible",
Text(visible.toString),
Attribute(
"changeset",
Text(changesetId.toString),
Attribute(
"version",
Text(version.toString),
Attribute(
"user",
Text(user),
Attribute(
"uid",
Text(uid.toString),
Attribute(
"id",
Text(id.toString),
Attribute("lat", Text(lat.toString), Attribute("lon", Text(lon.toString), Null))
)
)
)
)
)
)
}
override def getOSMType: OSMType = OSMType.NODE
}
case class VersionedWay(
override val name: String,
override val id: Long,
override val visible: Boolean,
override val version: Int,
override val changeset: Int,
override val timestamp: DateTime,
override val user: String,
override val uid: Long,
override val tags: Map[String, String],
nodes: List[Long]
) extends VersionedObject {
override def toChangeElement(changesetId: Int): Elem = {
<way>
{for (nodeRef <- nodes) yield <nd/> % Attribute("ref", Text(nodeRef.toString), Null)}
{
for (tagKV <- tags)
yield <tag/> % Attribute("k", Text(tagKV._1), Attribute("v", Text(tagKV._2), Null))
}
</way> % Attribute(
"visible",
Text(visible.toString),
Attribute(
"changeset",
Text(changesetId.toString),
Attribute(
"version",
Text(version.toString),
Attribute(
"user",
Text(user),
Attribute("uid", Text(uid.toString), Attribute("id", Text(id.toString), Null))
)
)
)
)
}
override def getOSMType: OSMType = OSMType.WAY
}
case class VersionedRelation(
override val name: String,
override val id: Long,
override val visible: Boolean,
override val version: Int,
override val changeset: Int,
override val timestamp: DateTime,
override val user: String,
override val uid: Long,
override val tags: Map[String, String],
members: List[RelationMember]
) extends VersionedObject {
override def toChangeElement(changesetId: Int): Elem = {
<relation>
{
for (member <- members)
yield <member/> % Attribute(
"type",
Text(member.osmType),
Attribute("ref", Text(member.ref.toString), Attribute("role", Text(member.role), Null))
)
}{
for (tagKV <- tags)
yield <tag/> % Attribute("k", Text(tagKV._1), Attribute("v", Text(tagKV._2), Null))
}
</relation> % Attribute(
"visible",
Text(visible.toString),
Attribute(
"changeset",
Text(changesetId.toString),
Attribute(
"version",
Text(version.toString),
Attribute(
"user",
Text(user),
Attribute("uid", Text(uid.toString), Attribute("id", Text(id.toString), Null))
)
)
)
)
}
override def getOSMType: OSMType = OSMType.RELATION
}
case class RelationMember(osmType: String, ref: Long, role: String)
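// Illustrative sketch, not part of the original source: the nested
// Attribute(name, value, next) chaining used in the toChangeElement methods
// above. Each Attribute links to the next and the chain ends in Null; `%`
// attaches the whole chain to the element.
object AttributeChainingExample {
  def demo(): Elem =
    <member/> % Attribute(
      "type",
      Text("node"),
      Attribute("ref", Text("42"), Attribute("role", Text("outer"), Null))
    )
  // demo() renders as <member type="node" ref="42" role="outer"/>
  // (attribute order in the serialized XML is not significant).
}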
| mgcuthbert/maproulette2 | app/org/maproulette/provider/osm/objects/VersionedObjects.scala | Scala | apache-2.0 | 5,087 |
package dotty.tools.dotc
package ast
import core._
import Types._, Names._, Flags._, util.Positions._, Contexts._, Constants._, SymDenotations._, Symbols._
import Denotations._, StdNames._
import annotation.tailrec
import language.higherKinds
import collection.IndexedSeqOptimized
import collection.immutable.IndexedSeq
import collection.mutable.ListBuffer
import parsing.Tokens.Token
import printing.Printer
import util.{Stats, Attachment, DotClass}
import annotation.unchecked.uncheckedVariance
object Trees {
// Note: it would be more logical to make Untyped = Nothing.
// However, this interacts in a bad way with Scala's current type inference.
// In fact, we cannot write something like Select(pre, name), where pre is
// of type Tree[Nothing]; type inference will treat the Nothing as an uninstantiated
// value and will not infer Nothing as the type parameter for Select.
// We should come back to this issue once type inference is changed.
type Untyped = Null
/** The total number of created tree nodes, maintained if Stats.enabled */
var ntrees = 0
/** A base class for things that have positions (currently: modifiers and trees)
*/
abstract class Positioned extends DotClass with Product {
private[this] var curPos: Position = _
setPos(initialPos)
/** The item's position.
*/
def pos: Position = curPos
/** Destructively update `curPos` to given position. Also, set any missing
* positions in children.
*/
protected def setPos(pos: Position): Unit = {
curPos = pos
if (pos.exists) setChildPositions(pos.toSynthetic)
}
/** The envelope containing the item in its entirety. Envelope is different from
* `pos` for definitions (instances of MemberDef).
*/
def envelope: Position = pos.toSynthetic
/** A positioned item like this one with the position set to `pos`.
* If the positioned item is source-derived, a clone is returned.
* If the positioned item is synthetic, the position is updated
* destructively and the item itself is returned.
*/
def withPos(pos: Position): this.type = {
val newpd = (if (pos == curPos || curPos.isSynthetic) this else clone).asInstanceOf[Positioned]
newpd.setPos(pos)
newpd.asInstanceOf[this.type]
}
def withPos(posd: Positioned): this.type =
if (posd == null) this else withPos(posd.pos)
/** This item with a position that's the union of the given `pos` and the
* current position.
*/
def addPos(pos: Position): this.type = withPos(pos union this.pos)
/** If any children of this node do not have positions, set them to the given position,
* and transitively visit their children.
*/
private def setChildPositions(pos: Position): Unit = {
def deepSetPos(x: Any): Unit = x match {
case p: Positioned =>
if (!p.pos.exists) p.setPos(pos)
case xs: List[_] =>
xs foreach deepSetPos
case _ =>
}
var n = productArity
while (n > 0) {
n -= 1
deepSetPos(productElement(n))
}
}
/** The initial, synthetic position. This is usually the union of all positioned children's
* envelopes.
*/
protected def initialPos: Position = {
var n = productArity
var pos = NoPosition
while (n > 0) {
n -= 1
productElement(n) match {
case p: Positioned => pos = pos union p.envelope
case xs: List[_] => pos = unionPos(pos, xs)
case _ =>
}
}
pos.toSynthetic
}
private def unionPos(pos: Position, xs: List[_]): Position = xs match {
case (t: Tree[_]) :: xs1 => unionPos(pos union t.envelope, xs1)
case _ => pos
}
def contains(that: Positioned): Boolean = {
def isParent(x: Any): Boolean = x match {
case x: Positioned =>
x contains that
case xs: List[_] =>
xs exists isParent
case _ =>
false
}
(this eq that) ||
(this.envelope contains that.pos) && {
var n = productArity
var found = false
while (n > 0 && !found) {
n -= 1
found = isParent(productElement(n))
}
found
}
}
}
/** Modifiers and annotations for definitions
* @param flags The set flags
* @param privateWithin If a private or protected modifier is followed by a
* qualifier [q], the name q, "" as a typename otherwise.
* @param annotations The annotations preceding the modifiers
* @param positions A flagPositions structure that records the positions
* of set flags.
* @param pos The position of the modifiers. This should start with
* the first modifier or annotation and have as point
* the start of the opening keyword(s) of the definition.
* It should have as end the end of the opening keywords(s).
* If there is no opening keyword, point should equal end.
*/
case class Modifiers[-T >: Untyped] (
flags: FlagSet = EmptyFlags,
privateWithin: TypeName = tpnme.EMPTY,
annotations: List[Tree[T]] = Nil) extends Positioned with Cloneable {
def is(fs: FlagSet): Boolean = flags is fs
def is(fc: FlagConjunction): Boolean = flags is fc
def | (fs: FlagSet): Modifiers[T] = withFlags(flags | fs)
def & (fs: FlagSet): Modifiers[T] = withFlags(flags & fs)
def &~(fs: FlagSet): Modifiers[T] = withFlags(flags &~ fs)
def toTypeFlags: Modifiers[T] = withFlags(flags.toTypeFlags)
def toTermFlags: Modifiers[T] = withFlags(flags.toTermFlags)
private def withFlags(flags: FlagSet) =
if (this.flags == flags) this
else copy(flags = flags)
def withAnnotations[U >: Untyped <: T](annots: List[Tree[U]]): Modifiers[U] =
if (annots.isEmpty) this
else copy(annotations = annotations ++ annots)
def withPrivateWithin(pw: TypeName) =
if (pw.isEmpty) this
else copy(privateWithin = pw)
def hasFlags = flags != EmptyFlags
def hasAnnotations = annotations.nonEmpty
def hasPrivateWithin = privateWithin != tpnme.EMPTY
def tokenPos: Seq[(Token, Position)] = ???
}
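// Illustrative sketch, not part of the original source: the flag operations
// above are allocation-free when nothing changes, because withFlags returns
// `this` if the flag set is unaffected.
//
//   val mods = Modifiers(Private)
//   mods | Private // returns mods itself: flags unchanged
//   mods | Final   // returns a fresh copy carrying both flags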
/** Trees take a parameter indicating what the type of their `tpe` field
* is. Two choices: `Type` or `Untyped`.
* Untyped trees have type `Tree[Untyped]`.
*
* Tree typing uses a copy-on-write implementation:
*
* - You can never observe a `tpe` which is `null` (throws an exception)
* - So when creating a typed tree with `withType` we can re-use
* the existing tree transparently, assigning its `tpe` field,
* provided it was `null` before.
* - It is impossible to embed untyped trees in typed ones.
* - Typed trees can be embedded in untyped ones provided they are rooted
* in a TypedSplice node.
* - Type checking an untyped tree should remove all embedded `TypedSplice`
* nodes.
*/
abstract class Tree[-T >: Untyped] extends Positioned
with Product
with Attachment.Container
with printing.Showable
with Cloneable {
if (Stats.enabled) ntrees += 1
/** The type constructor at the root of the tree */
type ThisTree[T >: Untyped] <: Tree[T]
private[this] var myTpe: T = _
/** Destructively set the type of the tree. This should be called only when it is known that
* it is safe under sharing to do so. One use-case is in the withType method below
* which implements copy-on-write. Another use-case is in method interpolateAndAdapt in Typer,
* where we overwrite with a simplified version of the type itself.
*/
private[dotc] def overwriteType(tpe: T) = myTpe = tpe
/** The type of the tree. In case of an untyped tree,
* an UnAssignedTypeException is thrown. (Overridden by empty trees)
*/
def tpe: T @uncheckedVariance = {
if (myTpe == null) throw new UnAssignedTypeException(this)
myTpe
}
/** Copy `tpe` attribute from tree `from` into this tree, independently
* of whether it is null or not.
final def copyAttr[U >: Untyped](from: Tree[U]): ThisTree[T] = {
val t1 = this.withPos(from.pos)
val t2 =
if (from.myTpe != null) t1.withType(from.myTpe.asInstanceOf[Type])
else t1
t2.asInstanceOf[ThisTree[T]]
}
*/
/** Return a typed tree that's isomorphic to this tree, but has given
* type. (Overridden by empty trees)
*/
def withType(tpe: Type)(implicit ctx: Context): ThisTree[Type] = {
if (tpe == ErrorType) assert(ctx.errorsReported)
withTypeUnchecked(tpe)
}
def withTypeUnchecked(tpe: Type): ThisTree[Type] = {
val tree =
(if (myTpe == null ||
(myTpe.asInstanceOf[AnyRef] eq tpe.asInstanceOf[AnyRef])) this
else clone).asInstanceOf[Tree[Type]]
tree overwriteType tpe
tree.asInstanceOf[ThisTree[Type]]
}
/** Does the tree have its type field set? Note: this operation is not
* referentially transparent, because it can observe the withType
* modifications. Should be used only in special circumstances (we
* need it for printing trees with optional type info).
*/
final def hasType: Boolean = myTpe != null
final def typeOpt: Type = myTpe match {
case tp: Type => tp
case _ => NoType
}
/** The denotation referred to by this tree.
* Defined for `DenotingTree`s and `ProxyTree`s, NoDenotation for other
* kinds of trees
*/
def denot(implicit ctx: Context): Denotation = NoDenotation
/** Shorthand for `denot.symbol`. */
final def symbol(implicit ctx: Context): Symbol = denot.symbol
/** Does this tree represent a type? */
def isType: Boolean = false
/** Does this tree represent a term? */
def isTerm: Boolean = false
/** Is this a legal part of a pattern which is not at the same time a term? */
def isPattern: Boolean = false
/** Does this tree define a new symbol that is not defined elsewhere? */
def isDef: Boolean = false
/** Is this tree either the empty tree or the empty ValDef? */
def isEmpty: Boolean = false
/** Convert tree to a list. Gives a singleton list, except
* for thickets which return their element trees.
*/
def toList: List[Tree[T]] = this :: Nil
/** if this tree is the empty tree, the alternative, else this tree */
def orElse[U >: Untyped <: T](that: => Tree[U]): Tree[U] =
if (this eq genericEmptyTree) that else this
/** The number of nodes in this tree */
def treeSize: Int = {
var s = 1
def addSize(elem: Any): Unit = elem match {
case t: Tree[_] => s += t.treeSize
case ts: List[_] => ts foreach addSize
case _ =>
}
productIterator foreach addSize
s
}
override def toText(printer: Printer) = printer.toText(this)
override def hashCode(): Int = System.identityHashCode(this)
override def equals(that: Any) = this eq that.asInstanceOf[AnyRef]
}
class UnAssignedTypeException[T >: Untyped](tree: Tree[T]) extends RuntimeException {
override def getMessage: String = s"type of $tree is not assigned"
}
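// Illustrative sketch, not part of the original source: the copy-on-write
// discipline described above. Setting a type on a tree whose `tpe` is still
// unset re-uses the node; setting a different type on a typed tree clones it.
//
//   val t1 = t0.withTypeUnchecked(tp)  // t0 untyped: t1 eq t0
//   val t2 = t1.withTypeUnchecked(tp2) // tp2 ne tp: t2 is a fresh clone
//   assert(t1.tpe eq tp)               // t1 is unchanged by the second call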
// ------ Categories of trees -----------------------------------
/** Instances of this class are trees for which isType is definitely true.
* Note that some trees have isType = true without being TypTrees (e.g. Ident, AnnotatedTree)
*/
trait TypTree[-T >: Untyped] extends Tree[T] {
type ThisTree[-T >: Untyped] <: TypTree[T]
override def isType = true
}
/** Instances of this class are trees for which isTerm is definitely true.
* Note that some trees have isTerm = true without being TermTrees (e.g. Ident, AnnotatedTree)
*/
trait TermTree[-T >: Untyped] extends Tree[T] {
type ThisTree[-T >: Untyped] <: TermTree[T]
override def isTerm = true
}
/** Instances of this class are trees which are not terms but are legal
* parts of patterns.
*/
trait PatternTree[-T >: Untyped] extends Tree[T] {
type ThisTree[-T >: Untyped] <: PatternTree[T]
override def isPattern = true
}
/** Tree's denotation can be derived from its type */
abstract class DenotingTree[-T >: Untyped] extends Tree[T] {
type ThisTree[-T >: Untyped] <: DenotingTree[T]
override def denot(implicit ctx: Context) = tpe match {
case tpe: NamedType => tpe.denot
case _ => NoDenotation
}
}
/** Tree's denot/isType/isTerm properties come from a subtree
* identified by `forwardTo`.
*/
abstract class ProxyTree[-T >: Untyped] extends Tree[T] {
type ThisTree[-T >: Untyped] <: ProxyTree[T]
def forwardTo: Tree[T]
override def denot(implicit ctx: Context): Denotation = forwardTo.denot
override def isTerm = forwardTo.isTerm
override def isType = forwardTo.isType
}
/** Tree has a name */
abstract class NameTree[-T >: Untyped] extends DenotingTree[T] {
type ThisTree[-T >: Untyped] <: NameTree[T]
def name: Name
def withName(name1: Name)(implicit ctx: Context): untpd.NameTree
}
/** Tree refers by name to a denotation */
abstract class RefTree[-T >: Untyped] extends NameTree[T] {
type ThisTree[-T >: Untyped] <: RefTree[T]
def qualifier: Tree[T]
override def isType = name.isTypeName
override def isTerm = name.isTermName
}
/** Tree defines a new symbol */
trait DefTree[-T >: Untyped] extends DenotingTree[T] {
type ThisTree[-T >: Untyped] <: DefTree[T]
override def isDef = true
}
/** Tree defines a new symbol and carries modifiers.
* The position of a MemberDef contains only the defined identifier or pattern.
* The envelope of a MemberDef contains the whole definition and has its point
* on the opening keyword (or the next token after that if keyword is missing).
*/
trait MemberDef[-T >: Untyped] extends NameTree[T] with DefTree[T] {
type ThisTree[-T >: Untyped] <: MemberDef[T]
def mods: Modifiers[T]
override def envelope: Position = mods.pos union pos union initialPos
}
/** A ValDef or DefDef tree */
trait ValOrDefDef[-T >: Untyped] extends MemberDef[T] {
def tpt: Tree[T]
def rhs: Tree[T]
}
// ----------- Tree case classes ------------------------------------
/** name */
case class Ident[-T >: Untyped] private[ast] (name: Name)
extends RefTree[T] {
type ThisTree[-T >: Untyped] = Ident[T]
def withName(name: Name)(implicit ctx: Context): untpd.Ident = untpd.cpy.Ident(this, name)
def qualifier: Tree[T] = genericEmptyTree
}
class BackquotedIdent[-T >: Untyped] private[ast] (name: Name)
extends Ident[T](name)
/** qualifier.name */
case class Select[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)
extends RefTree[T] {
type ThisTree[-T >: Untyped] = Select[T]
def withName(name: Name)(implicit ctx: Context): untpd.Select = untpd.cpy.Select(this, qualifier, name)
}
class SelectWithSig[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name, val sig: Signature)
extends Select[T](qualifier, name)
/** qual.this */
case class This[-T >: Untyped] private[ast] (qual: TypeName)
extends DenotingTree[T] with TermTree[T] {
type ThisTree[-T >: Untyped] = This[T]
}
/** C.super[mix], where qual = C.this */
case class Super[-T >: Untyped] private[ast] (qual: Tree[T], mix: TypeName)
extends ProxyTree[T] with TermTree[T] {
type ThisTree[-T >: Untyped] = Super[T]
def forwardTo = qual
}
abstract class GenericApply[-T >: Untyped] extends ProxyTree[T] with TermTree[T] {
type ThisTree[-T >: Untyped] <: GenericApply[T]
val fun: Tree[T]
val args: List[Tree[T]]
def forwardTo = fun
}
/** fun(args) */
case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])
extends GenericApply[T] {
type ThisTree[-T >: Untyped] = Apply[T]
}
/** fun[args] */
case class TypeApply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])
extends GenericApply[T] {
type ThisTree[-T >: Untyped] = TypeApply[T]
}
/** const */
case class Literal[-T >: Untyped] private[ast] (const: Constant)
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Literal[T]
}
/** new tpt, but no constructor call */
case class New[-T >: Untyped] private[ast] (tpt: Tree[T])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = New[T]
}
/** (left, right) */
case class Pair[-T >: Untyped] private[ast] (left: Tree[T], right: Tree[T])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Pair[T]
override def isTerm = left.isTerm && right.isTerm
override def isType = left.isType && right.isType
override def isPattern = !isTerm && (left.isPattern || left.isTerm) && (right.isPattern || right.isTerm)
}
/** expr : tpt */
case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])
extends ProxyTree[T] with TermTree[T] {
type ThisTree[-T >: Untyped] = Typed[T]
def forwardTo = expr
}
/** name = arg, in a parameter list */
case class NamedArg[-T >: Untyped] private[ast] (name: Name, arg: Tree[T])
extends Tree[T] {
type ThisTree[-T >: Untyped] = NamedArg[T]
}
/** name = arg, outside a parameter list */
case class Assign[-T >: Untyped] private[ast] (lhs: Tree[T], rhs: Tree[T])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Assign[T]
}
/** { stats; expr } */
case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Block[T]
}
/** if cond then thenp else elsep */
case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = If[T]
}
/** A closure with an environment and a reference to a method.
* @param env The captured parameters of the closure
* @param meth A ref tree that refers to the method of the closure.
* The first (env.length) parameters of that method are filled
* with env values.
* @param tpt Either EmptyTree or a TypeTree. If tpt is EmptyTree the type
* of the closure is a function type, otherwise it is the type
* given in `tpt`, which must be a SAM type.
*/
case class Closure[-T >: Untyped] private[ast] (env: List[Tree[T]], meth: Tree[T], tpt: Tree[T])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Closure[T]
}
/** selector match { cases } */
case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Match[T]
}
/** case pat if guard => body; only appears as child of a Match */
case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])
extends Tree[T] {
type ThisTree[-T >: Untyped] = CaseDef[T]
}
/** return expr
* where `from` refers to the method from which the return takes place
* After program transformations this is not necessarily the enclosing method, because
* closures can intervene.
*/
case class Return[-T >: Untyped] private[ast] (expr: Tree[T], from: Tree[T] = genericEmptyTree)
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Return[T]
}
/** try block catch handler finally finalizer
*
* Note: if the handler is a case block CASES of the form
*
* { case1 ... caseN }
*
* the parser returns Match(EmptyTree, CASES). Desugaring and typing this yields a closure
* node
*
* { def $anonfun(x: Throwable) = x match CASES; Closure(Nil, $anonfun) }
*
* At some later stage when we normalize the try we can revert this to
*
* Match(EmptyTree, CASES)
*
* or else if stack is non-empty
*
* Match(EmptyTree, <case x: Throwable => $anonfun(x)>)
*/
case class Try[-T >: Untyped] private[ast] (expr: Tree[T], handler: Tree[T], finalizer: Tree[T])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Try[T]
}
/** throw expr */
case class Throw[-T >: Untyped] private[ast] (expr: Tree[T])
extends TermTree[T] {
type ThisTree[-T >: Untyped] = Throw[T]
}
/** Seq(elems) */
case class SeqLiteral[-T >: Untyped] private[ast] (elems: List[Tree[T]])
extends Tree[T] {
type ThisTree[-T >: Untyped] = SeqLiteral[T]
}
/** Array(elems) */
class JavaSeqLiteral[T >: Untyped] private[ast] (elems: List[Tree[T]])
extends SeqLiteral(elems) {
}
/** A type tree that represents an existing or inferred type */
case class TypeTree[-T >: Untyped] private[ast] (original: Tree[T])
extends DenotingTree[T] with TypTree[T] {
type ThisTree[-T >: Untyped] = TypeTree[T]
override def initialPos = NoPosition
override def isEmpty = !hasType && original.isEmpty
override def toString =
s"TypeTree${if (hasType) s"[$typeOpt]" else s"($original)"}"
}
/** ref.type */
case class SingletonTypeTree[-T >: Untyped] private[ast] (ref: Tree[T])
extends DenotingTree[T] with TypTree[T] {
type ThisTree[-T >: Untyped] = SingletonTypeTree[T]
}
/** qualifier # name */
case class SelectFromTypeTree[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)
extends RefTree[T] {
type ThisTree[-T >: Untyped] = SelectFromTypeTree[T]
def withName(name: Name)(implicit ctx: Context): untpd.SelectFromTypeTree = untpd.cpy.SelectFromTypeTree(this, qualifier, name)
}
/** left & right */
case class AndTypeTree[-T >: Untyped] private[ast] (left: Tree[T], right: Tree[T])
extends TypTree[T] {
type ThisTree[-T >: Untyped] = AndTypeTree[T]
}
/** left | right */
case class OrTypeTree[-T >: Untyped] private[ast] (left: Tree[T], right: Tree[T])
extends TypTree[T] {
type ThisTree[-T >: Untyped] = OrTypeTree[T]
}
/** tpt { refinements } */
case class RefinedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], refinements: List[Tree[T]])
extends ProxyTree[T] with TypTree[T] {
type ThisTree[-T >: Untyped] = RefinedTypeTree[T]
def forwardTo = tpt
}
/** tpt[args] */
case class AppliedTypeTree[-T >: Untyped] private[ast] (tpt: Tree[T], args: List[Tree[T]])
extends ProxyTree[T] with TypTree[T] {
type ThisTree[-T >: Untyped] = AppliedTypeTree[T]
def forwardTo = tpt
}
/** => T */
case class ByNameTypeTree[-T >: Untyped] private[ast] (result: Tree[T])
extends Tree[T] {
type ThisTree[-T >: Untyped] = ByNameTypeTree[T]
}
/** >: lo <: hi */
case class TypeBoundsTree[-T >: Untyped] private[ast] (lo: Tree[T], hi: Tree[T])
extends TypTree[T] {
type ThisTree[-T >: Untyped] = TypeBoundsTree[T]
}
/** name @ body */
case class Bind[-T >: Untyped] private[ast] (name: Name, body: Tree[T])
extends NameTree[T] with DefTree[T] with PatternTree[T] {
type ThisTree[-T >: Untyped] = Bind[T]
override def envelope: Position = pos union initialPos
def withName(name: Name)(implicit ctx: Context): untpd.Bind = untpd.cpy.Bind(this, name, body)
}
/** tree_1 | ... | tree_n */
case class Alternative[-T >: Untyped] private[ast] (trees: List[Tree[T]])
extends PatternTree[T] {
type ThisTree[-T >: Untyped] = Alternative[T]
}
/** The typed translation of `extractor(patterns)` in a pattern. The translation has the following
* components:
*
* @param fun is `extractor.unapply` (or, for backwards compatibility, `extractor.unapplySeq`)
* possibly with type parameters
* @param implicits Any implicit parameters passed to the unapply after the selector
* @param patterns The argument patterns in the pattern match.
*
* Given a match selector `sel`, a pattern UnApply(fun, implicits, patterns) is roughly translated as follows
*
* val result = fun(sel)(implicits)
* if (result.isDefined) "match patterns against result"
*/
case class UnApply[-T >: Untyped] private[ast] (fun: Tree[T], implicits: List[Tree[T]], patterns: List[Tree[T]])
extends PatternTree[T] {
type ThisTree[-T >: Untyped] = UnApply[T]
}
/** mods val name: tpt = rhs */
case class ValDef[-T >: Untyped] private[ast] (mods: Modifiers[T], name: TermName, tpt: Tree[T], rhs: Tree[T])
extends ValOrDefDef[T] {
type ThisTree[-T >: Untyped] = ValDef[T]
def withName(name: Name)(implicit ctx: Context): untpd.ValDef = untpd.cpy.ValDef(this, mods, name.toTermName, tpt, rhs)
assert(isEmpty || tpt != genericEmptyTree)
}
/** mods def name[tparams](vparams_1)...(vparams_n): tpt = rhs */
case class DefDef[-T >: Untyped] private[ast] (mods: Modifiers[T], name: TermName, tparams: List[TypeDef[T]], vparamss: List[List[ValDef[T]]], tpt: Tree[T], rhs: Tree[T])
extends ValOrDefDef[T] {
type ThisTree[-T >: Untyped] = DefDef[T]
def withName(name: Name)(implicit ctx: Context): untpd.DefDef = untpd.cpy.DefDef(this, mods, name.toTermName, tparams, vparamss, tpt, rhs)
assert(tpt != genericEmptyTree)
}
/** mods class name template or
* mods trait name template or
* mods type name = rhs or
* mods type name >: lo <: hi, if rhs = TypeBoundsTree(lo, hi) & (lo ne hi)
*/
case class TypeDef[-T >: Untyped] private[ast] (mods: Modifiers[T], name: TypeName, rhs: Tree[T])
extends MemberDef[T] {
type ThisTree[-T >: Untyped] = TypeDef[T]
def withName(name: Name)(implicit ctx: Context): untpd.TypeDef = untpd.cpy.TypeDef(this, mods, name.toTypeName, rhs, tparams)
/** Is this a definition of a class? */
def isClassDef = rhs.isInstanceOf[Template[_]]
/** If this is a non-class type definition, its type parameters.
* Can be different from Nil only for PolyTypeDefs, which are always
* untyped and get eliminated during desugaring.
*/
def tparams: List[untpd.TypeDef] = Nil
}
/** extends parents { self => body } */
case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parents: List[Tree[T]], self: ValDef[T], body: List[Tree[T]])
extends DefTree[T] {
type ThisTree[-T >: Untyped] = Template[T]
}
/** import expr.selectors
* where a selector is either an untyped `Ident` `name`, or
* an untyped `Pair` `name => rename`
*/
case class Import[-T >: Untyped] private[ast] (expr: Tree[T], selectors: List[Tree[Untyped]])
extends DenotingTree[T] {
type ThisTree[-T >: Untyped] = Import[T]
}
/** package pid { stats } */
case class PackageDef[-T >: Untyped] private[ast] (pid: RefTree[T], stats: List[Tree[T]])
extends ProxyTree[T] {
type ThisTree[-T >: Untyped] = PackageDef[T]
def forwardTo = pid
}
/** arg @annot */
case class Annotated[-T >: Untyped] private[ast] (annot: Tree[T], arg: Tree[T])
extends ProxyTree[T] {
type ThisTree[-T >: Untyped] = Annotated[T]
def forwardTo = arg
}
trait WithoutTypeOrPos[-T >: Untyped] extends Tree[T] {
override def tpe: T @uncheckedVariance = NoType.asInstanceOf[T]
override def withTypeUnchecked(tpe: Type) = this.asInstanceOf[ThisTree[Type]]
override def pos = NoPosition
override def setPos(pos: Position) = {}
}
/** Temporary class that results from translation of ModuleDefs
* (and possibly other statements).
* The contained trees will be integrated when transformed with
* a `transform(List[Tree])` call.
*/
case class Thicket[-T >: Untyped](trees: List[Tree[T]])
extends Tree[T] with WithoutTypeOrPos[T] {
type ThisTree[-T >: Untyped] = Thicket[T]
override def isEmpty: Boolean = trees.isEmpty
override def toList: List[Tree[T]] = flatten(trees)
override def toString = if (isEmpty) "EmptyTree" else "Thicket(" + trees.mkString(", ") + ")"
}
class EmptyValDef[T >: Untyped] extends ValDef[T](
Modifiers[T](Private), nme.WILDCARD, genericEmptyTree[T], genericEmptyTree[T]) with WithoutTypeOrPos[T] {
override def isEmpty: Boolean = true
}
val theEmptyTree: Thicket[Type] = Thicket(Nil)
val theEmptyValDef = new EmptyValDef[Type]
def genericEmptyValDef[T >: Untyped]: ValDef[T] = theEmptyValDef.asInstanceOf[ValDef[T]]
def genericEmptyTree[T >: Untyped]: Thicket[T] = theEmptyTree.asInstanceOf[Thicket[T]]
def flatten[T >: Untyped](trees: List[Tree[T]]): List[Tree[T]] = {
var buf: ListBuffer[Tree[T]] = null
var xs = trees
while (xs.nonEmpty) {
xs.head match {
case Thicket(elems) =>
if (buf == null) {
buf = new ListBuffer
var ys = trees
while (ys ne xs) {
buf += ys.head
ys = ys.tail
}
}
for (elem <- elems) {
assert(!elem.isInstanceOf[Thicket[_]])
buf += elem
}
case tree =>
if (buf != null) buf += tree
}
xs = xs.tail
}
if (buf != null) buf.toList else trees
}
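// Illustrative sketch, not part of the original source: flatten splices
// Thicket elements in place and allocates nothing when no Thicket occurs.
//
//   flatten(List(a, Thicket(List(b, c)), d)) == List(a, b, c, d)
//   val xs = List(a, b); flatten(xs) eq xs // true: input returned unchanged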
// ----- Generic Tree Instances, inherited from `tpd` and `untpd`.
abstract class Instance[T >: Untyped <: Type] extends DotClass { inst =>
type Modifiers = Trees.Modifiers[T]
type Tree = Trees.Tree[T]
type TypTree = Trees.TypTree[T]
type TermTree = Trees.TermTree[T]
type PatternTree = Trees.PatternTree[T]
type DenotingTree = Trees.DenotingTree[T]
type ProxyTree = Trees.ProxyTree[T]
type NameTree = Trees.NameTree[T]
type RefTree = Trees.RefTree[T]
type DefTree = Trees.DefTree[T]
type MemberDef = Trees.MemberDef[T]
type ValOrDefDef = Trees.ValOrDefDef[T]
type Ident = Trees.Ident[T]
type BackquotedIdent = Trees.BackquotedIdent[T]
type Select = Trees.Select[T]
type SelectWithSig = Trees.SelectWithSig[T]
type This = Trees.This[T]
type Super = Trees.Super[T]
type Apply = Trees.Apply[T]
type TypeApply = Trees.TypeApply[T]
type Literal = Trees.Literal[T]
type New = Trees.New[T]
type Pair = Trees.Pair[T]
type Typed = Trees.Typed[T]
type NamedArg = Trees.NamedArg[T]
type Assign = Trees.Assign[T]
type Block = Trees.Block[T]
type If = Trees.If[T]
type Closure = Trees.Closure[T]
type Match = Trees.Match[T]
type CaseDef = Trees.CaseDef[T]
type Return = Trees.Return[T]
type Try = Trees.Try[T]
type Throw = Trees.Throw[T]
type SeqLiteral = Trees.SeqLiteral[T]
type JavaSeqLiteral = Trees.JavaSeqLiteral[T]
type TypeTree = Trees.TypeTree[T]
type SingletonTypeTree = Trees.SingletonTypeTree[T]
type SelectFromTypeTree = Trees.SelectFromTypeTree[T]
type AndTypeTree = Trees.AndTypeTree[T]
type OrTypeTree = Trees.OrTypeTree[T]
type RefinedTypeTree = Trees.RefinedTypeTree[T]
type AppliedTypeTree = Trees.AppliedTypeTree[T]
type ByNameTypeTree = Trees.ByNameTypeTree[T]
type TypeBoundsTree = Trees.TypeBoundsTree[T]
type Bind = Trees.Bind[T]
type Alternative = Trees.Alternative[T]
type UnApply = Trees.UnApply[T]
type ValDef = Trees.ValDef[T]
type DefDef = Trees.DefDef[T]
type TypeDef = Trees.TypeDef[T]
type Template = Trees.Template[T]
type Import = Trees.Import[T]
type PackageDef = Trees.PackageDef[T]
type Annotated = Trees.Annotated[T]
type Thicket = Trees.Thicket[T]
val EmptyTree: Thicket = genericEmptyTree
val EmptyValDef: ValDef = genericEmptyValDef
// ----- Auxiliary creation methods ------------------
def Modifiers(flags: FlagSet = EmptyFlags,
privateWithin: TypeName = tpnme.EMPTY,
annotations: List[Tree] = Nil) = new Modifiers(flags, privateWithin, annotations)
def Thicket(trees: List[Tree]): Thicket = new Thicket(trees)
def Thicket(): Thicket = EmptyTree
def Thicket(x1: Tree, x2: Tree): Thicket = Thicket(x1 :: x2 :: Nil)
def Thicket(x1: Tree, x2: Tree, x3: Tree): Thicket = Thicket(x1 :: x2 :: x3 :: Nil)
def flatTree(xs: List[Tree]): Tree = flatten(xs) match {
case x :: Nil => x
case ys => Thicket(ys)
}
// ----- Position handling -----------------------------------------
def foreachSubTreeOf(tree: Tree)(f: Tree => Unit): Unit = {
val traverser = new TreeTraverser {
def traverse(tree: Tree) = foldOver(f(tree), tree)
}
traverser.traverse(tree)
}
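// Illustrative sketch, not part of the original source: visiting a tree and
// all of its subtrees with the helper above.
//
//   foreachSubTreeOf(tree)(t => println(t))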
// ----- Helper classes for copying, transforming, accumulating -----------------
val cpy: TreeCopier
abstract class TreeCopier {
def postProcess(tree: Tree, copied: untpd.Tree): copied.ThisTree[T]
def finalize(tree: Tree, copied: untpd.Tree): copied.ThisTree[T] =
postProcess(tree, copied withPos tree.pos)
def Ident(tree: Tree, name: Name): Ident = tree match {
case tree: BackquotedIdent =>
if (name == tree.name) tree
else finalize(tree, new BackquotedIdent(name))
case tree: Ident if (name == tree.name) => tree
case _ => finalize(tree, untpd.Ident(name))
}
def Select(tree: Tree, qualifier: Tree, name: Name): Select = tree match {
case tree: SelectWithSig =>
if ((qualifier eq tree.qualifier) && (name == tree.name)) tree
else finalize(tree, new SelectWithSig(qualifier, name, tree.sig))
case tree: Select if (qualifier eq tree.qualifier) && (name == tree.name) => tree
case _ => finalize(tree, untpd.Select(qualifier, name))
}
def This(tree: Tree, qual: TypeName): This = tree match {
case tree: This if (qual == tree.qual) => tree
case _ => finalize(tree, untpd.This(qual))
}
def Super(tree: Tree, qual: Tree, mix: TypeName): Super = tree match {
case tree: Super if (qual eq tree.qual) && (mix == tree.mix) => tree
case _ => finalize(tree, untpd.Super(qual, mix))
}
def Apply(tree: Tree, fun: Tree, args: List[Tree]): Apply = tree match {
case tree: Apply if (fun eq tree.fun) && (args eq tree.args) => tree
case _ => finalize(tree, untpd.Apply(fun, args))
}
def TypeApply(tree: Tree, fun: Tree, args: List[Tree]): TypeApply = tree match {
case tree: TypeApply if (fun eq tree.fun) && (args eq tree.args) => tree
case _ => finalize(tree, untpd.TypeApply(fun, args))
}
def Literal(tree: Tree, const: Constant): Literal = tree match {
case tree: Literal if (const == tree.const) => tree
case _ => finalize(tree, untpd.Literal(const))
}
def New(tree: Tree, tpt: Tree): New = tree match {
case tree: New if (tpt eq tree.tpt) => tree
case _ => finalize(tree, untpd.New(tpt))
}
def Pair(tree: Tree, left: Tree, right: Tree): Pair = tree match {
case tree: Pair if (left eq tree.left) && (right eq tree.right) => tree
case _ => finalize(tree, untpd.Pair(left, right))
}
def Typed(tree: Tree, expr: Tree, tpt: Tree): Typed = tree match {
case tree: Typed if (expr eq tree.expr) && (tpt eq tree.tpt) => tree
case _ => finalize(tree, untpd.Typed(expr, tpt))
}
def NamedArg(tree: Tree, name: Name, arg: Tree): NamedArg = tree match {
case tree: NamedArg if (name == tree.name) && (arg eq tree.arg) => tree
case _ => finalize(tree, untpd.NamedArg(name, arg))
}
def Assign(tree: Tree, lhs: Tree, rhs: Tree): Assign = tree match {
case tree: Assign if (lhs eq tree.lhs) && (rhs eq tree.rhs) => tree
case _ => finalize(tree, untpd.Assign(lhs, rhs))
}
def Block(tree: Tree, stats: List[Tree], expr: Tree): Block = tree match {
case tree: Block if (stats eq tree.stats) && (expr eq tree.expr) => tree
case _ => finalize(tree, untpd.Block(stats, expr))
}
def If(tree: Tree, cond: Tree, thenp: Tree, elsep: Tree): If = tree match {
case tree: If if (cond eq tree.cond) && (thenp eq tree.thenp) && (elsep eq tree.elsep) => tree
case _ => finalize(tree, untpd.If(cond, thenp, elsep))
}
def Closure(tree: Tree, env: List[Tree], meth: Tree, tpt: Tree): Closure = tree match {
case tree: Closure if (env eq tree.env) && (meth eq tree.meth) && (tpt eq tree.tpt) => tree
case _ => finalize(tree, untpd.Closure(env, meth, tpt))
}
def Match(tree: Tree, selector: Tree, cases: List[CaseDef]): Match = tree match {
case tree: Match if (selector eq tree.selector) && (cases eq tree.cases) => tree
case _ => finalize(tree, untpd.Match(selector, cases))
}
def CaseDef(tree: Tree, pat: Tree, guard: Tree, body: Tree): CaseDef = tree match {
case tree: CaseDef if (pat eq tree.pat) && (guard eq tree.guard) && (body eq tree.body) => tree
case _ => finalize(tree, untpd.CaseDef(pat, guard, body))
}
def Return(tree: Tree, expr: Tree, from: Tree): Return = tree match {
case tree: Return if (expr eq tree.expr) && (from eq tree.from) => tree
case _ => finalize(tree, untpd.Return(expr, from))
}
def Try(tree: Tree, expr: Tree, handler: Tree, finalizer: Tree): Try = tree match {
case tree: Try if (expr eq tree.expr) && (handler eq tree.handler) && (finalizer eq tree.finalizer) => tree
case _ => finalize(tree, untpd.Try(expr, handler, finalizer))
}
def Throw(tree: Tree, expr: Tree): Throw = tree match {
case tree: Throw if (expr eq tree.expr) => tree
case _ => finalize(tree, untpd.Throw(expr))
}
def SeqLiteral(tree: Tree, elems: List[Tree]): SeqLiteral = tree match {
case tree: JavaSeqLiteral =>
if (elems eq tree.elems) tree
else finalize(tree, new JavaSeqLiteral(elems))
case tree: SeqLiteral if (elems eq tree.elems) => tree
case _ => finalize(tree, untpd.SeqLiteral(elems))
}
def TypeTree(tree: Tree, original: Tree): TypeTree = tree match {
case tree: TypeTree if original eq tree.original => tree
case _ => finalize(tree, untpd.TypeTree(original))
}
def SingletonTypeTree(tree: Tree, ref: Tree): SingletonTypeTree = tree match {
case tree: SingletonTypeTree if (ref eq tree.ref) => tree
case _ => finalize(tree, untpd.SingletonTypeTree(ref))
}
def SelectFromTypeTree(tree: Tree, qualifier: Tree, name: Name): SelectFromTypeTree = tree match {
case tree: SelectFromTypeTree if (qualifier eq tree.qualifier) && (name == tree.name) => tree
case _ => finalize(tree, untpd.SelectFromTypeTree(qualifier, name))
}
def AndTypeTree(tree: Tree, left: Tree, right: Tree): AndTypeTree = tree match {
case tree: AndTypeTree if (left eq tree.left) && (right eq tree.right) => tree
case _ => finalize(tree, untpd.AndTypeTree(left, right))
}
def OrTypeTree(tree: Tree, left: Tree, right: Tree): OrTypeTree = tree match {
case tree: OrTypeTree if (left eq tree.left) && (right eq tree.right) => tree
case _ => finalize(tree, untpd.OrTypeTree(left, right))
}
def RefinedTypeTree(tree: Tree, tpt: Tree, refinements: List[Tree]): RefinedTypeTree = tree match {
case tree: RefinedTypeTree if (tpt eq tree.tpt) && (refinements eq tree.refinements) => tree
case _ => finalize(tree, untpd.RefinedTypeTree(tpt, refinements))
}
def AppliedTypeTree(tree: Tree, tpt: Tree, args: List[Tree]): AppliedTypeTree = tree match {
case tree: AppliedTypeTree if (tpt eq tree.tpt) && (args eq tree.args) => tree
case _ => finalize(tree, untpd.AppliedTypeTree(tpt, args))
}
def ByNameTypeTree(tree: Tree, result: Tree): ByNameTypeTree = tree match {
case tree: ByNameTypeTree if (result eq tree.result) => tree
case _ => finalize(tree, untpd.ByNameTypeTree(result))
}
def TypeBoundsTree(tree: Tree, lo: Tree, hi: Tree): TypeBoundsTree = tree match {
case tree: TypeBoundsTree if (lo eq tree.lo) && (hi eq tree.hi) => tree
case _ => finalize(tree, untpd.TypeBoundsTree(lo, hi))
}
def Bind(tree: Tree, name: Name, body: Tree): Bind = tree match {
case tree: Bind if (name eq tree.name) && (body eq tree.body) => tree
case _ => finalize(tree, untpd.Bind(name, body))
}
def Alternative(tree: Tree, trees: List[Tree]): Alternative = tree match {
case tree: Alternative if (trees eq tree.trees) => tree
case _ => finalize(tree, untpd.Alternative(trees))
}
def UnApply(tree: Tree, fun: Tree, implicits: List[Tree], patterns: List[Tree]): UnApply = tree match {
case tree: UnApply if (fun eq tree.fun) && (implicits eq tree.implicits) && (patterns eq tree.patterns) => tree
case _ => finalize(tree, untpd.UnApply(fun, implicits, patterns))
}
def ValDef(tree: Tree, mods: Modifiers, name: TermName, tpt: Tree, rhs: Tree): ValDef = tree match {
case tree: ValDef if (mods == tree.mods) && (name == tree.name) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree
case _ => finalize(tree, untpd.ValDef(mods, name, tpt, rhs))
}
def DefDef(tree: Tree, mods: Modifiers, name: TermName, tparams: List[TypeDef], vparamss: List[List[ValDef]], tpt: Tree, rhs: Tree): DefDef = tree match {
case tree: DefDef if (mods == tree.mods) && (name == tree.name) && (tparams eq tree.tparams) && (vparamss eq tree.vparamss) && (tpt eq tree.tpt) && (rhs eq tree.rhs) => tree
case _ => finalize(tree, untpd.DefDef(mods, name, tparams, vparamss, tpt, rhs))
}
def TypeDef(tree: Tree, mods: Modifiers, name: TypeName, rhs: Tree, tparams: List[untpd.TypeDef] = Nil): TypeDef = tree match {
case tree: TypeDef if (mods == tree.mods) && (name == tree.name) && (rhs eq tree.rhs) && (tparams eq tree.tparams) => tree
case _ => finalize(tree, untpd.TypeDef(mods, name, tparams, rhs))
}
def Template(tree: Tree, constr: DefDef, parents: List[Tree], self: ValDef, body: List[Tree]): Template = tree match {
case tree: Template if (constr eq tree.constr) && (parents eq tree.parents) && (self eq tree.self) && (body eq tree.body) => tree
case _ => finalize(tree, untpd.Template(constr, parents, self, body))
}
def Import(tree: Tree, expr: Tree, selectors: List[untpd.Tree]): Import = tree match {
case tree: Import if (expr eq tree.expr) && (selectors eq tree.selectors) => tree
case _ => finalize(tree, untpd.Import(expr, selectors))
}
def PackageDef(tree: Tree, pid: RefTree, stats: List[Tree]): PackageDef = tree match {
case tree: PackageDef if (pid eq tree.pid) && (stats eq tree.stats) => tree
case _ => finalize(tree, untpd.PackageDef(pid, stats))
}
def Annotated(tree: Tree, annot: Tree, arg: Tree): Annotated = tree match {
case tree: Annotated if (annot eq tree.annot) && (arg eq tree.arg) => tree
case _ => finalize(tree, untpd.Annotated(annot, arg))
}
def Thicket(tree: Tree, trees: List[Tree]): Thicket = tree match {
case tree: Thicket if (trees eq tree.trees) => tree
case _ => finalize(tree, untpd.Thicket(trees))
}
}
abstract class TreeTransformer(val cpy: TreeCopier = inst.cpy) {
def transform(tree: Tree)(implicit ctx: Context): Tree = tree match {
case Ident(name) =>
tree
case Select(qualifier, name) =>
cpy.Select(tree, transform(qualifier), name)
case This(qual) =>
tree
case Super(qual, mix) =>
cpy.Super(tree, transform(qual), mix)
case Apply(fun, args) =>
cpy.Apply(tree, transform(fun), transform(args))
case TypeApply(fun, args) =>
cpy.TypeApply(tree, transform(fun), transform(args))
case Literal(const) =>
tree
case New(tpt) =>
cpy.New(tree, transform(tpt))
case Pair(left, right) =>
cpy.Pair(tree, transform(left), transform(right))
case Typed(expr, tpt) =>
cpy.Typed(tree, transform(expr), transform(tpt))
case NamedArg(name, arg) =>
cpy.NamedArg(tree, name, transform(arg))
case Assign(lhs, rhs) =>
cpy.Assign(tree, transform(lhs), transform(rhs))
case Block(stats, expr) =>
cpy.Block(tree, transformStats(stats), transform(expr))
case If(cond, thenp, elsep) =>
cpy.If(tree, transform(cond), transform(thenp), transform(elsep))
case Closure(env, meth, tpt) =>
cpy.Closure(tree, transform(env), transform(meth), transform(tpt))
case Match(selector, cases) =>
cpy.Match(tree, transform(selector), transformSub(cases))
case CaseDef(pat, guard, body) =>
cpy.CaseDef(tree, transform(pat), transform(guard), transform(body))
case Return(expr, from) =>
cpy.Return(tree, transform(expr), transformSub(from))
case Try(block, handler, finalizer) =>
cpy.Try(tree, transform(block), transform(handler), transform(finalizer))
case Throw(expr) =>
cpy.Throw(tree, transform(expr))
case SeqLiteral(elems) =>
cpy.SeqLiteral(tree, transform(elems))
case TypeTree(original) =>
tree
case SingletonTypeTree(ref) =>
cpy.SingletonTypeTree(tree, transform(ref))
case SelectFromTypeTree(qualifier, name) =>
cpy.SelectFromTypeTree(tree, transform(qualifier), name)
case AndTypeTree(left, right) =>
cpy.AndTypeTree(tree, transform(left), transform(right))
case OrTypeTree(left, right) =>
cpy.OrTypeTree(tree, transform(left), transform(right))
case RefinedTypeTree(tpt, refinements) =>
cpy.RefinedTypeTree(tree, transform(tpt), transformSub(refinements))
case AppliedTypeTree(tpt, args) =>
cpy.AppliedTypeTree(tree, transform(tpt), transform(args))
case ByNameTypeTree(result) =>
cpy.ByNameTypeTree(tree, transform(result))
case TypeBoundsTree(lo, hi) =>
cpy.TypeBoundsTree(tree, transform(lo), transform(hi))
case Bind(name, body) =>
cpy.Bind(tree, name, transform(body))
case Alternative(trees) =>
cpy.Alternative(tree, transform(trees))
case UnApply(fun, implicits, patterns) =>
cpy.UnApply(tree, transform(fun), transform(implicits), transform(patterns))
case ValDef(mods, name, tpt, rhs) =>
cpy.ValDef(tree, mods, name, transform(tpt), transform(rhs))
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
cpy.DefDef(tree, mods, name, transformSub(tparams), vparamss mapConserve (transformSub(_)), transform(tpt), transform(rhs))
case tree @ TypeDef(mods, name, rhs) =>
cpy.TypeDef(tree, mods, name, transform(rhs), tree.tparams)
case Template(constr, parents, self, body) =>
cpy.Template(tree, transformSub(constr), transform(parents), transformSub(self), transformStats(body))
case Import(expr, selectors) =>
cpy.Import(tree, transform(expr), selectors)
case PackageDef(pid, stats) =>
cpy.PackageDef(tree, transformSub(pid), transformStats(stats))
case Annotated(annot, arg) =>
cpy.Annotated(tree, transform(annot), transform(arg))
case Thicket(trees) =>
val trees1 = transform(trees)
if (trees1 eq trees) tree else Thicket(trees1)
}
def transformStats(trees: List[Tree])(implicit ctx: Context): List[Tree] =
transform(trees)
def transform(trees: List[Tree])(implicit ctx: Context): List[Tree] =
flatten(trees mapConserve (transform(_)))
def transformSub[Tr <: Tree](tree: Tr)(implicit ctx: Context): Tr =
transform(tree).asInstanceOf[Tr]
def transformSub[Tr <: Tree](trees: List[Tr])(implicit ctx: Context): List[Tr] =
transform(trees).asInstanceOf[List[Tr]]
}
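/** Illustrative sketch (not part of the original source): a subclass that
 *  counts every node it visits while delegating the actual rewriting to the
 *  default cases of `transform` above.
 *
 *    class CountingTransformer extends TreeTransformer {
 *      var visited = 0
 *      override def transform(tree: Tree)(implicit ctx: Context): Tree = {
 *        visited += 1
 *        super.transform(tree)
 *      }
 *    }
 */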
abstract class TreeAccumulator[X] extends ((X, Tree) => X) {
def apply(x: X, tree: Tree): X
def apply(x: X, trees: Traversable[Tree]): X = (x /: trees)(apply)
def foldOver(x: X, tree: Tree): X = tree match {
case Ident(name) =>
x
case Select(qualifier, name) =>
this(x, qualifier)
case This(qual) =>
x
case Super(qual, mix) =>
this(x, qual)
case Apply(fun, args) =>
this(this(x, fun), args)
case TypeApply(fun, args) =>
this(this(x, fun), args)
case Literal(const) =>
x
case New(tpt) =>
this(x, tpt)
case Pair(left, right) =>
this(this(x, left), right)
case Typed(expr, tpt) =>
this(this(x, expr), tpt)
case NamedArg(name, arg) =>
this(x, arg)
case Assign(lhs, rhs) =>
this(this(x, lhs), rhs)
case Block(stats, expr) =>
this(this(x, stats), expr)
case If(cond, thenp, elsep) =>
this(this(this(x, cond), thenp), elsep)
case Closure(env, meth, tpt) =>
this(this(this(x, env), meth), tpt)
case Match(selector, cases) =>
this(this(x, selector), cases)
case CaseDef(pat, guard, body) =>
this(this(this(x, pat), guard), body)
case Return(expr, from) =>
this(this(x, expr), from)
case Try(block, handler, finalizer) =>
this(this(this(x, block), handler), finalizer)
case Throw(expr) =>
this(x, expr)
case SeqLiteral(elems) =>
this(x, elems)
case TypeTree(original) =>
x
case SingletonTypeTree(ref) =>
this(x, ref)
case SelectFromTypeTree(qualifier, name) =>
this(x, qualifier)
case AndTypeTree(left, right) =>
this(this(x, left), right)
case OrTypeTree(left, right) =>
this(this(x, left), right)
case RefinedTypeTree(tpt, refinements) =>
this(this(x, tpt), refinements)
case AppliedTypeTree(tpt, args) =>
this(this(x, tpt), args)
case ByNameTypeTree(result) =>
this(x, result)
case TypeBoundsTree(lo, hi) =>
this(this(x, lo), hi)
case Bind(name, body) =>
this(x, body)
case Alternative(trees) =>
this(x, trees)
case UnApply(fun, implicits, patterns) =>
this(this(this(x, fun), implicits), patterns)
case ValDef(mods, name, tpt, rhs) =>
this(this(x, tpt), rhs)
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
this(this((this(x, tparams) /: vparamss)(apply), tpt), rhs)
case TypeDef(mods, name, rhs) =>
this(x, rhs)
case Template(constr, parents, self, body) =>
this(this(this(this(x, constr), parents), self), body)
case Import(expr, selectors) =>
this(x, expr)
case PackageDef(pid, stats) =>
this(this(x, pid), stats)
case Annotated(annot, arg) =>
this(this(x, annot), arg)
case Thicket(ts) =>
this(x, ts)
}
}
abstract class TreeTraverser extends TreeAccumulator[Unit] {
def traverse(tree: Tree): Unit
def apply(x: Unit, tree: Tree) = traverse(tree)
}
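/** Illustrative sketch (hypothetical, not in the original source): a traverser
 *  that counts `Ident` nodes, recursing into children via `foldOver`.
 *
 *    class IdentCounter extends TreeTraverser {
 *      var count = 0
 *      def traverse(tree: Tree): Unit = {
 *        tree match { case Ident(_) => count += 1; case _ => () }
 *        foldOver((), tree)
 *      }
 *    }
 */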
/** Fold `f` over all tree nodes, in depth-first, prefix order */
class DeepFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] {
def apply(x: X, tree: Tree): X = foldOver(f(x, tree), tree)
}
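/** For example (illustrative only), counting all nodes of a tree:
 *
 *    val nodeCount = new DeepFolder[Int]((n, _) => n + 1).apply(0, tree)
 */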
/** Fold `f` over all tree nodes, in depth-first, prefix order, but don't visit
* subtrees where `f` returns a different result for the root, i.e. `f(x, root) ne x`.
*/
class ShallowFolder[X](f: (X, Tree) => X) extends TreeAccumulator[X] {
def apply(x: X, tree: Tree): X = {
val x1 = f(x, tree)
if (x1.asInstanceOf[AnyRef] ne x.asInstanceOf[AnyRef]) x1
else foldOver(x1, tree)
}
}
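// Note: the cutoff in ShallowFolder is by reference equality (`ne`), so `f`
// signals "stop descending" by returning a value that is not the same object
// it was given; value-equal but distinct results still prune the subtree.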
}
}
// ----- Helper functions and classes ---------------------------------------
/*
abstract class FullTreeTransformer[T >: Untyped, C] {
var sharedMemo: Map[SharedTree[T], SharedTree[T]] = Map()
def transform(tree: Tree[T], c: C): Tree[T] = tree match {
case Ident(name) =>
finishIdent(tree, tree, c, plugins)
case Select(qualifier, name) =>
finishSelect(tree.derivedSelect(transform(qualifier, c), name), tree, c, plugins)
case This(qual) =>
finishThis(tree, tree, c, plugins)
case Super(qual, mix) =>
finishSuper(tree.derivedSuper(transform(qual, c), mix), tree, c, plugins)
case Apply(fun, args) =>
finishApply(tree.derivedApply(transform(fun, c), transform(args, c)), tree, c, plugins)
case TypeApply(fun, args) =>
finishTypeApply(tree.derivedTypeApply(transform(fun, c), transform(args, c)), tree, c, plugins)
case Literal(const) =>
finishLiteral(tree, tree, c, plugins)
case New(tpt) =>
finishNew(tree.derivedNew(transform(tpt, c)), tree, c, plugins)
case Pair(left, right) =>
finishPair(tree.derivedPair(transform(left, c), transform(right, c)), tree, c, plugins)
case Typed(expr, tpt) =>
finishTyped(tree.derivedTyped(transform(expr, c), transform(tpt, c)), tree, c, plugins)
case NamedArg(name, arg) =>
finishNamedArg(tree.derivedNamedArg(name, transform(arg, c)), tree, c, plugins)
case Assign(lhs, rhs) =>
finishAssign(tree.derivedAssign(transform(lhs, c), transform(rhs, c)), tree, c, plugins)
case Block(stats, expr) =>
finishBlock(tree.derivedBlock(transform(stats, c), transform(expr, c)), tree, c, plugins)
case If(cond, thenp, elsep) =>
finishIf(tree.derivedIf(transform(cond, c), transform(thenp, c), transform(elsep, c)), tree, c, plugins)
case Closure(env, meth, tpt) =>
finishClosure(tree.derivedClosure(transform(env, c), transformSub(meth, c)), tree, c, plugins)
case Match(selector, cases) =>
finishMatch(tree.derivedMatch(transform(selector, c), transformSub(cases, c)), tree, c, plugins)
case CaseDef(pat, guard, body) =>
finishCaseDef(tree.derivedCaseDef(transform(pat, c), transform(guard, c), transform(body, c)), tree, c, plugins)
case Return(expr, from) =>
finishReturn(tree.derivedReturn(transform(expr, c), transform(from, c)), tree, c, plugins)
case Try(block, handler, finalizer) =>
finishTry(tree.derivedTry(transform(block, c), transform(handler, c), transform(finalizer, c)), tree, c, plugins)
case Throw(expr) =>
finishThrow(tree.derivedThrow(transform(expr, c)), tree, c, plugins)
case SeqLiteral(elems) =>
finishSeqLiteral(tree.derivedSeqLiteral(transform(elems, c)), tree, c, plugins)
case TypeTree(original) =>
finishTypeTree(tree, tree, c, plugins)
case SingletonTypeTree(ref) =>
finishSingletonTypeTree(tree.derivedSingletonTypeTree(transform(ref, c)), tree, c, plugins)
case SelectFromTypeTree(qualifier, name) =>
finishSelectFromTypeTree(tree.derivedSelectFromTypeTree(transform(qualifier, c), name), tree, c, plugins)
case AndTypeTree(left, right) =>
finishAndTypeTree(tree.derivedAndTypeTree(transform(left, c), transform(right, c)), tree, c, plugins)
case OrTypeTree(left, right) =>
finishOrTypeTree(tree.derivedOrTypeTree(transform(left, c), transform(right, c)), tree, c, plugins)
case RefinedTypeTree(tpt, refinements) =>
finishRefinedTypeTree(tree.derivedRefinedTypeTree(transform(tpt, c), transformSub(refinements, c)), tree, c, plugins)
case AppliedTypeTree(tpt, args) =>
finishAppliedTypeTree(tree.derivedAppliedTypeTree(transform(tpt, c), transform(args, c)), tree, c, plugins)
case TypeBoundsTree(lo, hi) =>
finishTypeBoundsTree(tree.derivedTypeBoundsTree(transform(lo, c), transform(hi, c)), tree, c, plugins)
case Bind(name, body) =>
finishBind(tree.derivedBind(name, transform(body, c)), tree, c, plugins)
case Alternative(trees) =>
finishAlternative(tree.derivedAlternative(transform(trees, c)), tree, c, plugins)
case UnApply(fun, args) =>
finishUnApply(tree.derivedUnApply(transform(fun, c), transform(args, c)), tree, c, plugins)
case ValDef(mods, name, tpt, rhs) =>
finishValDef(tree.derivedValDef(mods, name, transform(tpt, c), transform(rhs, c)), tree, c, plugins)
case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
finishDefDef(tree.derivedDefDef(mods, name, transformSub(tparams, c), vparamss mapConserve (transformSub(_, c)), transform(tpt, c), transform(rhs, c)), tree, c, plugins)
case tree @ TypeDef(mods, name, rhs) =>
finishTypeDef(tree.derivedTypeDef(mods, name, transform(rhs, c), tree.tparams), tree, c, plugins)
case Template(constr, parents, self, body) =>
finishTemplate(tree.derivedTemplate(transformSub(constr, c), transform(parents, c), transformSub(self, c), transform(body, c)), tree, c, plugins)
case Import(expr, selectors) =>
finishImport(tree.derivedImport(transform(expr, c), selectors), tree, c, plugins)
case PackageDef(pid, stats) =>
finishPackageDef(tree.derivedPackageDef(transformSub(pid, c), transform(stats, c)), tree, c, plugins)
case Annotated(annot, arg) =>
finishAnnotated(tree.derivedAnnotated(transform(annot, c), transform(arg, c)), tree, c, plugins)
case EmptyTree =>
finishEmptyTree(tree, tree, c, plugins)
case tree @ SharedTree(shared) =>
finishSharedTree(
sharedMemo get tree match {
case Some(tree1) => tree1
case None =>
val tree1 = tree.derivedSharedTree(transform(shared, c))
sharedMemo = sharedMemo.updated(tree, tree1)
tree1
},
tree, c, plugins)
}
def transform(trees: List[Tree[T]], c: C): List[Tree[T]] =
flatten(trees) mapConserve (transform(_, c))
def transformSub(tree: Tree[T], c: C): tree.ThisTree[T] =
transform(tree, c).asInstanceOf[tree.ThisTree[T]]
def transformSub[TT <: Tree[T]](trees: List[TT], c: C): List[TT] =
transform(trees, c).asInstanceOf[List[TT]]
type Plugins >: Null
def plugins: Plugins = null
def finishIdent(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishSelect(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishThis(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishSuper(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishApply(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishTypeApply(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishLiteral(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishNew(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishPair(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishTyped(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishNamedArg(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishAssign(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishFunction(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishBlock(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishIf(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishClosure(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishMatch(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishCaseDef(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishReturn(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishTry(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishThrow(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishSeqLiteral(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishTypeTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishSingletonTypeTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishSelectFromTypeTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishAndTypeTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishOrTypeTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishRefinedTypeTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishAppliedTypeTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishTypeBoundsTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishBind(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishAlternative(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishUnApply(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishValDef(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishDefDef(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishTypeDef(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishTemplate(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishImport(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishPackageDef(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishAnnotated(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishEmptyTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
def finishSharedTree(tree: Tree[T], old: Tree[T], c: C, plugins: Plugins) = tree
}
*/
| DarkDimius/dotty | src/dotty/tools/dotc/ast/Trees.scala | Scala | bsd-3-clause | 61,978 |
package org.akka.essentials.zeromq.example2
import akka.actor.ActorSystem
import akka.actor.Props
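/**
 * Entry point for the ZeroMQ router/worker example: boots an actor system and
 * starts one router and two worker actors. RouterActor, WorkerTaskA and
 * WorkerTaskB (presumably carrying the actual ZeroMQ socket wiring) are
 * defined elsewhere in this package.
 */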
object MyActorSystem {
def main(args: Array[String]): Unit = {
val system = ActorSystem("zeromqTest")
system.actorOf(Props[RouterActor], name = "router")
system.actorOf(Props[WorkerTaskA], name = "workerA")
system.actorOf(Props[WorkerTaskB], name = "workerB")
}
}
| rokumar7/trial | AkkaWithZeroMQ/src/main/scala/org/akka/essentials/zeromq/example2/MyActorSystem.scala | Scala | unlicense | 435 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.execution.atomic
import minitest.SimpleTestSuite
import monix.execution.atomic.PaddingStrategy._
import scala.util.control.NonFatal
import scala.annotation.nowarn
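/** Generic contract tests for an `Atomic[A]` implementation.
  *
  * `builder` produces the concrete atomic reference under the given padding
  * `strategy`; `valueFromInt`/`valueToInt` map test integers into and out of
  * the value domain so the same assertions can be reused for every value
  * type. Judging by the suite names below, `allowPlatformIntrinsics` and
  * `allowUnsafe` select between the Java 8 intrinsics path, the Java 7
  * `Unsafe` path, and the "safe" fallback built via `buildSafeInstance`.
  */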
abstract class GenericAtomicSuite[A, R <: Atomic[A]](
builder: AtomicBuilder[A, R],
strategy: PaddingStrategy,
valueFromInt: Int => A,
valueToInt: A => Int,
allowPlatformIntrinsics: Boolean,
allowUnsafe: Boolean)
extends SimpleTestSuite {
def Atomic(initial: A): R = {
if (allowUnsafe)
builder.buildInstance(initial, strategy, allowPlatformIntrinsics)
else
builder.buildSafeInstance(initial, strategy)
}
val zero = valueFromInt(0)
val one = valueFromInt(1)
val two = valueFromInt(2)
test("should set()") {
val r = Atomic(zero)
assertEquals(r.get(), zero)
r.set(one)
assert(r.get() == one)
}
test("should getAndSet()") {
val r = Atomic(zero)
assertEquals(r.get(), zero)
val old = r.getAndSet(one)
assert(old == zero)
assert(r.get() == one)
}
test("should compareAndSet()") {
val r = Atomic(zero)
assertEquals(r.get(), zero)
assert(r.compareAndSet(zero, one))
assert(r.get() == one)
assert(r.compareAndSet(one, zero))
assertEquals(r.get(), zero)
assert(!r.compareAndSet(one, one))
assertEquals(r.get(), zero)
}
test("should transform with clean arguments") {
val r = Atomic(zero)
assertEquals(r.get(), zero)
r.transform(x => valueFromInt(valueToInt(x) + 1))
assert(r.get() == one)
r.transform(x => valueFromInt(valueToInt(x) + 1))
assertEquals(r.get(), two)
}
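// The "dirty" variants below feed the transform* operations non-trivial
// shapes: closures wrapping local defs, eta-expanded methods, try/catch
// bodies, and receivers hidden behind defs/vars. These operations are
// implemented with macros on Scala 2, so the variants guard against
// macro-expansion and hygiene regressions.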
test("should transform with dirty function #1") {
val r = Atomic(zero)
r.transform {
def increment(y: A): A = valueFromInt(valueToInt(y) + 1)
(x: A) => increment(x)
}
assert(r.get() == one)
}
test("should transform with dirty function #2") {
val r = Atomic(zero)
def increment(y: A): A = valueFromInt(valueToInt(y) + 1)
r.transform(increment)
assert(r.get() == one)
}
test("should transform with dirty function #3") {
val r = Atomic(zero)
r.transform { x =>
try valueFromInt(valueToInt(x) + 1)
catch {
case ex if NonFatal(ex) =>
x
}
}
assert(r.get() == one)
}
test("should transform with dirty self") {
val r = Atomic(zero)
def atomic = r
assertEquals(atomic.get(), zero)
atomic.transform(x => valueFromInt(valueToInt(x) + 1))
assertEquals(atomic.get(), one)
atomic.transform(x => valueFromInt(valueToInt(x) + 1))
assertEquals(atomic.get(), two)
}
test("should transformAndGet with clean arguments") {
val r = Atomic(zero)
assertEquals(r.get(), zero)
assert(r.transformAndGet(x => valueFromInt(valueToInt(x) + 1)) == one)
assert(r.transformAndGet(x => valueFromInt(valueToInt(x) + 1)) == two)
assertEquals(r.get(), two)
}
test("should transformAndGet with dirty function #1") {
val r = Atomic(zero)
val result = r.transformAndGet {
def increment(y: A): A = valueFromInt(valueToInt(y) + 1)
(x: A) => increment(x)
}
assertEquals(result, one)
}
test("should transformAndGet with dirty function #2") {
val r = Atomic(zero)
def increment(y: A): A = valueFromInt(valueToInt(y) + 1)
val result = r.transformAndGet(increment)
assertEquals(result, one)
}
test("should transformAndGet with dirty function #3") {
val r = Atomic(zero)
val result = r.transformAndGet { x =>
try valueFromInt(valueToInt(x) + 1)
catch {
case ex if NonFatal(ex) =>
x
}
}
assertEquals(result, one)
}
test("should transformAndGet with dirty self") {
@nowarn var inst = Atomic(zero)
def r = inst
assertEquals(r.get(), zero)
assert(r.transformAndGet(x => valueFromInt(valueToInt(x) + 1)) == one)
assert(r.transformAndGet(x => valueFromInt(valueToInt(x) + 1)) == two)
assertEquals(r.get(), two)
}
test("should getAndTransform with clean arguments") {
val r = Atomic(zero)
assertEquals(r.get(), zero)
assert(r.getAndTransform(x => valueFromInt(valueToInt(x) + 1)) == zero)
assert(r.getAndTransform(x => valueFromInt(valueToInt(x) + 1)) == one)
assertEquals(r.get(), two)
}
test("should getAndTransform with dirty function #1") {
val r = Atomic(zero)
val result = r.getAndTransform {
def increment(y: A): A = valueFromInt(valueToInt(y) + 1)
(x: A) => increment(x)
}
assertEquals(result, zero)
assertEquals(r.get(), one)
}
test("should getAndTransform with dirty function #2") {
val r = Atomic(zero)
def increment(y: A): A = valueFromInt(valueToInt(y) + 1)
val result = r.getAndTransform(increment)
assertEquals(result, zero)
assertEquals(r.get(), one)
}
test("should getAndTransform with dirty function #3") {
val r = Atomic(zero)
val result = r.getAndTransform { x =>
try valueFromInt(valueToInt(x) + 1)
catch {
case ex if NonFatal(ex) =>
x
}
}
assertEquals(result, zero)
assertEquals(r.get(), one)
}
test("should getAndTransform with dirty self") {
@nowarn var inst = Atomic(zero)
def r = inst
assertEquals(r.get(), zero)
assert(r.getAndTransform(x => valueFromInt(valueToInt(x) + 1)) == zero)
assert(r.getAndTransform(x => valueFromInt(valueToInt(x) + 1)) == one)
assertEquals(r.get(), two)
}
// --
test("should transformAndExtract with clean arguments") {
val r = Atomic(zero)
assertEquals(r.get(), zero)
assert(r.transformAndExtract(x => (x, valueFromInt(valueToInt(x) + 1))) == zero)
assert(r.transformAndExtract(x => (x, valueFromInt(valueToInt(x) + 1))) == one)
assertEquals(r.get(), two)
}
test("should transformAndExtract with dirty function #1") {
val r = Atomic(zero)
val result = r.transformAndExtract {
def increment(y: A): A = valueFromInt(valueToInt(y) + 1)
(x: A) => (x, increment(x))
}
assertEquals(result, zero)
assertEquals(r.get(), one)
}
test("should transformAndExtract with dirty function #2") {
val r = Atomic(zero)
def increment(y: A) = (y, valueFromInt(valueToInt(y) + 1))
val result = r.transformAndExtract(increment)
assertEquals(result, zero)
assertEquals(r.get(), one)
}
test("should transformAndExtract with dirty function #3") {
val r = Atomic(zero)
val result = r.transformAndExtract { x =>
try {
(x, valueFromInt(valueToInt(x) + 1))
} catch {
case ex if NonFatal(ex) =>
(x, x)
}
}
assertEquals(result, zero)
assertEquals(r.get(), one)
}
test("should transformAndExtract with dirty self") {
@nowarn var inst = Atomic(zero)
def r = inst
assertEquals(r.get(), zero)
assert(r.transformAndExtract(x => (x, valueFromInt(valueToInt(x) + 1))) == zero)
assert(r.transformAndExtract(x => (x, valueFromInt(valueToInt(x) + 1))) == one)
assertEquals(r.get(), two)
}
test("should lazySet") {
val r = Atomic(zero)
assertEquals(r.get(), zero)
r.lazySet(one)
assertEquals(r.get(), one)
}
}
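// What follows is the exhaustive test matrix: one concrete suite object per
// combination of value type (Any/Boolean/NumberAny/Float/Double/Short/Byte/
// Char/Int/Long), padding strategy, and code path (Java 8 intrinsics,
// Java 7 Unsafe-only, "Java X" safe fallback). The repetition is deliberate
// so each combination surfaces as its own named suite.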
// -- NoPadding (Java 8)
object GenericAtomicAnyNoPadding
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
NoPadding,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicBooleanNoPadding
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
NoPadding,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicNumberAnyNoPadding
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
NoPadding,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicFloatNoPadding
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
NoPadding,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicDoubleNoPadding
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
NoPadding,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicShortNoPadding
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
NoPadding,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicByteNoPadding
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
NoPadding,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicCharNoPadding
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
NoPadding,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicIntNoPadding
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
NoPadding,
x => x,
x => x,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicLongNoPadding
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
NoPadding,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
// -- Left64 (Java 8)
object GenericAtomicAnyLeft64
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Left64,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicBooleanLeft64
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Left64,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicNumberAnyLeft64
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Left64,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicFloatLeft64
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Left64,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicDoubleLeft64
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Left64,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicShortLeft64
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Left64,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicByteLeft64
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Left64,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicCharLeft64
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Left64,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicIntLeft64
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Left64,
x => x,
x => x,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicLongLeft64
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Left64,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
// -- Right64 (Java 8)
object GenericAtomicAnyRight64
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Right64,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicBooleanRight64
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Right64,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicNumberAnyRight64
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Right64,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicFloatRight64
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Right64,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicDoubleRight64
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Right64,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicShortRight64
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Right64,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicByteRight64
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Right64,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicCharRight64
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Right64,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicIntRight64
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Right64,
x => x,
x => x,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicLongRight64
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Right64,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
// -- LeftRight128 (Java 8)
object GenericAtomicAnyLeftRight128
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
LeftRight128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicBooleanLeftRight128
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
LeftRight128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicNumberAnyLeftRight128
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
LeftRight128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicFloatLeftRight128
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
LeftRight128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicDoubleLeftRight128
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
LeftRight128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicShortLeftRight128
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
LeftRight128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicByteLeftRight128
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
LeftRight128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicCharLeftRight128
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
LeftRight128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicIntLeftRight128
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
LeftRight128,
x => x,
x => x,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicLongLeftRight128
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
LeftRight128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
// -- Left128 (Java 8)
object GenericAtomicAnyLeft128
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Left128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicBooleanLeft128
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Left128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicNumberAnyLeft128
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Left128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicFloatLeft128
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Left128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicDoubleLeft128
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Left128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicShortLeft128
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Left128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicByteLeft128
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Left128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicCharLeft128
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Left128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicIntLeft128
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Left128,
x => x,
x => x,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicLongLeft128
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Left128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
// -- Right128 (Java 8)
object GenericAtomicAnyRight128
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Right128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicBooleanRight128
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Right128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicNumberAnyRight128
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Right128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicFloatRight128
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Right128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicDoubleRight128
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Right128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicShortRight128
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Right128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicByteRight128
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Right128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicCharRight128
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Right128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicIntRight128
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Right128,
x => x,
x => x,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicLongRight128
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Right128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
// -- LeftRight256 (Java 8)
object GenericAtomicAnyLeftRight256
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
LeftRight256,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicBooleanLeftRight256
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
LeftRight256,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicNumberAnyLeftRight256
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
LeftRight256,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicFloatLeftRight256
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
LeftRight256,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicDoubleLeftRight256
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
LeftRight256,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicShortLeftRight256
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
LeftRight256,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicByteLeftRight256
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
LeftRight256,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicCharLeftRight256
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
LeftRight256,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicIntLeftRight256
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
LeftRight256,
x => x,
x => x,
allowPlatformIntrinsics = true,
allowUnsafe = true)
object GenericAtomicLongLeftRight256
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
LeftRight256,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = true,
allowUnsafe = true)
// ----------------- Java 7
// -- NoPadding (Java 7)
object GenericAtomicAnyNoPaddingJava7Suite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
NoPadding,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicBooleanNoPaddingJava7Suite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
NoPadding,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicNumberAnyNoPaddingJava7Suite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
NoPadding,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicFloatNoPaddingJava7Suite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
NoPadding,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicDoubleNoPaddingJava7Suite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
NoPadding,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicShortNoPaddingJava7Suite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
NoPadding,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicByteNoPaddingJava7Suite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
NoPadding,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicCharNoPaddingJava7Suite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
NoPadding,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicIntNoPaddingJava7Suite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
NoPadding,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicLongNoPaddingJava7Suite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
NoPadding,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
// -- Left64 (Java 7)
object GenericAtomicAnyLeft64Java7Suite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Left64,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicBooleanLeft64Java7Suite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Left64,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicNumberAnyLeft64Java7Suite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Left64,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicFloatLeft64Java7Suite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Left64,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicDoubleLeft64Java7Suite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Left64,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicShortLeft64Java7Suite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Left64,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicByteLeft64Java7Suite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Left64,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicCharLeft64Java7Suite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Left64,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicIntLeft64Java7Suite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Left64,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicLongLeft64Java7Suite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Left64,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
// -- Right64 (Java 7)
object GenericAtomicAnyRight64Java7Suite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Right64,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicBooleanRight64Java7Suite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Right64,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicNumberAnyRight64Java7Suite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Right64,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicFloatRight64Java7Suite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Right64,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicDoubleRight64Java7Suite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Right64,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicShortRight64Java7Suite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Right64,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicByteRight64Java7Suite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Right64,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicCharRight64Java7Suite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Right64,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicIntRight64Java7Suite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Right64,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicLongRight64Java7Suite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Right64,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
// -- LeftRight128 (Java 7)
object GenericAtomicAnyLeftRight128Java7Suite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
LeftRight128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicBooleanLeftRight128Java7Suite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
LeftRight128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicNumberAnyLeftRight128Java7Suite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
LeftRight128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicFloatLeftRight128Java7Suite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
LeftRight128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicDoubleLeftRight128Java7Suite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
LeftRight128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicShortLeftRight128Java7Suite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
LeftRight128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicByteLeftRight128Java7Suite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
LeftRight128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicCharLeftRight128Java7Suite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
LeftRight128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicIntLeftRight128Java7Suite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
LeftRight128,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicLongLeftRight128Java7Suite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
LeftRight128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
// -- Left128 (Java 7)
object GenericAtomicAnyLeft128Java7Suite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Left128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicBooleanLeft128Java7Suite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Left128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicNumberAnyLeft128Java7Suite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Left128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicFloatLeft128Java7Suite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Left128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicDoubleLeft128Java7Suite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Left128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicShortLeft128Java7Suite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Left128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicByteLeft128Java7Suite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Left128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicCharLeft128Java7Suite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Left128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicIntLeft128Java7Suite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Left128,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicLongLeft128Java7Suite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Left128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
// -- Right128 (Java 7)
object GenericAtomicAnyRight128Java7Suite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Right128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicBooleanRight128Java7Suite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Right128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicNumberAnyRight128Java7Suite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Right128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicFloatRight128Java7Suite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Right128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicDoubleRight128Java7Suite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Right128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicShortRight128Java7Suite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Right128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicByteRight128Java7Suite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Right128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicCharRight128Java7Suite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Right128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicIntRight128Java7Suite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Right128,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicLongRight128Java7Suite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Right128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
// -- LeftRight256 (Java 7)
object GenericAtomicAnyLeftRight256Java7Suite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
LeftRight256,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicBooleanLeftRight256Java7Suite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
LeftRight256,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicNumberAnyLeftRight256Java7Suite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
LeftRight256,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicFloatLeftRight256Java7Suite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
LeftRight256,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicDoubleLeftRight256Java7Suite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
LeftRight256,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicShortLeftRight256Java7Suite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
LeftRight256,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicByteLeftRight256Java7Suite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
LeftRight256,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicCharLeftRight256Java7Suite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
LeftRight256,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicIntLeftRight256Java7Suite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
LeftRight256,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = true)
object GenericAtomicLongLeftRight256Java7Suite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
LeftRight256,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = true)
// ----------------- Java X
// -- NoPadding (Java X)
object GenericAtomicAnyNoPaddingJavaXSuite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
NoPadding,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicBooleanNoPaddingJavaXSuite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
NoPadding,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicNumberAnyNoPaddingJavaXSuite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
NoPadding,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicFloatNoPaddingJavaXSuite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
NoPadding,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicDoubleNoPaddingJavaXSuite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
NoPadding,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicShortNoPaddingJavaXSuite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
NoPadding,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicByteNoPaddingJavaXSuite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
NoPadding,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicCharNoPaddingJavaXSuite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
NoPadding,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicIntNoPaddingJavaXSuite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
NoPadding,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicLongNoPaddingJavaXSuite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
NoPadding,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
// -- Left64 (Java X)
object GenericAtomicAnyLeft64JavaXSuite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Left64,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicBooleanLeft64JavaXSuite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Left64,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicNumberAnyLeft64JavaXSuite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Left64,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicFloatLeft64JavaXSuite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Left64,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicDoubleLeft64JavaXSuite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Left64,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicShortLeft64JavaXSuite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Left64,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicByteLeft64JavaXSuite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Left64,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicCharLeft64JavaXSuite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Left64,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicIntLeft64JavaXSuite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Left64,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicLongLeft64JavaXSuite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Left64,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
// -- Right64 (Java X)
object GenericAtomicAnyRight64JavaXSuite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Right64,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicBooleanRight64JavaXSuite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Right64,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicNumberAnyRight64JavaXSuite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Right64,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicFloatRight64JavaXSuite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Right64,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicDoubleRight64JavaXSuite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Right64,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicShortRight64JavaXSuite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Right64,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicByteRight64JavaXSuite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Right64,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicCharRight64JavaXSuite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Right64,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicIntRight64JavaXSuite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Right64,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicLongRight64JavaXSuite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Right64,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
// -- LeftRight128 (Java X)
object GenericAtomicAnyLeftRight128JavaXSuite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
LeftRight128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicBooleanLeftRight128JavaXSuite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
LeftRight128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicNumberAnyLeftRight128JavaXSuite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
LeftRight128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicFloatLeftRight128JavaXSuite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
LeftRight128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicDoubleLeftRight128JavaXSuite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
LeftRight128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicShortLeftRight128JavaXSuite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
LeftRight128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicByteLeftRight128JavaXSuite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
LeftRight128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicCharLeftRight128JavaXSuite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
LeftRight128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicIntLeftRight128JavaXSuite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
LeftRight128,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicLongLeftRight128JavaXSuite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
LeftRight128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
// -- Left128 (Java X)
object GenericAtomicAnyLeft128JavaXSuite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Left128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicBooleanLeft128JavaXSuite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Left128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicNumberAnyLeft128JavaXSuite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Left128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicFloatLeft128JavaXSuite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Left128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicDoubleLeft128JavaXSuite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Left128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicShortLeft128JavaXSuite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Left128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicByteLeft128JavaXSuite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Left128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicCharLeft128JavaXSuite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Left128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicIntLeft128JavaXSuite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Left128,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicLongLeft128JavaXSuite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Left128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
// -- Right128 (Java X)
object GenericAtomicAnyRight128JavaXSuite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
Right128,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicBooleanRight128JavaXSuite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
Right128,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicNumberAnyRight128JavaXSuite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
Right128,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicFloatRight128JavaXSuite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
Right128,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicDoubleRight128JavaXSuite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
Right128,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicShortRight128JavaXSuite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
Right128,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicByteRight128JavaXSuite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
Right128,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicCharRight128JavaXSuite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
Right128,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicIntRight128JavaXSuite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
Right128,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicLongRight128JavaXSuite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
Right128,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
// -- LeftRight256 (Java X)
object GenericAtomicAnyLeftRight256JavaXSuite
extends GenericAtomicSuite[String, AtomicAny[String]](
Atomic.builderFor(""),
LeftRight256,
x => x.toString,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicBooleanLeftRight256JavaXSuite
extends GenericAtomicSuite[Boolean, AtomicBoolean](
Atomic.builderFor(true),
LeftRight256,
x => if (x == 1) true else false,
x => if (x) 1 else 0,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicNumberAnyLeftRight256JavaXSuite
extends GenericAtomicSuite[BoxedLong, AtomicNumberAny[BoxedLong]](
AtomicBuilder.AtomicNumberBuilder[BoxedLong],
LeftRight256,
x => BoxedLong(x.toLong),
x => x.value.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicFloatLeftRight256JavaXSuite
extends GenericAtomicSuite[Float, AtomicFloat](
Atomic.builderFor(0.0f),
LeftRight256,
x => x.toFloat,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicDoubleLeftRight256JavaXSuite
extends GenericAtomicSuite[Double, AtomicDouble](
Atomic.builderFor(0.toDouble),
LeftRight256,
x => x.toDouble,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicShortLeftRight256JavaXSuite
extends GenericAtomicSuite[Short, AtomicShort](
Atomic.builderFor(0.toShort),
LeftRight256,
x => x.toShort,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicByteLeftRight256JavaXSuite
extends GenericAtomicSuite[Byte, AtomicByte](
Atomic.builderFor(0.toByte),
LeftRight256,
x => x.toByte,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicCharLeftRight256JavaXSuite
extends GenericAtomicSuite[Char, AtomicChar](
Atomic.builderFor(0.toChar),
LeftRight256,
x => x.toChar,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicIntLeftRight256JavaXSuite
extends GenericAtomicSuite[Int, AtomicInt](
Atomic.builderFor(0),
LeftRight256,
x => x,
x => x,
allowPlatformIntrinsics = false,
allowUnsafe = false)
object GenericAtomicLongLeftRight256JavaXSuite
extends GenericAtomicSuite[Long, AtomicLong](
Atomic.builderFor(0.toLong),
LeftRight256,
x => x.toLong,
x => x.toInt,
allowPlatformIntrinsics = false,
allowUnsafe = false)
| monixio/monix | monix-execution/shared/src/test/scala/monix/execution/atomic/GenericAtomicSuite.scala | Scala | apache-2.0 | 59,665 |
import sbt._
class TestCompatProject(info: ProjectInfo) extends DefaultProject(info)
{
val testInterface = "org.scala-tools.testing" % "test-interface" % "0.4" % "provided"
val scalacheck = "org.scala-tools.testing" % "scalacheck" % "1.5" % "provided"
val scalatest = "org.scalatest" % "scalatest" % "1.0" % "provided"
val specs = "org.scala-tools.testing" % "specs" % "1.6.0" % "provided"
override def compatTestFramework = Set()
/* Additional resources to include in the produced jar.*/
def extraResources = descendents(info.projectPath / "licenses", "*") +++ "LICENSE" +++ "NOTICE"
override def mainResources = super.mainResources +++ extraResources
// publishing
override def managedStyle = ManagedStyle.Maven
val publishTo = "Scala Tools Nexus" at "http://nexus.scala-tools.org/content/repositories/releases/"
Credentials(Path.userHome / ".ivy2" / ".credentials", log)
} | harrah/test-compat | project/build/TestCompatProject.scala | Scala | bsd-3-clause | 891 |
package io.getquill.context.sql.dsl
import io.getquill.context.sql.SqlContext
trait SqlDsl {
this: SqlContext[_, _] =>
implicit class Like(s1: String) {
def like(s2: String) = quote(infix"$s1 like $s2".as[Boolean])
}
}
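// Illustrative use (hypothetical schema; any Quill context mixing in SqlDsl
// works the same way): inside a quoted block, `like` lowers to a SQL LIKE
// predicate.
//   case class Person(name: String)
//   val q = quote { query[Person].filter(p => p.name like "%Joe%") }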
| mentegy/quill | quill-sql/src/main/scala/io/getquill/context/sql/dsl/SqlDsl.scala | Scala | apache-2.0 | 232 |
/*                     __                                               *\
**     ________ ___   / /  ___     Scala API                            **
**    / __/ __// _ | / /  / _ |    (c) 2002-2013, LAMP/EPFL             **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
package scala
import scala.collection.{ mutable, immutable, StrictOptimizedIterableOps, SpecificIterableFactory, View }
import java.lang.reflect.{ Method => JMethod, Field => JField }
import scala.reflect.NameTransformer._
import scala.util.matching.Regex
/** Defines a finite set of values specific to the enumeration. Typically
* these values enumerate all possible forms something can take and provide
* a lightweight alternative to case classes.
*
* Each call to a `Value` method adds a new unique value to the enumeration.
* To be accessible, these values are usually defined as `val` members of
* the enumeration.
*
* All values in an enumeration share a common, unique type defined as the
* `Value` type member of the enumeration (`Value` selected on the stable
* identifier path of the enumeration instance).
*
* @example {{{
* // Define a new enumeration with a type alias and work with the full set of enumerated values
* object WeekDay extends Enumeration {
* type WeekDay = Value
* val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
* }
* import WeekDay._
*
* def isWorkingDay(d: WeekDay) = ! (d == Sat || d == Sun)
*
* WeekDay.values filter isWorkingDay foreach println
* // output:
* // Mon
* // Tue
* // Wed
* // Thu
* // Fri
* }}}
*
* @example {{{
* // Example of adding attributes to an enumeration by extending the Enumeration.Val class
* object Planet extends Enumeration {
* protected case class Val(mass: Double, radius: Double) extends super.Val {
* def surfaceGravity: Double = Planet.G * mass / (radius * radius)
* def surfaceWeight(otherMass: Double): Double = otherMass * surfaceGravity
* }
* implicit def valueToPlanetVal(x: Value): Val = x.asInstanceOf[Val]
*
* val G: Double = 6.67300E-11
* val Mercury = Val(3.303e+23, 2.4397e6)
* val Venus = Val(4.869e+24, 6.0518e6)
* val Earth = Val(5.976e+24, 6.37814e6)
* val Mars = Val(6.421e+23, 3.3972e6)
* val Jupiter = Val(1.9e+27, 7.1492e7)
* val Saturn = Val(5.688e+26, 6.0268e7)
* val Uranus = Val(8.686e+25, 2.5559e7)
* val Neptune = Val(1.024e+26, 2.4746e7)
* }
*
* println(Planet.values.filter(_.radius > 7.0e6))
* // output:
* // Planet.ValueSet(Jupiter, Saturn, Uranus, Neptune)
* }}}
*
* @param initial The initial value from which to count the integers that
* identifies values at run-time.
* @author Matthias Zenger
*/
@SerialVersionUID(8476000850333817230L)
abstract class Enumeration (initial: Int) extends Serializable {
thisenum =>
def this() = this(0)
/* Note that `readResolve` cannot be private, since otherwise
the JVM does not invoke it when deserializing subclasses. */
protected def readResolve(): AnyRef = thisenum.getClass.getField(MODULE_INSTANCE_NAME).get(null)
/** The name of this enumeration.
*/
override def toString =
((getClass.getName stripSuffix MODULE_SUFFIX_STRING split '.').last split
Regex.quote(NAME_JOIN_STRING)).last
/** The mapping from the integer used to identify values to the actual
* values. */
private val vmap: mutable.Map[Int, Value] = new mutable.HashMap
/** The cache listing all values of this enumeration. */
@transient private var vset: ValueSet = null
@transient @volatile private var vsetDefined = false
/** The mapping from the integer used to identify values to their
* names. */
private val nmap: mutable.Map[Int, String] = new mutable.HashMap
/** The values of this enumeration as a set.
*/
def values: ValueSet = {
if (!vsetDefined) {
vset = (ValueSet.newBuilder ++= vmap.values).result()
vsetDefined = true
}
vset
}
/** The integer to use to identify the next created value. */
protected var nextId: Int = initial
/** The string to use to name the next created value. */
protected var nextName: Iterator[String] = _
private def nextNameOrNull =
if (nextName != null && nextName.hasNext) nextName.next() else null
/** The highest integer amongst those used to identify values in this
* enumeration. */
private var topId = initial
/** The lowest integer amongst those used to identify values in this
* enumeration, but no higher than 0. */
private var bottomId = if(initial < 0) initial else 0
/** The one higher than the highest integer amongst those used to identify
* values in this enumeration. */
final def maxId = topId
/** The value of this enumeration with given id `x`
*/
final def apply(x: Int): Value = vmap(x)
/** Return a `Value` from this `Enumeration` whose name matches
* the argument `s`. The names are determined automatically via reflection.
*
* @param s an `Enumeration` name
* @return the `Value` of this `Enumeration` if its name matches `s`
* @throws NoSuchElementException if no `Value` with a matching
* name is in this `Enumeration`
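   * @example `WeekDay.withName("Mon")` yields the `Mon` value, given the
   *          `WeekDay` enumeration from the class-level example above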
*/
final def withName(s: String): Value = values.find(_.toString == s).getOrElse(
throw new NoSuchElementException(s"No value found for '$s'"))
/** Creates a fresh value, part of this enumeration. */
protected final def Value: Value = Value(nextId)
/** Creates a fresh value, part of this enumeration, identified by the
* integer `i`.
*
* @param i An integer that identifies this value at run-time. It must be
* unique amongst all values of the enumeration.
* @return Fresh value identified by `i`.
*/
protected final def Value(i: Int): Value = Value(i, nextNameOrNull)
/** Creates a fresh value, part of this enumeration, called `name`.
*
* @param name A human-readable name for that value.
* @return Fresh value called `name`.
*/
protected final def Value(name: String): Value = Value(nextId, name)
/** Creates a fresh value, part of this enumeration, called `name`
* and identified by the integer `i`.
*
* @param i An integer that identifies this value at run-time. It must be
* unique amongst all values of the enumeration.
* @param name A human-readable name for that value.
* @return Fresh value with the provided identifier `i` and name `name`.
*/
protected final def Value(i: Int, name: String): Value = new Val(i, name)
private def populateNameMap() {
val fields: Array[JField] = getClass.getDeclaredFields
def isValDef(m: JMethod): Boolean = fields exists (fd => fd.getName == m.getName && fd.getType == m.getReturnType)
// The list of possible Value methods: 0-args which return a conforming type
val methods: Array[JMethod] = getClass.getMethods filter (m => m.getParameterTypes.isEmpty &&
classOf[Value].isAssignableFrom(m.getReturnType) &&
m.getDeclaringClass != classOf[Enumeration] &&
isValDef(m))
methods foreach { m =>
val name = m.getName
// invoke method to obtain actual `Value` instance
val value = m.invoke(this).asInstanceOf[Value]
// verify that outer points to the correct Enumeration: ticket #3616.
if (value.outerEnum eq thisenum) {
val id = Int.unbox(classOf[Val] getMethod "id" invoke value)
nmap += ((id, name))
}
}
}
/* Obtains the name for the value with id `i`. If no name is cached
* in `nmap`, it populates `nmap` using reflection.
*/
private def nameOf(i: Int): String = synchronized { nmap.getOrElse(i, { populateNameMap() ; nmap(i) }) }
/** The type of the enumerated values. */
@SerialVersionUID(7091335633555234129L)
abstract class Value extends Ordered[Value] with Serializable {
/** the id and bit location of this enumeration value */
def id: Int
/** a marker so we can tell whose values belong to whom come reflective-naming time */
private[Enumeration] val outerEnum = thisenum
override def compare(that: Value): Int =
if (this.id < that.id) -1
else if (this.id == that.id) 0
else 1
override def equals(other: Any) = other match {
case that: Enumeration#Value => (outerEnum eq that.outerEnum) && (id == that.id)
case _ => false
}
override def hashCode: Int = id.##
/** Create a ValueSet which contains this value and another one */
def + (v: Value) = ValueSet(this, v)
}
/** A class implementing the [[scala.Enumeration.Value]] type. This class
* can be overridden to change the enumeration's naming and integer
* identification behaviour.
*/
@SerialVersionUID(0 - 3501153230598116017L)
protected class Val(i: Int, name: String) extends Value with Serializable {
def this(i: Int) = this(i, nextNameOrNull)
def this(name: String) = this(nextId, name)
def this() = this(nextId)
assert(!vmap.isDefinedAt(i), "Duplicate id: " + i)
vmap(i) = this
vsetDefined = false
nextId = i + 1
if (nextId > topId) topId = nextId
if (i < bottomId) bottomId = i
def id = i
override def toString() =
if (name != null) name
else try thisenum.nameOf(i)
catch { case _: NoSuchElementException => "<Invalid enum: no field for #" + i + ">" }
protected def readResolve(): AnyRef = {
val enum = thisenum.readResolve().asInstanceOf[Enumeration]
if (enum.vmap == null) this
else enum.vmap(i)
}
}
/** An ordering by id for values of this set */
object ValueOrdering extends Ordering[Value] {
def compare(x: Value, y: Value): Int = x compare y
}
/** A class for sets of values.
* Iterating through this set will yield values in increasing order of their ids.
*
* @param nnIds The set of ids of values (adjusted so that the lowest value does
* not fall below zero), organized as a `BitSet`.
* @define Coll `collection.immutable.SortedSet`
*/
class ValueSet private[ValueSet] (private[this] var nnIds: immutable.BitSet)
extends immutable.AbstractSet[Value]
with immutable.SortedSet[Value]
with immutable.SetOps[Value, immutable.Set, ValueSet]
with StrictOptimizedIterableOps[Value, immutable.Set, ValueSet]
with Serializable {
implicit def ordering: Ordering[Value] = ValueOrdering
def rangeImpl(from: Option[Value], until: Option[Value]): ValueSet =
new ValueSet(nnIds.rangeImpl(from.map(_.id - bottomId), until.map(_.id - bottomId)))
override def empty = ValueSet.empty
def contains(v: Value) = nnIds contains (v.id - bottomId)
def incl (value: Value) = new ValueSet(nnIds + (value.id - bottomId))
def excl (value: Value) = new ValueSet(nnIds - (value.id - bottomId))
def iterator = nnIds.iterator map (id => thisenum.apply(bottomId + id))
override def iteratorFrom(start: Value) = nnIds iteratorFrom start.id map (id => thisenum.apply(bottomId + id))
override def className = thisenum + ".ValueSet"
/** Creates a bit mask for the zero-adjusted ids in this set as a
* new array of longs */
def toBitMask: Array[Long] = nnIds.toBitMask
override protected def fromSpecificIterable(coll: Iterable[Value]) = ValueSet.fromSpecific(coll)
override protected def newSpecificBuilder() = ValueSet.newBuilder
def map(f: Value => Value): ValueSet = fromSpecificIterable(new View.Map(toIterable, f))
def flatMap(f: Value => IterableOnce[Value]): ValueSet = fromSpecificIterable(new View.FlatMap(toIterable, f))
}
/** A factory object for value sets */
object ValueSet extends SpecificIterableFactory[Value, ValueSet] {
/** The empty value set */
val empty = new ValueSet(immutable.BitSet.empty)
/** A value set containing all the values for the zero-adjusted ids
* corresponding to the bits in an array */
def fromBitMask(elems: Array[Long]): ValueSet = new ValueSet(immutable.BitSet.fromBitMask(elems))
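    // Round-trip sketch implied by the two definitions:
    //   ValueSet.fromBitMask(vs.toBitMask) == vs   // for any ValueSet vs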
/** A builder object for value sets */
def newBuilder: mutable.Builder[Value, ValueSet] = new mutable.Builder[Value, ValueSet] {
private[this] val b = new mutable.BitSet
def addOne (x: Value) = { b += (x.id - bottomId); this }
def clear() = b.clear()
def result() = new ValueSet(b.toImmutable)
}
def fromSpecific(it: IterableOnce[Value]): ValueSet =
newBuilder.addAll(it).result()
}
}
| rorygraves/perf_tester | corpus/scala-library/src/main/scala/Enumeration.scala | Scala | apache-2.0 | 12,889 |
package ca.pgx.common.events
import ca.pgx.common.mixins.callback.EnumeratedFunction
import net.liftweb.json.JsonAST.JValue
/**
* Type of action taken when event was processed.
*/
object EventAction extends EnumeratedFunction[String, Unit] {
type EventAction = Value
val EMAIL, LOG, REST_CALL, OS_CMD = Value
override val functionMappings =
Map(
EMAIL -> ((arg: String) => println("FROM EMAIL: " + arg)),
LOG -> ((arg: String) => println("FROM LOG: " + arg))
)
override def apply(enum: Value, arg: String): Unit =
functionMappings.getOrElse(enum, (arg: String) => ())(arg)
}
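// Behaviour sketch, following directly from `functionMappings` above:
//   EventAction(EventAction.EMAIL, "disk full")  // prints "FROM EMAIL: disk full"
//   EventAction(EventAction.LOG, "disk full")    // prints "FROM LOG: disk full"
//   EventAction(EventAction.OS_CMD, "ls")        // unmapped, falls back to ()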
| pgxcentre/eventhub | common/src/main/scala/ca/pgx/common/events/EventAction.scala | Scala | apache-2.0 | 615 |
package com.payu.shorturl.util
import com.typesafe.scalalogging.LazyLogging
trait Logging extends LazyLogging | felipehaack/shorturl | payu-api/app/com/payu/shorturl/util/Logging.scala | Scala | gpl-3.0 | 111 |
/************************************************************************\
** Project                                                              **
**       ______  ______   __    ______    ____                          **
**      / ____/ / __  /  / /   / __  /   / __/     (c) 2011-2014        **
**     / /__   / /_/ /  / /   / /_/ /   / /_                            **
**    /___  / / ____/  / /   / __  /   / __/   Erik Osheim, Tom Switzer **
**   ____/ / / /      / /   / / | |   / /__                             **
**  /_____/ /_/      /_/   /_/  |_|  /____/    All rights reserved.     **
**                                                                      **
**      Redistribution and use permitted under the MIT license.        **
**                                                                      **
\************************************************************************/
package spire
package example
/**
* This is a Ziggurat generator of MATLAB files for data analysis via histogram and Distribution Fitting App (dfittool).
*
* <p><b>Reference: </b>
* George Marsaglia, Wai Wan Tsang:
* "The Ziggurat Method for Generating Random Variables"
* <i>Journal of Statistical Software,</i> Vol. 5, Issue 8, October 2000.
*
* @see <a href="http://www.jstatsoft.org/v05/i08">Ziggurat Paper</a>
* @see <a href="http://en.wikipedia.org/wiki/Ziggurat_algorithm">Ziggurat algorithm @ Wikipedia</a>
* @author <a href="mailto:[email protected]">Dušan Kysel</a>
*/
object ZigguratGenerator {
def main(a: Array[String]) = {
val g: spire.random.Generator = spire.random.rng.Well512a.fromTime()
def samples = 200000
def generate(d : spire.random.Generator => Double, n : Int) = Array.tabulate(n)(x => d(g))
val rnor = generate(spire.random.Ziggurat.rnor, samples)
val rexp = generate(spire.random.Ziggurat.rexp, samples)
val fnor = new java.io.PrintStream(new java.io.FileOutputStream("zigguratrnor.m"))
val fexp = new java.io.PrintStream(new java.io.FileOutputStream("zigguratrexp.m"))
fnor.print("rnor = [")
for (i <- rnor) { fnor.print(i); fnor.print(", ") }
fnor.println("]")
fnor.println("x = -5:0.05:5")
fnor.println("hist(rnor,x)")
fnor.println("dfittool(rnor)")
fnor.close()
fexp.print("rexp = [")
for (i <- rexp) { fexp.print(i); fexp.print(", ") }
fexp.println("]")
fexp.println("x = -1:0.05:10")
fexp.println("hist(rexp,x)")
fexp.println("dfittool(rexp)")
fexp.close()
}
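  // Running this object (invocation illustrative, e.g. `sbt examples/run`)
  // writes zigguratrnor.m and zigguratrexp.m to the working directory; each
  // script histograms the samples and opens dfittool for distribution fitting.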
} | lrytz/spire | examples/src/main/scala/spire/example/ZigguratGenerator.scala | Scala | mit | 2,514 |
/*
* Copyright 2015 Daniel Spiewak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalaz.stream
package parsers
import org.specs2.matcher.Matcher
import org.specs2.mutable._
import scalaz._
import scalaz.std.anyVal._
import scalaz.std.string._
import scalaz.syntax.equal._
import scala.collection.SeqLike
import scala.collection.generic.CanBuildFrom
import scala.collection.immutable.StringOps
import scala.util.matching.Regex
object ParserSpecs extends Specification {
import Parser.{Error, Completed, literalRichParser}
"terminal parsers" should {
"parse the empty string" in {
val epsilon: Parser[Char, Unit] = Parser.completed(())
epsilon must parseComplete("").as(())
}
"parse a single token" in {
val a: Parser[Char, Char] = 'a'
a must parseComplete("a").as('a')
}
"produce an error when so defined" in {
val e: Parser[Char, Unit] = Parser.error("oogly boogly")
e must parseError("fubar").as("oogly boogly")
}
}
"parentheses matching" should {
lazy val grammar: Parser[Char, Int] = (
'(' ~> grammar <~ ')' ^^ (1 +)
| Parser.completed(0)
)
"parse the empty string" in {
grammar must parseComplete("").as(0)
}
"parse a single set of parentheses" in {
grammar must parseComplete("()").as(1)
}
"parse four nested sets of parentheses" in {
grammar must parseComplete("(((())))").as(4)
}
"fail to parse a single mismatched paren" in {
grammar must parseError("(").as("unexpected end of stream; expected )")
}
"fail to parse three mismatched parens with one match" in {
grammar must parseError("(((()").as("unexpected end of stream; expected )")
}
"fail to parse a mismatched closing paren" in {
grammar must parseError(")").as("expected (, got )")
}
}
"an expression evaluator" should {
sealed trait ExprToken
object ExprToken {
final case class Num(n: Int) extends ExprToken
case object Plus extends ExprToken
case object Minus extends ExprToken
case object Times extends ExprToken
case object Div extends ExprToken
case object LParen extends ExprToken
case object RParen extends ExprToken
}
implicit def exprTokenEq[T <: ExprToken]: Equal[T] = Equal.equalA // because I'm lazy
implicit def exprTokenShow[T <: ExprToken]: Show[T] = Show.showA // ditto!
import ExprToken._
    val rules: Map[Regex, List[String] => ExprToken] = Map(
      """\s*(\d+)""".r -> { case ns :: Nil => Num(ns.toInt) },
      """\s*\+""".r -> { _ => Plus },
      """\s*-""".r -> { _ => Minus },
      """\s*\*""".r -> { _ => Times },
      """\s*/""".r -> { _ => Div },
      """\s*\(""".r -> { _ => LParen },
      """\s*\)""".r -> { _ => RParen })
def exprTokenize(str: String): Seq[ExprToken] =
      regexTokenize(str, rules) collect { case \/-(et) => et }
// %%
lazy val expr: Parser[ExprToken, Int] = (
expr ~ Plus ~ term ^^ { (e1, _, e2) => e1 + e2 }
| expr ~ Minus ~ term ^^ { (e1, _, e2) => e1 - e2 }
| term
)
lazy val term: Parser[ExprToken, Int] = (
term ~ Times ~ value ^^ { (e1, _, e2) => e1 * e2 }
| term ~ Div ~ value ^^ { (e1, _, e2) => e1 / e2 }
| value
)
// type inference and invariance sort of failed me here...
lazy val value: Parser[ExprToken, Int] = (
(LParen: Parser[ExprToken, ExprToken]) ~> expr <~ RParen
| (Parser pattern { case Num(n) => n })
)
// %%
"tokenize a number" in {
exprTokenize("42") mustEqual Seq(Num(42))
}
"parse a number" in {
expr must parseComplete(exprTokenize("42")).as(42)
expr must parseComplete(exprTokenize("12")).as(12)
}
"parse a simple addition expression" in {
expr must parseComplete(exprTokenize("1 + 2")).as(3)
}
"parse a complex composition of all four operators" in {
expr must parseComplete(exprTokenize("228 * 4 + 12")).as(924)
expr must parseComplete(exprTokenize("123 + 228 * 4 + 12")).as(1047)
expr must parseComplete(exprTokenize("123 - 2 + 228 * 4 + 12")).as(1045)
expr must parseComplete(exprTokenize("123 - 2 + 228 * 4 + 12 / 4 + 79")).as(1115)
expr must parseComplete(exprTokenize("123 - 2 + 228 * 4 + 12 / 4 + 79 * 5")).as(1431)
}
// TODO more expr tests
}
// TODO maybe move this to a Util object? seems useful
  def parse[T, R](parser: Parser[T, R])(str: Seq[T]): Error[T, R] \/ Completed[T, R] = {
    def inner(str: Seq[T])(parser: Parser[T, R]): State[Parser.Cache[T], Error[T, R] \/ Completed[T, R]] = {
if (str.isEmpty) {
State state parser.complete()
} else {
parser match {
        case Completed(_) => State state -\/(Error("unexpected end of stream"))
        case e @ Error(_) => State state -\/(e)
case parser: Parser.Incomplete[T, R] =>
parser derive str.head flatMap inner(str.tail)
}
}
}
inner(str)(parser) eval Parser.Cache[T]
}
// TODO this also seems useful...
  def tokenize[Str[_] <: SeqLike[_, _], TokenIn, TokenOut, That <: TraversableOnce[TokenIn \/ TokenOut]](str: Str[TokenIn])(f: Str[TokenIn] => (TokenIn \/ TokenOut, Str[TokenIn]))(implicit cbf: CanBuildFrom[Str[TokenIn], TokenIn \/ TokenOut, That]): That = {
if (str.isEmpty) {
cbf().result
} else {
val (token, tail) = f(str)
val builder = cbf()
builder += token
builder ++= tokenize(tail)(f) // TODO it's never worse, tail-recurse!
builder.result
}
}
// TODO oh look, more useful stuff!
  def regexTokenize[T](str: String, rules: Map[Regex, List[String] => T]): Seq[Char \/ T] = {
def iseqAsCharSeq(seq: IndexedSeq[Char]): CharSequence = new CharSequence {
def charAt(i: Int) = seq(i)
def length = seq.length
def subSequence(start: Int, end: Int) = iseqAsCharSeq(seq.slice(start, end))
override def toString = seq.mkString
}
tokenize(str: IndexedSeq[Char]) { seq =>
val str = iseqAsCharSeq(seq)
// find the "first" regex that matches and apply its transform
val tokenM: Option[(T, IndexedSeq[Char])] = rules collectFirst {
case (regex, f) if (regex findPrefixMatchOf str).isDefined => {
val m = (regex findPrefixMatchOf str).get
(f(m.subgroups), m.after.toString: IndexedSeq[Char])
}
}
tokenM map {
      case (token, tail) => (\/-(token), tail)
    } getOrElse ((-\/(seq.head), seq.tail))
}
}
//
// custom matchers
//
def parseComplete[T](str: Seq[T]) = new {
def as[R: Equal](result: R): Matcher[Parser[T, R]] = {
def body(parser: Parser[T, R]) = {
parse(parser)(str) match {
          case \/-(Completed(r)) => r === result
          case -\/(_) => false
}
}
def error(parser: Parser[T, R]) = parse(parser)(str) match {
        case -\/(Error(str)) => s"produces error: $str"
        case \/-(Completed(r)) => s"produces result $r rather than expected $result"
}
(body _,
Function.const("parses successfully") _,
error _)
}
}
def parseError[T](str: Seq[T]) = new {
def as[R](msg: String): Matcher[Parser[T, R]] = {
def body(parser: Parser[T, R]) = {
parse(parser)(str) match {
          case \/-(Completed(r)) => false
          case -\/(Error(msg2)) => msg === msg2
}
}
def error(parser: Parser[T, R]) = parse(parser)(str) match {
        case -\/(Error(msg2)) => s"produced error '$msg2' and not '$msg'"
        case \/-(_) => "completed and did not error"
}
(body _,
Function.const(s"produces error $msg") _,
error _)
}
}
}
| djspiewak/sparse | src/test/scala/scalaz/stream/parsers/ParserSpecs.scala | Scala | apache-2.0 | 8,287 |
/*
 * Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.db
import slamdata.Predef._
/** Connection parameters for JDBC. */
final case class ConnectionInfo(
driverClassName: String,
url: String,
userName: String,
password: String)
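// Illustrative value (hypothetical H2 in-memory settings, not taken from any
// shipped configuration):
//   ConnectionInfo("org.h2.Driver", "jdbc:h2:mem:metastore", "sa", "")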
| jedesah/Quasar | core/src/main/scala/quasar/db/ConnectionInfo.scala | Scala | apache-2.0 | 804 |
package controllers
import java.util.UUID
import org.specs2.matcher.JsonMatchers
import org.specs2.specification.Scope
import play.api.libs.json.Json
import scala.concurrent.Future
import controllers.backend.PluginBackend
import com.overviewdocs.models.Plugin
import com.overviewdocs.test.factories.PodoFactory
class PluginControllerSpec extends ControllerSpecification with JsonMatchers {
trait BaseScope extends Scope {
val mockBackend = mock[PluginBackend]
val controller = new PluginController(mockBackend, fakeControllerComponents)
val factory = PodoFactory
}
"#index" should {
trait IndexScope extends BaseScope
"return some JSON" in new IndexScope {
val plugin = factory.plugin(name="n", description="d", url="http://u.org")
mockBackend.index returns Future.successful(Vector(plugin))
val result = controller.index(fakeAuthorizedRequest)
factory.plugin(name="foo")
h.status(result) must beEqualTo(h.OK)
h.contentType(result) must beSome("application/json")
val json = h.contentAsString(result)
json must /#(0) /("name" -> "n")
json must /#(0) /("description" -> "d")
json must /#(0) /("url" -> "http://u.org")
json must /#(0) /("id" -> plugin.id.toString())
}
}
"#create" should {
trait CreateScope extends BaseScope {
val request = fakeAuthorizedRequest.withFormUrlEncodedBody(
"name" -> "foo",
"description" -> "bar",
"url" -> "http://baz.org",
"serverUrlFromPlugin" -> "http://overview-web"
)
lazy val result = controller.create(request)
val plugin = factory.plugin(name="n", description="d", url="http://u.org", serverUrlFromPlugin=Some("http://overview-web"))
mockBackend.create(any) returns Future.successful(plugin)
}
"create a Plugin in the database" in new CreateScope {
result
there was one(mockBackend).create(Plugin.CreateAttributes(
name="foo",
description="bar",
url="http://baz.org",
serverUrlFromPlugin=Some("http://overview-web"),
autocreate=false,
autocreateOrder=0
))
}
"return the created Plugin" in new CreateScope {
val json = h.contentAsString(result)
json must /("id" -> plugin.id.toString())
json must /("name" -> "n")
json must /("description" -> "d")
json must /("url" -> "http://u.org")
json must /("serverUrlFromPlugin" -> "http://overview-web")
}
"return BadRequest for an invalid request" in new CreateScope {
override val request = fakeAuthorizedRequest.withFormUrlEncodedBody("foo" -> "bar")
h.status(result) must beEqualTo(h.BAD_REQUEST)
}
}
"#update" should {
trait UpdateScope extends BaseScope {
val plugin = factory.plugin(name="n", description="d", url="http://u.org")
val plugin2 = factory.plugin(name="n2", description="d2", url="http://u2.org", serverUrlFromPlugin=Some("http://overview-web"))
val pluginId = plugin.id
val request = fakeAuthorizedRequest.withFormUrlEncodedBody(
"name" -> "foo",
"description" -> "bar",
"url" -> "http://baz.org",
"serverUrlFromPlugin" -> "http://overview-web"
)
lazy val result = controller.update(pluginId)(request)
}
"update a Plugin in the database" in new UpdateScope {
mockBackend.update(any, any) returns Future.successful(Some(plugin2))
result
there was one(mockBackend).update(pluginId, Plugin.UpdateAttributes(
name="foo",
description="bar",
url="http://baz.org",
serverUrlFromPlugin=Some("http://overview-web"),
autocreate=false,
autocreateOrder=0
))
}
"return the updated Plugin" in new UpdateScope {
mockBackend.update(any, any) returns Future.successful(Some(plugin2))
val json = h.contentAsString(result)
json must /("id" -> plugin2.id.toString())
json must /("name" -> "n2")
json must /("description" -> "d2")
json must /("url" -> "http://u2.org")
json must /("serverUrlFromPlugin" -> "http://overview-web")
}
"returns 404 Not Found on wrong ID" in new UpdateScope {
mockBackend.update(any, any) returns Future.successful(None)
h.status(result) must beEqualTo(h.NOT_FOUND)
}
"returns 400 Bad Request on invalid form" in new UpdateScope {
override val request = fakeAuthorizedRequest.withFormUrlEncodedBody("foo" -> "bar")
h.status(result) must beEqualTo(h.BAD_REQUEST)
}
}
"#destroy" should {
trait DestroyScope extends BaseScope {
mockBackend.destroy(any) returns Future.unit
val pluginId = new UUID(1L, 2L)
val result = controller.destroy(pluginId)(fakeAuthorizedRequest)
}
"return 200 Ok" in new DestroyScope {
h.status(result) must beEqualTo(h.OK)
}
"delete the Plugin from the database" in new DestroyScope {
there was one(mockBackend).destroy(pluginId)
}
}
}
| overview/overview-server | web/test/controllers/PluginControllerSpec.scala | Scala | agpl-3.0 | 5,006 |
object Test {
def main(args: Array[String]): Unit = {
println((new Foo: Bar[Boolean]).value1)
println((new Foo: Bar[Boolean]).value2())
}
}
class Foo extends Baz with Bar[Boolean]
trait Bar[T] {
def value1: T
def value2(): T
}
class Baz {
def value1: Boolean = true
def value2(): Boolean = true
}
| som-snytt/dotty | tests/run/mixin-primitive-on-generic-2.scala | Scala | apache-2.0 | 321 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author Vamsi Thummala {[email protected]}, Copyright (C) 2013-2015
*
*/
package safe.safesets
import akka.actor.ActorRef
import safe.safelog.{Credential, ProofSubContext, StrLit}
import safe.safelang.model.{Principal, Subject, SignedCredential, UnsignedCredential}
object SafeSetsMessageProtocol {
sealed trait SafeSetsMessage
type SetId = String
case class Fetch(link: SetId) extends SafeSetsMessage
case class Render(
link: SetId
, setRefs: Set[SetId]
) extends SafeSetsMessage
case class RenderCompleted(
link: SetId
, proofContext: ProofSubContext
) extends SafeSetsMessage
// 1. master -> worker
case class FetchSet(
link: SetId
, originLink: SetId
, speaksForOriginLink: SetId
, speakerId: SetId
, subjectId: SetId
, setName: String
) extends SafeSetsMessage
object FetchSet {
def apply(link: SetId, originLink: SetId) = {
new FetchSet(link, originLink, "nil", "nil", "nil", "nil")
}
}
// 2. worker -> master (for the leaf nodes)
case class FetchIdSet(
link: SetId
, originLink: SetId
) extends SafeSetsMessage
case class PostSetWithName(
name: String
, unsignedCertificate: UnsignedCredential
, principal: Principal
) extends SafeSetsMessage
case class SimplePost(
id: SetId
, content: String
) extends SafeSetsMessage
case class SimpleGet(
id: SetId
) extends SafeSetsMessage
case class SimpleDelete(
id: SetId
) extends SafeSetsMessage
case class DeleteSet(
link: SetId
, principal: Principal
) extends SafeSetsMessage
case class DeleteSetWithName(
name: String
, principal: Principal
) extends SafeSetsMessage
// 3. master -> worker (if subject is already present in cache)
case class IdSet(
link: SetId
, subject: Subject
, originLink: SetId
) extends SafeSetsMessage
// 4. master -> worker (after fetch/post/delete is done)
case class IOCompleted(
setMap: Map[SetId, SignedCredential]
, originLink: SetId
) extends SafeSetsMessage
case class VerificationFailed(link: SetId) extends SafeSetsMessage
case class ParseFailed(link: SetId, msg: String) extends SafeSetsMessage
}
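// Example of the `FetchSet` companion above, which defaults the remaining
// fields to "nil":
//   import SafeSetsMessageProtocol._
//   FetchSet("setLink", "originLink")
//   // == FetchSet("setLink", "originLink", "nil", "nil", "nil", "nil")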
| wowmsi/safe | safe-lang/src/main/scala/safe/safesets/SafeSetsMessageProtocol.scala | Scala | apache-2.0 | 3,034 |
package dotty.tools.lispify
import dotty.tools.dotc.ast.untpd._
import dotty.tools.dotc.core.Constants.Constant
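// Pretty-prints untyped compiler trees in an s-expression style; e.g. a call
// tree for `f(1, 2)` renders roughly as `(f 1 2 )` via procApply below
// (illustrative -- exact spacing follows the debug helper, which appends a
// space after every token).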
object Lispyfy {
var shift = 0
def debug(tree: Any) = {
print(tree + " ")
}
def shiftLn(): Unit = {
println()
print(" " * shift)
}
def shiftLeft(): Unit = {
shift -= 4
}
def shiftRight(): Unit = {
shift += 4
}
// case class If[-T >: Untyped] private[ast] (cond: Tree[T], thenp: Tree[T], elsep: Tree[T])
def procIf(tree: If) = {
debug("(if")
expression(tree.cond)
shiftRight()
shiftLn()
expression(tree.thenp)
shiftLn()
expression(tree.elsep)
shiftLeft()
debug(")")
}
// case class Block[-T >: Untyped] private[ast] (stats: List[Tree[T]], expr: Tree[T])
def procBlock(xx: Block) = {
if (xx.stats.nonEmpty) {
debug("(begin")
shiftRight()
for (el <- xx.stats) {
shiftLn()
expression(el)
}
shiftLn()
expression(xx.expr)
shiftLeft()
debug(")")
} else {
expression(xx.expr)
}
}
// case class InfixOp(left: Tree, op: Name, right: Tree) extends OpTree
def procInfixOp(tree: InfixOp) = {
debug("(" + tree.op)
shiftRight()
expression(tree.left)
expression(tree.right)
shiftLeft()
debug(")")
}
// case class Apply[-T >: Untyped] private[ast] (fun: Tree[T], args: List[Tree[T]])
def procApply(tree: Apply) = {
debug("(" + (tree.fun match {case xx: Ident => xx.name}))
for (arg <- tree.args) {
expression(arg)
}
debug(")")
}
// case class ValDef[-T >: Untyped] private[ast] (name: TermName, tpt: Tree[T], rhs: Tree[T])
def procValDef(tree: ValDef) = {
debug("(ValDef " + tree.name)
expression(tree.rhs)
debug(")")
}
// case class Function(args: List[Tree], body: Tree) extends Tree {
def procFunction(tree: Function) = {
shiftLn()
debug("(Function (")
for (param <- tree.args) {
param match {
case xx: ValDef => debug(xx.name)
case _ => throw new Exception("procDefDef bad function param")
}
}
debug(")")
shiftRight()
shiftLn()
expression(tree.body)
shiftLeft()
shiftLn()
print(")")
}
// case class CaseDef[-T >: Untyped] private[ast] (pat: Tree[T], guard: Tree[T], body: Tree[T])
def procCaseDef(tree: CaseDef) = {
debug("(case ")
tree.pat match {
case xx: Typed => procTyped(xx)
case xx: Ident => debug(xx.name)
}
shiftLn()
expression(tree.body)
debug(")")
shiftLn()
}
// case class Match[-T >: Untyped] private[ast] (selector: Tree[T], cases: List[CaseDef[T]])
def procMatch(tree: Match) = {
shiftLn()
debug("(Match ")
expression(tree.selector)
shiftRight()
shiftLn()
for (caseDef <- tree.cases) {
procCaseDef(caseDef)
}
debug(")")
shiftLeft()
shiftLn()
}
// case class Typed[-T >: Untyped] private[ast] (expr: Tree[T], tpt: Tree[T])
def procTyped(tree: Typed) = {
debug("(isInstanceOf")
debug(tree.tpt)
expression(tree.expr)
debug(")")
}
// case class Select[-T >: Untyped] private[ast] (qualifier: Tree[T], name: Name)
def procSelect(tree: Select): Unit = {
debug("(Select")
expression(tree.qualifier)
debug(tree.name)
debug(")")
}
def expression(tree: Tree): Unit = {
tree match {
case xx: Parens => expression(xx.t)
case xx: Block => procBlock(xx)
case xx: If => procIf(xx)
case xx: InfixOp => procInfixOp(xx)
case xx: Apply => procApply(xx)
case xx: Ident => debug(xx.name)
case xx: Select => procSelect(xx)
case xx: Function => procFunction(xx)
case xx: Match => procMatch(xx)
case xx: Tree if xx.isEmpty => debug(xx)
case xx: ValDef => procValDef(xx)
case xx: Literal => xx.const match {
case vv: Constant if vv.isNumeric => debug(vv.stringValue)
        case vv: Constant if !vv.isNumeric => debug("\"" + vv.stringValue + "\"")
}
}
}
def procParamsList(vparamss: List[List[ValDef]]) = {
print("(")
for (arg <- vparamss) {
for (param <- arg) {
param match {
case xx: ValDef => debug(xx.name)
case _ => throw new Exception("procDefDef bad function param")
}
}
}
print(")")
}
// case class DefDef(name: TermName, tparams: List[TypeDef[T]], vparamss: List[List[ValDef[T]]], tpt: Tree[T], rhs: Tree[T])
def procDefDef(tree: DefDef) = {
// println("fun tparams: " + tree.tparams) // U dont need this
// println("fun vparams: " + tree.vparamss)
// println("fun tpt: " + tree.tpt)
shiftLn()
debug("(defun " + tree.name)
procParamsList(tree.vparamss)
shiftRight()
shiftLn()
// println("fun rhs: " + tree.rhs) // body block
tree.rhs match {
case xx: Block => procBlock(xx)
      case xx: Match => procMatch(xx); shiftLn()
}
// procBlock(.asInstanceOf[Block])
print(")")
shiftLeft()
shiftLn()
}
// case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parents: List[Tree[T]], self: ValDef[T], body: List[Tree[T]])
def procTemplate(xx: Template) = {
// debug("constr: " + xx.constr.name)
procParamsList(xx.constr.vparamss)
shiftRight()
shiftLn()
debug("(extends")
for(parent <- xx.parents) {
debug(parent match {case xx: Ident => xx.name})
}
debug(")")
shiftLn()
for (tmp <- xx.body) {
tmp match {
case z: DefDef => procDefDef(z)
}
}
shiftLeft()
shiftLn()
}
// case class TypeDef[-T >: Untyped] private[ast] (name: TypeName, rhs: Tree[T])
def procTypeDef(tree: TypeDef) = {
shiftLn()
debug("(TypeDef " + tree.name )
tree.rhs match {
// case class Template[-T >: Untyped] private[ast] (constr: DefDef[T], parents: List[Tree[T]], self: ValDef[T], body: List[Tree[T]])
case xx: Template => {
procTemplate(xx)
}
}
debug(")")
shiftLn()
}
def procObjectDef(modulo: ModuleDef) {
shiftLn()
debug("(ModuleDef " + modulo.name )
procTemplate(modulo.impl)
debug(")")
shiftLn()
}
def procTopModuleDef(modulo: ModuleDef) {
debug("(module " + modulo.name )
modulo.impl match {
case tmpl: Template => {
for (x <- tmpl.body) {
x match {
case z: DefDef => procDefDef(z)
case z: TypeDef => procTypeDef(z)
case z: ModuleDef => procObjectDef(z)
}
}
}
}
shiftLeft()
shiftLn()
debug(")")
shiftLn()
}
}
| spetz911/dotty | src/dotty/tools/lispify/Lispyfy.scala | Scala | bsd-3-clause | 6,620 |
package com.twitter.finagle.util
import com.twitter.finagle.core.util.InetAddressUtil
import java.net.InetAddress
import org.scalatest.funsuite.AnyFunSuite
class InetAddressUtilTest extends AnyFunSuite {
test("InetAddressUtil should isPrivateAddress") {
import InetAddressUtil.isPrivateAddress
assert(!isPrivateAddress(InetAddress.getByName("0.0.0.0")))
assert(!isPrivateAddress(InetAddress.getByName("199.59.148.13")))
assert(isPrivateAddress(InetAddress.getByName("10.0.0.0")))
assert(isPrivateAddress(InetAddress.getByName("10.255.255.255")))
assert(isPrivateAddress(InetAddress.getByName("172.16.0.0")))
assert(isPrivateAddress(InetAddress.getByName("172.31.255.255")))
assert(isPrivateAddress(InetAddress.getByName("192.168.0.0")))
assert(isPrivateAddress(InetAddress.getByName("192.168.255.255")))
}
test("InetAddressUtil should getByName") {
import InetAddressUtil.getByName
assert(getByName("69.55.236.117").getHostAddress == "69.55.236.117")
assert(getByName("0.0.0.0").getHostAddress == "0.0.0.0")
assert(getByName("255.0.0.0").getHostAddress == "255.0.0.0")
assert(getByName("0.255.0.0").getHostAddress == "0.255.0.0")
assert(getByName("0.0.255.0").getHostAddress == "0.0.255.0")
assert(getByName("0.0.0.255").getHostAddress == "0.0.0.255")
assert(getByName("255.255.255.255").getHostAddress == "255.255.255.255")
}
}
| twitter/finagle | finagle-core/src/test/scala/com/twitter/finagle/util/InetAddressUtilTest.scala | Scala | apache-2.0 | 1,409 |
package org.mandrake.simulation
trait Event extends StateInput
| louis-mon/mandrake | src/main/scala/org/mandrake/simulation/Event.scala | Scala | mit | 64 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala.tools
package reflect
import scala.reflect.internal.util.ScalaClassLoader
import scala.tools.nsc.{Global, Settings}
import scala.tools.nsc.reporters.Reporter
/** A version of Global that uses reflection to get class
* infos, instead of reading class or source files.
*/
class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader)
extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable {
/** Obtains the classLoader used for runtime macro expansion.
*
* Macro expansion can use everything available in `global.classPath` or `rootClassLoader`.
* The `rootClassLoader` is used to obtain runtime defined macros.
*/
override def findMacroClassLoader(): ClassLoader = {
val classpath = classPath.asURLs
perRunCaches.recordClassloader(ScalaClassLoader.fromURLs(classpath, rootClassLoader))
}
override def transformedType(sym: Symbol) =
postErasure.transformInfo(sym,
erasure.transformInfo(sym,
uncurry.transformInfo(sym, sym.info)))
override def isCompilerUniverse = true
// Typically `runtimeMirror` creates a new mirror for every new classloader
// and shares symbols between the created mirrors.
//
// However we can't do that for the compiler.
// The problem is that symbol sharing violates owner chain assumptions that the compiler has.
//
// For example, we can easily end up with a situation when:
//
// Predef defined in package scala loaded by the classloader that has scala-library.jar
//
// cannot be accessed in:
//
// package scala for the rootMirror of ReflectGlobal that might correspond to a different classloader
//
// This happens because, despite the fact that `Predef` is shared between multiple `scala` packages (i.e. multiple scopes)
// (each mirror has its own set package symbols, because of the peculiarities of symbol loading in scala),
// that `Predef` symbol only has a single owner, and this messes up visibility, which is calculated based on owners, not scopes.
override def runtimeMirror(cl: ClassLoader): Mirror = rootMirror
// Mirror and RuntimeClass come from both Global and reflect.runtime.SymbolTable
// so here the compiler needs an extra push to help decide between those (in favor of the latter)
import scala.reflect.ClassTag
override type Mirror = MirrorImpl
override implicit val MirrorTag: ClassTag[Mirror] = ClassTag[Mirror](classOf[Mirror])
override type RuntimeClass = java.lang.Class[_]
override implicit val RuntimeClassTag: ClassTag[RuntimeClass] = ClassTag[RuntimeClass](classOf[RuntimeClass])
override def openPackageModule(pkgClass: Symbol, force: Boolean): Unit = super.openPackageModule(pkgClass, true)
}
| scala/scala | src/compiler/scala/tools/reflect/ReflectGlobal.scala | Scala | apache-2.0 | 3,094 |
package org.apache.spark.ml.parity.clustering
import org.apache.spark.ml.clustering.BisectingKMeans
import org.apache.spark.ml.feature.{StringIndexer, VectorAssembler}
import org.apache.spark.ml.{Pipeline, Transformer}
import org.apache.spark.ml.parity.SparkParityBase
import org.apache.spark.sql._
/**
* Created by hollinwilkins on 12/27/16.
*/
class BisectingKMeansParitySpec extends SparkParityBase {
override val dataset: DataFrame = baseDataset.select("dti", "loan_amount", "fico_score_group_fnl")
override val sparkTransformer: Transformer = new Pipeline().setStages(Array(new StringIndexer().
setInputCol("fico_score_group_fnl").
setOutputCol("fico_index"),
new VectorAssembler().
setInputCols(Array("fico_index", "dti")).
setOutputCol("features"),
new BisectingKMeans().
setFeaturesCol("features").
setPredictionCol("prediction"))).fit(dataset)
override val unserializedParams = Set("stringOrderType", "k", "maxIter", "seed", "minDivisibleClusterSize")
}
| combust/mleap | mleap-spark/src/test/scala/org/apache/spark/ml/parity/clustering/BisectingKMeansParitySpec.scala | Scala | apache-2.0 | 1,016 |
package examples3
import org.scalatest.FunSuite
import scala.runtime.ScalaRunTime
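// The class under test is defined elsewhere in the project; the assertions
// below assume the plain declaration:
//   case class Person(name: String, age: Int)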
class CaseClassPersonTest extends FunSuite {
test("apply & equals") {
val pA1 = Person("Alex1", 12)
val pA2 = Person.apply("Alex", 12)
val pA3 = Person("Alex", 12)
assert(pA1 === pA1)
assert(pA1 !== pA2)
assert(pA3 === pA2)
assert(ScalaRunTime._equals(pA1, pA2) === false)
assert(ScalaRunTime._equals(pA3, pA2))
}
test("hashCode") {
val p1 = Person("Alex", 12)
val p2 = Person("Alex1", 12)
val p3 = Person("Alex1", 12)
assert(p2.hashCode === p3.hashCode)
val personSet = Set(p1, p2, p3)
assert(personSet.size === 2)
assert(ScalaRunTime._hashCode(p2) === ScalaRunTime._hashCode(p3))
}
//
test("copy") {
val pA0 = Person("Alex1", 12).copy(name = "Alex")
val pA1 = pA0.copy(age = 11)
val pA2 = Person("Alex", 11)
assert(pA1 === pA2)
}
//
test("toString") {
val pA1 = Person("Alex1", 12)
assert(pA1.toString === "Person(Alex1,12)")
assert(Person.toString === "Person")
assert(ScalaRunTime._toString(pA1) === "Person(Alex1,12)")
}
//
test("unapply") {
val pA1 = Person("Alex1", 12)
pA1 match {
case Person(name, age) =>
assert(name === "Alex1")
assert(age === 12)
}
null.asInstanceOf[Person] match {
case Person(name, age) =>
fail("should fail")
case _ =>
}
}
//
test("product") {
val pA1 = Person("Alex1", 12)
assert("Person" === pA1.productPrefix)
assert(2 === pA1.productArity)
assert("Alex1" === pA1.productElement(0))
assert(12 === pA1.productElement(1))
assert(pA1.productIterator.toSeq === Seq("Alex1", 12))
intercept[IndexOutOfBoundsException] {
pA1.productElement(2)
}
}
//
test("tupled") {
val pA1 = Person("Alex1", 12)
val tuple: (String, Int) = ("Alex1", 12)
assert(Person.tupled(tuple) === pA1)
}
test("curried") {
val pA1 = Person("Alex1", 12)
val fun1: (Int) => Person = Person.curried("Alex1")
assert(fun1(12) === pA1)
}
}
| pharmpress/codingdojo | scala-case/src/test/scala/examples1/CaseClassPersonTest.scala | Scala | apache-2.0 | 2,071 |
package x7c1.wheat.macros.intent
import android.content.Context
import android.os.Bundle
import x7c1.wheat.macros.base.PublicFieldsFinder
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
object ServiceCaller {
def using[A]: ServiceCaller[A] = new ServiceCaller[A]
def reify[A](context: Context, klass: Class[_]): A with ContextHolder[A] =
macro ServiceCallerImpl.reify[A]
}
class ServiceCaller[A]{
def startService(context: Context, klass: Class[_])(f: A => Unit): Unit =
macro ServiceCallerImpl.startService[A]
}
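// Illustrative call site (hypothetical service trait, implementation class and
// URL; the macro rewrites the lambda into Intent extras and fires startService):
//   trait Downloads { def enqueue(url: String): Unit }
//   ServiceCaller.using[Downloads]
//     .startService(context, classOf[DownloadService]) { _.enqueue(someUrl) }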
private object ServiceCallerImpl {
def startService[A: c.WeakTypeTag](c: blackbox.Context)
(context: c.Tree, klass: c.Tree)(f: c.Tree): c.Tree = {
import c.universe._
val factory = new IntentTreeFactory {
override val context: c.type = c
override val block = f
}
val intent = TermName(c freshName "intent")
val tree = q"""
val $intent = ${factory.newIntent(context, klass)}
$context.startService($intent)
"""
// println(showCode(tree))
tree
}
def reify[A: c.WeakTypeTag](c: blackbox.Context)(context: c.Tree, klass: c.Tree): c.Tree = {
val androidContext = context
val caller = new ServiceCallerTreeFactory {
override val context: c.type = c
override val contextTree = androidContext
override val klassTree = klass
override val serviceType = c.universe.weakTypeOf[A]
}
val tree = caller.reify
// println(tree)
tree
}
}
trait ServiceCallerTreeFactory extends PublicFieldsFinder {
import context.universe._
/* android.content.Context */
val contextTree: Tree
/* java.lang.Class */
val klassTree: Tree
val serviceType: Type
def reify = {
val methods = methodsOf(serviceType) map { method =>
val paramLists = method.paramLists map { params =>
params map { param =>
q"${param.name.encodedName.toTermName}: ${param.typeSignature}"
}
}
val argLists = method.paramLists map { params =>
params map { param =>
q"${param.name.encodedName.toTermName}"
}
}
q"""
override def ${method.name.encodedName.toTermName}(...$paramLists) =
${typeOf[ServiceCaller[_]].companion}.using[$serviceType].
startService($contextTree, $klassTree){
_.${method.name.encodedName.toTermName}(...$argLists)
}
"""
}
val holder = appliedType(
typeOf[ContextHolder[_]].typeConstructor,
serviceType
)
val tree = q"""
new $holder($contextTree, $klassTree) with $serviceType {
..$methods
}
"""
// println(tree)
tree
}
}
trait BundleConvertible[A] {
def toBundle(target: A): Bundle
}
| x7c1/Linen | wheat-macros/src/main/scala/x7c1/wheat/macros/intent/ServiceCaller.scala | Scala | mit | 2,754 |
package metrics
import com.codahale.metrics.MetricRegistry
import nl.grons.metrics.scala.{HdrMetricBuilder, InstrumentedBuilder}
trait Instrumented extends InstrumentedBuilder {
val metricsManager: MetricsManager
override lazy protected val metricBuilder = new HdrMetricBuilder(metricBaseName, metricRegistry, resetAtSnapshot = true)
override val metricRegistry: MetricRegistry = metricsManager.metricRegistry
}
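// Usage sketch (hypothetical service; `MetricsManager` is defined elsewhere in
// this project): mixing in Instrumented exposes the `metrics` builder from
// metrics-scala, backed here by HDR histograms that reset on every snapshot.
//
//   class IngestService(val metricsManager: MetricsManager) extends Instrumented {
//     private val requestTimer = metrics.timer("requests")
//     def handle(): Unit = requestTimer.time { /* work */ }
//   }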
| alexanderfefelov/area-451 | app/metrics/Instrumented.scala | Scala | mit | 422 |
/*
* Copyright 2012-2017 Johannes Rudolph
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spray.json
package lenses
import java.lang.StringBuilder
import org.parboiled.Context
import org.parboiled.scala._
import org.parboiled.errors.{ ErrorUtils, ParsingException }
/**
* A parser for json-path expression as specified here:
* [[http://goessner.net/articles/JsonPath/]]
*/
object JsonPathParser extends Parser with BasicRules {
def JsonPathExpr = rule { Path ~ EOI }
def Path: Rule1[JsonPath.Path] = rule { Root ~ OptionalSelection }
def Root: Rule1[JsonPath.Root.type] = rule {
// we don't distinguish between '$' and '@'
anyOf("$@") ~ push(JsonPath.Root)
}
def OptionalSelection: ReductionRule1[JsonPath.Path, JsonPath.Path] = rule {
Projection ~~> JsonPath.Selection ~ OptionalSelection |
EMPTY ~~> identity
}
def Projection: Rule1[JsonPath.Projection] = rule {
"." ~ DotProjection |
"[" ~ BracketProjection ~ "]"
}
def DotProjection: Rule1[JsonPath.Projection] = rule {
ByFieldName
}
def AllElements = rule { "*" ~ push(JsonPath.AllElements) }
def ByFieldName = rule { FieldName ~~> JsonPath.ByField }
def BracketProjection: Rule1[JsonPath.Projection] = rule {
Digits ~> (d => JsonPath.ByIndex(d.toInt)) |
SingleQuotedString ~~> JsonPath.ByField |
AllElements |
"?(" ~ WhiteSpace ~ Predicate ~ WhiteSpace ~ ")" ~~> JsonPath.ByPredicate
}
def Predicate: Rule1[JsonPath.Predicate] = rule {
Lt | Gt | Eq | Exists
}
def Eq: Rule1[JsonPath.Eq] = rule { op("==")(JsonPath.Eq) }
def Lt: Rule1[JsonPath.Lt] = rule { op("<")(JsonPath.Lt) }
def Gt: Rule1[JsonPath.Gt] = rule { op(">")(JsonPath.Gt) }
def Exists: Rule1[JsonPath.Exists] = rule {
Path ~~> JsonPath.Exists
}
def op[T](op: String)(cons: (JsonPath.Expr, JsonPath.SimpleExpr) => T) =
Expr ~ WhiteSpace ~ op ~ WhiteSpace ~ SimpleExpr ~~> cons
def Expr: Rule1[JsonPath.Expr] = rule {
Path ~~> JsonPath.PathExpr |
SimpleExpr
}
def SimpleExpr: Rule1[JsonPath.SimpleExpr] = rule {
JsConstant ~~> JsonPath.Constant
}
def JsConstant: Rule1[JsValue] = rule {
JsonNumber |
SingleQuotedString ~~> (JsString(_))
}
  val WhiteSpaceChars = " \n\r\t\f"
def FieldName: Rule1[String] = rule {
oneOrMore("a" - "z" | "A" - "Z" | "0" - "9" | anyOf("_-")) ~> identity
}
def SingleQuotedString: Rule1[String] =
    rule { "'" ~ push(new java.lang.StringBuilder) ~ zeroOrMore(!anyOf("'") ~ ("\\" ~ EscapedChar | NormalChar)) } ~ "'" ~~> (_.toString)
/**
* The main parsing method. Uses a ReportingParseRunner (which only reports the first error) for simplicity.
*/
def apply(path: String): JsonPath.Path = apply(path.toCharArray)
/**
* The main parsing method. Uses a ReportingParseRunner (which only reports the first error) for simplicity.
*/
def apply(path: Array[Char]): JsonPath.Path = {
val parsingResult = ReportingParseRunner(JsonPathExpr).run(path)
parsingResult.result.getOrElse {
      throw new ParsingException("Invalid JSON source:\n" + ErrorUtils.printParseErrors(parsingResult))
}
}
}
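/** A small parsing demonstration (added illustration, not part of the
  * original sources); the printed AST shape is indicative only.
  */
private[lenses] object JsonPathParserExample {
  def main(args: Array[String]): Unit = {
    val ast: JsonPath.Path = JsonPathParser("$.store.book[0]")
    println(ast) // a chain of Selection nodes ending in ByIndex(0)
  }
}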
// a set of basic rules taken from the old spray-json parser
// see https://github.com/spray/spray-json/blob/v1.2.6/src/main/scala/spray/json/JsonParser.scala
trait BasicRules { _: Parser =>
  def EscapedChar = rule(
    anyOf("\"\\/") ~:% withContext(appendToSb(_)(_))
      | "b" ~ appendToSb('\b')
      | "f" ~ appendToSb('\f')
      | "n" ~ appendToSb('\n')
      | "r" ~ appendToSb('\r')
      | "t" ~ appendToSb('\t')
      | Unicode ~~% withContext((code, ctx) => appendToSb(code.asInstanceOf[Char])(ctx)))
  def NormalChar = rule { !anyOf("\"\\") ~ ANY ~:% (withContext(appendToSb(_)(_))) }
def Unicode = rule { "u" ~ group(HexDigit ~ HexDigit ~ HexDigit ~ HexDigit) ~> (java.lang.Integer.parseInt(_, 16)) }
def JsonNumber = rule { group(Integer ~ optional(Frac) ~ optional(Exp)) ~> (JsNumber(_)) ~ WhiteSpace }
def Frac = rule { "." ~ Digits }
def Exp = rule { ignoreCase("e") ~ optional(anyOf("+-")) ~ Digits }
def Integer = rule { optional("-") ~ (("1" - "9") ~ Digits | Digit) }
def Digits = rule { oneOrMore(Digit) }
def Digit = rule { "0" - "9" }
def HexDigit = rule { "0" - "9" | "a" - "f" | "A" - "F" }
  def WhiteSpace: Rule0 = rule { zeroOrMore(anyOf(" \n\r\t\f")) }
def appendToSb(c: Char): Context[Any] => Unit = { ctx =>
ctx.getValueStack.peek.asInstanceOf[StringBuilder].append(c)
()
}
} | jrudolph/json-lenses | src/main/scala/spray/json/lenses/JsonPathParser.scala | Scala | apache-2.0 | 5,039 |
/*
* Copyright ixias.net All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license
* For the full copyright and license information,
* please view the LICENSE file that was distributed with this source code.
*/
package ixias.aws.qldb.databind
import com.fasterxml.jackson.module.scala.JacksonModule
object DatabindModule extends JacksonModule
with EnumBitFlagsSerializerModule
with EnumBitFlagsDeserializerModule
with EnumStatusSerializerModule
with EnumStatusDeserializerModule
{
override def getModuleName = "DatabindModule"
}
| sp1rytus/ixias | framework/ixias-aws-qldb/src/main/scala/ixias/aws/qldb/databind/DatabindModule.scala | Scala | mit | 583 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.internal
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, DataFormat}
import com.intel.analytics.bigdl.dllib.nn.{InitializationMethod, SpatialFullConvolution, Xavier, Zeros}
import com.intel.analytics.bigdl.dllib.optim.Regularizer
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Shape
import scala.reflect.ClassTag
/**
* Transposed convolution operator for filtering windows of 2-D inputs.
* The need for transposed convolutions generally arises from the desire to use a transformation
* going in the opposite direction of a normal convolution, i.e., from something that has
* the shape of the output of some convolution to something that has the shape of its input
* while maintaining a connectivity pattern that is compatible with said convolution.
* Data format currently supported for this layer is DataFormat.NCHW (dimOrdering='th').
* Border mode currently supported for this layer is 'valid'.
* You can also use Deconv2D as an alias of this layer.
* The input of this layer should be 4D.
*
* When using this layer as the first layer in a model, you need to provide the argument
* inputShape (a Single Shape, does not include the batch dimension).
* e.g. inputShape=Shape(3, 128, 128) for 128x128 RGB pictures.
*
* @param nbFilter Number of transposed convolution filters to use.
* @param nbRow Number of rows in the transposed convolution kernel.
* @param nbCol Number of columns in the transposed convolution kernel.
* @param init Initialization method for the weights of the layer. Default is Xavier.
* You can also pass in corresponding string representations such as 'glorot_uniform'
* or 'normal', etc. for simple init methods in the factory method.
* @param activation Activation function to use. Default is null.
* You can also pass in corresponding string representations such as 'relu'
* or 'sigmoid', etc. for simple activations in the factory method.
* @param subsample Int array of length 2. The step of the convolution in the height and
* width dimension. Also called strides elsewhere. Default is (1, 1).
* @param dimOrdering Format of input data. Please use DataFormat.NCHW (dimOrdering='th').
* @param wRegularizer An instance of [[Regularizer]], (eg. L1 or L2 regularization),
* applied to the input weights matrices. Default is null.
* @param bRegularizer An instance of [[Regularizer]], applied to the bias. Default is null.
* @param bias Whether to include a bias (i.e. make the layer affine rather than linear).
* Default is true.
* @tparam T The numeric type of parameter(e.g. weight, bias). Only support float/double now.
*/
class Deconvolution2D[T: ClassTag](
val nbFilter: Int,
val nbRow: Int,
val nbCol: Int,
val init: InitializationMethod = Xavier,
val activation: KerasLayer[Tensor[T], Tensor[T], T] = null,
val subsample: Array[Int] = Array(1, 1),
val dimOrdering: DataFormat = DataFormat.NCHW,
var wRegularizer: Regularizer[T] = null,
var bRegularizer: Regularizer[T] = null,
val bias: Boolean = true,
val inputShape: Shape = null)(implicit ev: TensorNumeric[T])
extends KerasLayer[Tensor[T], Tensor[T], T](KerasLayer.addBatch(inputShape)) {
require(dimOrdering == DataFormat.NCHW, s"Deconvolution2D currently only supports " +
s"format NCHW, but got format $dimOrdering")
require(subsample.length == 2,
s"For Deconvolution2D, subsample should be of length 2 but got length ${subsample.length}")
override def doBuild(inputShape: Shape): AbstractModule[Tensor[T], Tensor[T], T] = {
val input = inputShape.toSingle().toArray
val layer = SpatialFullConvolution(
nInputPlane = input(1),
nOutputPlane = nbFilter,
kW = nbCol,
kH = nbRow,
dW = subsample(1),
dH = subsample(0),
noBias = !bias,
wRegularizer = wRegularizer,
bRegularizer = bRegularizer)
layer.setInitMethod(weightInitMethod = init, biasInitMethod = Zeros)
KerasLayer.fuse(layer, activation,
inputShape).asInstanceOf[AbstractModule[Tensor[T], Tensor[T], T]]
}
}
object Deconvolution2D {
def apply[@specialized(Float, Double) T: ClassTag](
nbFilter: Int,
nbRow: Int,
nbCol: Int,
init: String = "glorot_uniform",
activation: String = null,
subsample: (Int, Int) = (1, 1),
dimOrdering: String = "th",
wRegularizer: Regularizer[T] = null,
bRegularizer: Regularizer[T] = null,
bias: Boolean = true,
inputShape: Shape = null)(implicit ev: TensorNumeric[T]): Deconvolution2D[T] = {
new Deconvolution2D[T](nbFilter, nbRow, nbCol, KerasUtils.getInitMethod(init),
KerasUtils.getKerasActivation(activation), Array(subsample._1, subsample._2),
KerasUtils.toBigDLFormat(dimOrdering), wRegularizer,
bRegularizer, bias, inputShape)
}
}
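/** A hedged construction sketch (added illustration, not part of the BigDL
  * sources): a layer for 128x128 RGB input, matching the inputShape note in
  * the class scaladoc above. The implicit Float numeric is assumed to come
  * from TensorNumeric's companion object.
  */
private[internal] object Deconvolution2DExample {
  import TensorNumeric.NumericFloat

  val layer: Deconvolution2D[Float] = Deconvolution2D[Float](
    nbFilter = 3, nbRow = 3, nbCol = 3,
    subsample = (2, 2), inputShape = Shape(3, 128, 128))
}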
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/internal/Deconvolution2D.scala | Scala | apache-2.0 | 5,654 |
package mesosphere.marathon.core.appinfo
import mesosphere.marathon.core.task.Task
import mesosphere.marathon.core.health.Health
import mesosphere.marathon.state.PathId
case class EnrichedTask(
appId: PathId,
task: Task,
healthCheckResults: Seq[Health],
servicePorts: Seq[Int] = Nil)
| timcharper/marathon | src/main/scala/mesosphere/marathon/core/appinfo/EnrichedTask.scala | Scala | apache-2.0 | 294 |
object ArrayUtils {
  // Minimal implementations filled in for runnability (the original slide left these as ???).
  def filter(xs: Array[Int], pred: Int => Boolean): Array[Int] = xs.filter(pred)
  def map(xs: Array[Int], f: Int => Int): Array[Int] = xs.map(f)
def filtering(pred: Int => Boolean): Array[Int] => Array[Int] =
xs => filter(xs, pred)
def mapping(f: Int => Int): Array[Int] => Array[Int] =
xs => map(xs, f)
}
object UseCase {
val multiplyBy2 = ArrayUtils.mapping(_ * 2)
val filterDivBy3 = ArrayUtils.filtering(_ % 3 == 0)
val pipeline: Array[Int] => Array[Int] =
filterDivBy3 andThen multiplyBy2
}
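// A quick check of the composed pipeline (added illustration, not on the
// original slide): keep multiples of 3, then double them.
object PipelineDemo {
  def main(args: Array[String]): Unit = {
    val out = UseCase.pipeline(Array(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))
    println(out.mkString(", ")) // prints: 6, 12, 18
  }
}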
| agconti/scala-school | 04-functions-as-values/slides/slide073.scala | Scala | mit | 534 |
/*
* Copyright 2017 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.common
import java.lang.StringBuilder
import java.math.BigInteger
import java.nio.ByteBuffer
import java.time.Instant
import java.util.{Arrays, Date}
import java.util.concurrent.ThreadLocalRandom
import java.util.concurrent.atomic.AtomicInteger
import scala.math.Ordering
import scala.util.Try
object UUID {
private[this] val UnsignedSixByteMin: Long = 0L
private[this] val UnsignedSixByteMax: Long = 0xffffffffffffL // We only use 6 bytes for the timestamp and random fields
private[this] val UnsignedShortMin: Int = 0
private[this] val UnsignedShortMax: Int = 65535 // Short.MaxValue - Short.MinValue
private[this] val SignedShortMax: Int = 32768
private[this] val counter = new AtomicInteger(ThreadLocalRandom.current().nextInt)
private def nextCounter(epochMilli: Long): Int = counter.getAndIncrement() & 0xffff
implicit object ordering extends Ordering[UUID] { def compare(a: UUID, b: UUID): Int = a.compare(b) }
/** 000000000000-0000-0000-000000000000 */
val Zero: UUID = UUID(0L, 0L)
/** ffffffffffff-ffff-ffff-ffffffffffff */
val UnsignedMaxValue: UUID = UUID(-1L, -1L)
/** 7fffffffffff-ffff-7fff-ffffffffffff */
val SignedMaxValue: UUID = UUID(Long.MaxValue, Long.MaxValue)
/** 800000000000-0000-8000-000000000000 */
val SignedMinValue: UUID = UUID(Long.MinValue, Long.MinValue)
/**
* Creates a completely random UUID
*/
def random(): UUID = {
val random: ThreadLocalRandom = ThreadLocalRandom.current()
UUID(random.nextLong(), random.nextLong())
}
/**
* Creates a new UUID based on the current time with a random node id
*/
def apply(): UUID = {
// No Node Id Specified so we use a random negative Short
makeWithNodeId(makeRandomNodeId())
}
/**
* Creates a new UUID based on the current time with the given node id
*/
def apply(nodeId: Int): UUID = {
if (nodeId < 0 || nodeId > 32767) throw new IllegalArgumentException("Invalid NodeId: '"+nodeId+"'. NodeId must be between 0 and 32767 (inclusive).")
makeWithNodeId(nodeId)
}
def apply(date: Date): UUID = forEpochMilli(date.getTime)
def apply(date: ImmutableDate): UUID = forEpochMilli(date.getTime)
def apply(instant: Instant): UUID = forEpochMilli(instant.toEpochMilli)
def forEpochMilli(epochMilli: Long): UUID = makeWithNodeIdAndEpochMilli(makeRandomNodeId(), epochMilli)
private def makeRandomNodeId(): Int = {
// Get a random int between 0 (inclusive) and 32768 (exclusive)
val randomInt: Int = ThreadLocalRandom.current().nextInt(SignedShortMax)
// Add one (we don't want zero as a value) and make it negative.
// This should give us a number between -32768 (inclusive) and -1 (inclusive)
// which corresponds to the range of all negative Short values
(randomInt + 1) * -1
}
private def makeWithNodeId(nodeId: Int): UUID = {
makeWithNodeIdAndEpochMilli(nodeId, System.currentTimeMillis())
}
private def makeWithNodeIdAndEpochMilli(nodeId: Int, epochMilli: Long): UUID = {
val counter: Int = nextCounter(epochMilli)
val random: Long = ThreadLocalRandom.current().nextLong(UnsignedSixByteMax + 1)
apply(epochMilli, counter, nodeId, random)
}
def apply(epochMilli: Long, counter: Int, nodeId: Int, random: Long): UUID = {
checkUnsignedSixByteRange("epochMilli", epochMilli)
checkUnsignedShortRange("counter", counter)
checkSignedShortRange("nodeId", nodeId)
checkUnsignedSixByteRange("random", random)
val timeAndCounter: Long = (epochMilli << 16) | (counter & 0xffffL)
val nodeIdAndRandom: Long = (nodeId.toLong << 48) | (random & 0xffffffffffffL)
UUID(timeAndCounter, nodeIdAndRandom)
}
def apply(bytes: ImmutableArray[Byte]): UUID = apply(bytes.toArray)
def apply(bytes: Array[Byte]): UUID = {
require(bytes.length == 16, "Not a UUID - Invalid Byte Array Length")
val buf: ByteBuffer = ByteBuffer.wrap(bytes)
UUID(buf.getLong, buf.getLong)
}
def apply(uuid: java.util.UUID): UUID = apply(uuid.getMostSignificantBits, uuid.getLeastSignificantBits)
def apply(uuid: BigInt): UUID = apply(uuid.bigInteger)
def apply(uuid: BigInteger): UUID = {
val bytes: Array[Byte] = uuid.toByteArray()
require(bytes.length <= 16, "Not a UUID - Invalid Byte Array Length")
// If we have less than 16 bytes then we need to extend the byte array to be 16 bytes
val newBytes: Array[Byte] = if (bytes.length < 16) {
val tmp: Array[Byte] = new Array(16)
// If the BigInteger is negative then we need to fill in -1s in our array otherwise we use the default 0s
if (uuid.isNegative) Arrays.fill(tmp, -1.toByte)
System.arraycopy(bytes, 0, tmp, 16 - bytes.length, bytes.length)
tmp
} else {
bytes
}
apply(newBytes)
}
def apply(uuid: String): UUID = {
// Note: If the UUID looks like Base58 then we go with that. In some cases this will conflict with Base64 encoded
// with no padding. Specifically when length in 22 and all characters look like valid Base58 we will treat
// it as Base58 and not as Base64. I'm tempted to remove any Base64 references in UUID to discourage use of
// it as a way to serialize the UUID.
if (mightBeBase58(uuid)) return apply(Base58.decode(uuid))
uuid.length match {
// Base 64: AVJHfgdafGqJBjASSLG0GQ==, AVJHfgdafGqJBjASSLG0GQ=, AVJHfgdafGqJBjASSLG0GQ
case 22 | 23 | 24 =>
apply(Base64.decode(uuid))
// Hex: 0152477e075a7c6a8906301248b1b419
case 32 =>
apply(Base16.decode(uuid))
// "Pretty" Hex: 0152477e075a-7c6a-8906-301248b1b419
case 35 =>
Seq(12, 17, 22).foreach{ idx: Int => require(!Character.isLetterOrDigit(uuid(idx)), s"Not a valid UUID: $uuid") }
val epochMillis: Long = java.lang.Long.parseLong(uuid.substring(0, 12), 16)
val counter: Int = Integer.parseInt(uuid.substring(13, 17), 16)
val nodeId: Int = Integer.parseInt(uuid.substring(18, 22), 16) << 16 >> 16 // Some shifting to restore the original sign
val random: Long = java.lang.Long.parseLong(uuid.substring(23, 35), 16)
apply(epochMillis, counter, nodeId, random)
// "Standard" formatted UUID: 0152477e-075a-7c6a-8906-301248b1b419
case 36 =>
Seq(8, 13, 18, 23).foreach{ idx: Int => require(!Character.isLetterOrDigit(uuid(idx)), s"Not a valid UUID: $uuid") }
val epochMillis: Long = java.lang.Long.parseLong(uuid.substring(0, 8)+uuid.substring(9, 13), 16)
val counter: Int = Integer.parseInt(uuid.substring(14, 18), 16)
val nodeId: Int = Integer.parseInt(uuid.substring(19, 23), 16) << 16 >> 16 // Some shifting to restore the original sign
val random: Long = java.lang.Long.parseLong(uuid.substring(24, 36), 16)
apply(epochMillis, counter, nodeId, random)
case _ => throw new IllegalArgumentException("Invalid UUID")
}
}
private def mightBeBase58(uuid: String): Boolean = !isNotBase58(uuid)
private def isNotBase58(uuid: String): Boolean = {
if (uuid.length < 11 || uuid.length > 22) return true
var i: Int = 0
while (i < uuid.length) {
val hasIllegalChar: Boolean = uuid.charAt(i) match {
case '0' | 'O' | 'I' | 'l' => true // Alpha Chars omitted from Base58
case '/' | '+' | '_' | '-' => true // Special Chars omitted from Base58
case '=' => true // Padding char (not used in Base 58)
case _ => false
}
if (hasIllegalChar) return true
i += 1
}
false
}
/**
* Can use this in an extractor:
* val Array(UUID.parse(first), UUID.parse(second)) = s.split(':')
*/
object parse {
def apply(uuid: String): Option[UUID] = get(uuid)
def unapply(uuid: String): Option[UUID] = get(uuid)
}
def get(uuid: String): Option[UUID] = Try{ apply(uuid) }.toOption
def get(bytes: ImmutableArray[Byte]): Option[UUID] = {
if (bytes.length == 16) Some(apply(bytes.toArray)) else None
}
def get(bytes: Array[Byte]): Option[UUID] = {
if (bytes.length == 16) Some(apply(bytes)) else None
}
def isValid(uuid: String): Boolean = get(uuid).isDefined
private def checkUnsignedSixByteRange(name: String, value: Long): Unit = {
if (value < UnsignedSixByteMin || value > UnsignedSixByteMax) throw new IllegalArgumentException(name+": '"+value+"' is outside of the valid range which should be between "+UnsignedSixByteMin+" and "+UnsignedSixByteMax)
}
private def checkUnsignedShortRange(name: String, value: Int): Unit = {
if (value < UnsignedShortMin || value > UnsignedShortMax) throw new IllegalArgumentException(name+": '"+value+"' is outside of the valid range which should be between "+UnsignedShortMin+" and "+UnsignedShortMax)
}
private def checkSignedShortRange(name: String, value: Int): Unit = {
if (value < Short.MinValue || value > Short.MaxValue) throw new IllegalArgumentException(name+": '"+value+"' is outside of the valid range which should be between "+Short.MinValue+" and "+Short.MaxValue)
}
/**
* A non-scientific super simple performance tester
*/
def main(args: Array[String]): Unit = Util.printAppStats{
val doPrettyString: Boolean = args.headOption.flatMap{ _.parseBoolean }.getOrElse{ false }
{
var i: Int = 0
var tmp: Int = 0
while (i < 1000000) {
val uuid: UUID = UUID()
tmp += uuid.counter
if (doPrettyString) tmp += uuid.toPrettyString().length
i += 1
}
println("Warming Complete: "+tmp)
}
import java.util.concurrent.CountDownLatch
val threads: Int = 8
val iterationsPerThread: Int = 10000000
val latch: CountDownLatch = new CountDownLatch(threads)
val runner: TaskRunner = TaskRunner("UUID Tester", threads = threads)
val millis: Long = Util.time{
(0 until threads).foreach{ i =>
runner.submit{
var sum: Int = 0
          var n: Int = 0 // renamed from `i`, which shadowed the thread index above
          while(n < iterationsPerThread) {
            val uuid: UUID = UUID()
            sum += uuid.counter
            if (doPrettyString) sum += uuid.toPrettyString().length
            n += 1
          }
          latch.countDown()
          println(s"Thread $i - Sum: $sum")
}
}
latch.await()
}
val totalUUIDs: Int = iterationsPerThread*threads
println(s"Total Time: ${millis}ms, total UUIDs Created: ${totalUUIDs}, per ms: ${totalUUIDs/millis}")
}
}
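/** A tiny round-trip demonstration (added illustration, not original code):
  * the pretty string produced below parses back to an equal UUID.
  */
private object UUIDExample {
  def main(args: Array[String]): Unit = {
    val id: UUID = UUID()
    val pretty: String = id.toPrettyString() // e.g. 015247f01787-9740-85e0-3e9672a8dfa2
    assert(UUID(pretty) == id)
    println(pretty)
  }
}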
/**
* A custom UUID implementation (not to be confused with java.util.UUID or RFC4122 implementations)
* that allows natural sorting by timestamp based on the string or numeric representation.
*
* The UUID consists of 16 bytes (128 bits) broken up into 2 longs:
*
* timeAndCounter: {6-Byte Millis since epoch}{2-Byte Counter}
* nodeIdAndRandom: {2-Byte Node ID}{6-Byte Random Number}
*
* The "pretty" hex encoded representation is:
* {6-byte millis since epoch}-{2-byte-counter}-{2-byte-optional-node-id}-{4-byte-random}
*
* Example: 015247f01787-9740-85e0-3e9672a8dfa2
*/
final case class UUID(timeAndCounter: Long, nodeIdAndRandom: Long) extends Ordered[UUID] {
/** Between 0 and 281474976710655 (both inclusive) which is a 6-byte unsigned int */
def epochMilli: Long = timeAndCounter >>> 16
/** Between 0 and 65535 (both inclusive) */
def counter: Int = (timeAndCounter & 0xffffL).toInt
/** Between Short.MinValue (-32768) and Short.MaxValue (32767) (both inclusive) */
def nodeId: Int = (nodeIdAndRandom >> 48).toInt
/** Between 0 and 281474976710655 (both inclusive) which is a 6-byte unsigned int */
def random: Long = nodeIdAndRandom & 0xffffffffffffL
/** The java.time.Instant represented by the epochMilli */
def instant: Instant = Instant.ofEpochMilli(epochMilli)
/** The fm.common.ImmutableDate represented by the epochMilli */
def date: ImmutableDate = new ImmutableDate(epochMilli)
/** The java.util.Date represented by the epochMilli */
def javaDate: Date = new Date(epochMilli)
/** Is this UUID using a random node id? */
def isRandomNodeId: Boolean = nodeId < 0
def toImmutableByteArray(): ImmutableArray[Byte] = {
ImmutableArray.wrap(toByteArray())
}
def toByteArray(): Array[Byte] = {
val buf: ByteBuffer = ByteBuffer.allocate(16)
buf.putLong(timeAndCounter)
buf.putLong(nodeIdAndRandom)
buf.array()
}
def toBigInteger: BigInteger = new BigInteger(toByteArray)
def toBigInt: BigInt = new BigInt(toBigInteger)
def toHex(): String = Base16.encode(toByteArray)
def toBase16(): String = Base16.encode(toByteArray)
def toBase58(): String = Base58.encode(toByteArray)
def toBase64(): String = Base64Strict.encode(toByteArray)
def toBase64NoPadding(): String = Base64Strict.encodeNoPadding(toByteArray)
def toBase64URL(): String = Base64URL.encode(toByteArray)
def toBase64URLNoPadding(): String = Base64URL.encodeNoPadding(toByteArray)
/** {6-byte millis since epoch}-{2-byte-counter}-{2-byte-optional-node-id}-{4-byte-random} */
def toPrettyString(): String = toPrettyString('-')
/** {6-byte millis since epoch}{sep}{2-byte-counter}{sep}{2-byte-optional-node-id}{sep}{4-byte-random} */
def toPrettyString(sep: Char): String = {
val bytes: Array[Byte] = toByteArray()
val sb: StringBuilder = new StringBuilder(35)
sb.append(Base16.encode(bytes, 0, 6))
sb.append(sep)
sb.append(Base16.encode(bytes, 6, 2))
sb.append(sep)
sb.append(Base16.encode(bytes, 8, 2))
sb.append(sep)
sb.append(Base16.encode(bytes, 10, 6))
sb.toString()
}
/** {upper 4-bytes of 6-byte millis since epoch}-{lower 2-bytes of 6-byte millis since epoch}-{2-byte-counter}-{2-byte-optional-node-id}-{4-byte-random} */
def toStandardString(): String = toStandardString('-')
/** {upper 4-bytes of 6-byte millis since epoch}{sep}{lower 2-bytes of 6-byte millis since epoch}{sep}{2-byte-counter}{sep}{2-byte-optional-node-id}{sep}{4-byte-random} */
def toStandardString(sep: Char): String = {
val bytes: Array[Byte] = toByteArray()
val sb: StringBuilder = new StringBuilder(36)
sb.append(Base16.encode(bytes, 0, 4))
sb.append(sep)
sb.append(Base16.encode(bytes, 4, 2))
sb.append(sep)
sb.append(Base16.encode(bytes, 6, 2))
sb.append(sep)
sb.append(Base16.encode(bytes, 8, 2))
sb.append(sep)
sb.append(Base16.encode(bytes, 10, 6))
sb.toString()
}
override def toString(): String = toPrettyString()
def compare(that: UUID): Int = {
val res: Int = java.lang.Long.compare(this.timeAndCounter, that.timeAndCounter)
if (res == 0) java.lang.Long.compare(this.nodeIdAndRandom, that.nodeIdAndRandom) else res
}
def toJavaUUID: java.util.UUID = new java.util.UUID(timeAndCounter, nodeIdAndRandom)
def isZero: Boolean = timeAndCounter === 0L && nodeIdAndRandom === 0L
def isUnsignedMaxValue: Boolean = timeAndCounter === -1L && nodeIdAndRandom === -1L
def isSignedMinValue: Boolean = timeAndCounter === Long.MinValue && nodeIdAndRandom === Long.MinValue
def isSignedMaxValue: Boolean = timeAndCounter === Long.MaxValue && nodeIdAndRandom === Long.MaxValue
} | frugalmechanic/fm-common | jvm/src/main/scala/fm/common/UUID.scala | Scala | apache-2.0 | 15,914 |
/*
* Copyright (c) 2014 Dufresne Management Consulting LLC.
*/
package com.nickelsoftware.bettercare4me.hedis.hedis2014
import scala.util.Random
import org.joda.time.DateTime
import org.joda.time.Interval
import com.nickelsoftware.bettercare4me.hedis.HEDISRule
import com.nickelsoftware.bettercare4me.hedis.Scorecard
import com.nickelsoftware.bettercare4me.models.Claim
import com.nickelsoftware.bettercare4me.models.MedClaim
import com.nickelsoftware.bettercare4me.models.Patient
import com.nickelsoftware.bettercare4me.models.PatientHistory
import com.nickelsoftware.bettercare4me.models.PersistenceLayer
import com.nickelsoftware.bettercare4me.models.Provider
import com.nickelsoftware.bettercare4me.models.RuleConfig
import com.nickelsoftware.bettercare4me.utils.Utils
object CDCHbA1cTest {
val name = "CDC-HbA1c-Test-HEDIS-2014"
val hasHbA1cTest = "HbA1c Test"
/**
* CPT codes for HbA1c Test
*/
val cptA = List("83036", "83037", "3044F", "3045F", "3046F", "3047F")
val cptAS = cptA.toSet
}
/**
* Diabetes HbA1c Test
*
* Diabetes HbA1c Test indicates whether a patient with type 1 or type 2 diabetes, aged 18 to 75 years, had a hemoglobin A1c test
* performed. This excludes patients with a previous diagnosis of polycystic ovaries, gestational diabetes, or steroid-induced
* diabetes.
*
* NUMERATOR:
* Identifies patients with type 1 or type 2 diabetes, aged 18 to 75 years, who had an HbA1c test done.
*
*/
class CDCHbA1cTestRule(config: RuleConfig, hedisDate: DateTime) extends CDCRuleBase(config, hedisDate) {
val name = CDCHbA1cTest.name
val fullName = "Diabetes HbA1c Test"
  val description = "Diabetes HbA1c Test indicates whether a patient with type 1 or type 2 diabetes, aged 18 to 75 years, had a hemoglobin A1c test " +
    "performed. This excludes patients with a previous diagnosis of polycystic ovaries, gestational diabetes, or steroid-induced " +
    "diabetes."
import CDCHbA1cTest._
override def generateMeetMeasureClaims(pl: PersistenceLayer, patient: Patient, provider: Provider): List[Claim] = {
val days = Utils.daysBetween(hedisDate.minusYears(1), hedisDate)
val dos = hedisDate.minusDays(Random.nextInt(days))
// At least one HbA1c test (during the measurement year)
List(pl.createMedClaim(patient.patientID, patient.firstName, patient.lastName, provider.providerID, provider.firstName, provider.lastName, dos, dos, cpt = pickOne(cptA)))
}
override def scorePatientMeetMeasure(scorecard: Scorecard, patient: Patient, ph: PatientHistory): Scorecard = {
val measurementInterval = getIntervalFromYears(1)
// Check if patient had at least one HbA1c test (during the measurement year)
val claims = filterClaims(ph.cpt, cptAS, { claim: MedClaim => measurementInterval.contains(claim.dos) })
scorecard.addScore(name, fullName, HEDISRule.meetMeasure, hasHbA1cTest, claims)
}
}
| reactivecore01/bettercare4.me | play/app/com/nickelsoftware/bettercare4me/hedis/hedis2014/CDC_HbA1c_TestRule.scala | Scala | apache-2.0 | 2,886 |
package io.bimble.pqgram
import org.scalacheck.Prop
import org.scalatest.PropSpec
import org.scalatest.prop.Checkers
class BoundingLaws extends PropSpec with Checkers {
implicit object BoundedString extends Bounded[String] {
override val infimum: String = "__INFIMUM"
override val supremum: String = "__SUPREMUM"
override def compareInner(x: String, y: String): Int = x compare y
}
property("infimum < *") {
check { (s: String) =>
Prop(implicitly[Bounded[String]].compare(implicitly[Bounded[String]].infimum, s) < 0)
}
}
property("* > infimum") {
check { (s : String) =>
Prop(implicitly[Bounded[String]].compare(s, implicitly[Bounded[String]].infimum) > 0)
}
}
property("supremum > *") {
check { (s: String) =>
Prop(implicitly[Bounded[String]].compare(implicitly[Bounded[String]].supremum, s) > 0)
}
}
property("* < supremum") {
check { (s: String) =>
Prop(implicitly[Bounded[String]].compare(s, implicitly[Bounded[String]].supremum) < 0)
}
}
property("* <=> *") {
check { (s1: String, s2: String) =>
Prop.classify(s1 == s2, "==") { implicitly[Bounded[String]].compare(s1, s2) == 0 } ||
Prop.classify(s1 < s2, "<") { implicitly[Bounded[String]].compare(s1, s2) < 0 } ||
Prop.classify(s1 > s2, ">") { implicitly[Bounded[String]].compare(s1, s2) > 0 }
}
}
} | hythloday/pqgram | src/test/scala/io/bimble/pqgram/BoundingLaws.scala | Scala | apache-2.0 | 1,386 |
package com.sksamuel.scapegoat.inspections.unnecessary
import com.sksamuel.scapegoat.inspections.unneccesary.UnusedMethodParameter
import com.sksamuel.scapegoat.{InspectionTest, Warning}
/** @author Stephen Samuel */
class UnusedMethodParameterTest extends InspectionTest {
override val inspections = Seq(new UnusedMethodParameter)
"UnusedMethodParameter" - {
"should report warning" - {
"for unused parameters in concrete methods" in {
val code = """class Test {
val initstuff = "sammy"
def foo(a:String, b:Int, c:Int) {
println(b)
foo(a,b,b)
}
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 1
compiler.scapegoat.feedback.warns.size shouldBe 1
}
}
"should ignore @SuppressWarnings" in {
val code = """class Test {
@SuppressWarnings(Array("all"))
def foo(a:String, b:Int, c:Int) {
println(b)
foo(a,b,b)
}
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"should not report warning" - {
"for main method" in {
val code = """class Test {
def main(args: Array[String]) : Unit = {}
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for abstract methods" in {
val code = """abstract class Test {
def foo(name:String) : String
} """.stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for abstract constructor" in {
val code = """abstract class EventBusMessage(messageVersion: Int)"""
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for methods not returning" in {
val code = """class Test {
| def foo(name:String) = throw new RuntimeException
|}""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for methods not returning when their return type is specified" in {
val code = """class Test {
| def foo(name:String): String = throw new RuntimeException
|}""".stripMargin
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for overridden method" in {
val code = """package com.sam
trait Foo {
def foo(name:String):String
}
object Fool extends Foo {
override def foo(name:String) : String = "sam"
} """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for overridden method without override keyword" in {
val code = """package com.sam
trait Foo {
def foo(name:String):String
}
object Fool extends Foo {
def foo(name:String) : String = "sam"
} """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for implemented method" in {
val code = """package com.sam
trait Foo {
def foo(name:String): String
}
case class Fool() extends Foo {
def foo(name:String): String = "sam"
} """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
"for js.native defined method" in {
val code = """package scala.scalajs {
object js {
def native: Nothing = ???
}
}
package com.sam {
import scalajs.js
class Foo {
def foo(name: String): String = js.native
}
} """
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
}
"should handle constructor params" - {
"ignore unused case class primary param" in {
assertNoWarnings("""case class Foo(x: Int)""")
}
"warn on unused case class secondary params" in {
val code = """case class Foo(x: Int)(y: Int)"""
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings match {
case Seq(warning: Warning) =>
warning.snippet.get should include("y")
}
}
"not warn on case class secondary params used as fields" in {
assertNoWarnings("""case class Foo(x: Int)(y: Int) {
| def example: String = {
| s"x = $x, y = $y"
| }
|}
""".stripMargin)
}
"not warn on case class secondary params used as params" in {
assertNoWarnings("""case class Foo(x: Int)(y: Int) {
| println(s"x = $x, y = $y")
|
| def example: String = "irrelevant"
|}
""".stripMargin)
}
"warn on unused non-case class primary params" in {
val code = """class Foo(x: Int)"""
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings match {
case Seq(warning: Warning) =>
warning.snippet.get should include("x")
}
}
"not warn on non-case class primary params used as fields" in {
assertNoWarnings("""class Foo(x: Int) {
| def example: String = {
| s"x = $x"
| }
|}
""".stripMargin)
}
"not warn on non-case class primary params used as params" in {
assertNoWarnings("""class Foo(x: Int) {
| println(s"x = $x")
|
| def example: String = "irrelevant"
|}
""".stripMargin)
}
"not warn on non-case class primary params marked val" in {
assertNoWarnings("""class Foo(val x: Int)""")
}
}
}
private def assertNoWarnings(code: String) = {
compileCodeSnippet(code)
compiler.scapegoat.feedback.warnings.size shouldBe 0
}
}
| sksamuel/scalac-scapegoat-plugin | src/test/scala/com/sksamuel/scapegoat/inspections/unnecessary/UnusedMethodParameterTest.scala | Scala | apache-2.0 | 6,989 |
package org.nisshiee.towerdefense
import org.nisshiee.towerdefensescala._
class RichPoint(val underlying: Point) extends AnyVal {
def +(p: Point) =
Point(underlying.x + p.x, underlying.y + p.y)
def +(p: (Int, Int)) = p match {
case (x, y) => Point(underlying.x + x, underlying.y + y)
}
def neighbors = List (
underlying + (-1, 0)
,underlying + (0, -1)
,underlying + (0, 1)
,underlying + (1, 0)
)
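  /** Squared Euclidean distance to `p`; skipping the sqrt keeps distance comparisons cheap. */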
def <>(p: Point) =
(underlying.x - p.x) * (underlying.x - p.x) + (underlying.y - p.y) * (underlying.y - p.y)
}
| nisshiee/towerdefense-nisshiee | player/src/main/scala/util/RichPoint.scala | Scala | mit | 554 |
package suiryc.scala.akka
import akka.actor.{Actor, ActorRef, Terminated}
import com.typesafe.scalalogging.StrictLogging
import scala.collection.mutable.ArrayBuffer
/**
* Reaper companion object.
*/
object Reaper {
/** Actor message: register an Actor for watching. */
case class WatchMe(ref: ActorRef)
}
/**
* Actors system reaper.
*
* The reaper watch over a list of registered actors, and call `allSoulsReaped`
* once all actors terminated.
*
* @see [[http://letitcrash.com/post/30165507578/shutdown-patterns-in-akka-2]]
*/
abstract class Reaper
extends Actor
with StrictLogging
{
import Reaper._
/** Watched actors. */
protected val watched: ArrayBuffer[ActorRef] = ArrayBuffer.empty[ActorRef]
/**
* Subclasses need to implement this method. It's the hook that's called when
* everything's dead.
*/
protected def allSoulsReaped(): Unit
/** Watch and check for termination. */
final override def receive: Receive = {
case WatchMe(ref) =>
logger.trace(s"Watching $ref")
context.watch(ref)
watched += ref
()
case Terminated(ref) =>
logger.trace(s"$ref terminated")
watched -= ref
if (watched.isEmpty) {
logger.debug("All souls reaped")
allSoulsReaped()
}
}
}
/** Simple reaper that shutdowns the system once finished. */
class ShutdownReaper extends Reaper {
/** Shutdown */
override protected def allSoulsReaped(): Unit = {
context.system.terminate()
()
}
}
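/** A minimal usage sketch (added illustration; the no-op worker below is a
  * stand-in, not part of this library). Register every top-level actor with
  * the reaper before the system starts doing real work.
  */
object ShutdownReaperExample {
  import akka.actor.{ActorSystem, Props}

  private class NoopWorker extends Actor {
    override def receive: Receive = { case _ => () }
  }

  def main(args: Array[String]): Unit = {
    val system = ActorSystem("reaper-example")
    val reaper = system.actorOf(Props(new ShutdownReaper), "reaper")
    val worker = system.actorOf(Props(new NoopWorker), "worker")
    reaper ! Reaper.WatchMe(worker)
    system.stop(worker) // once every watched actor dies, the reaper terminates the system
  }
}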
| suiryc/suiryc-scala | core/src/main/scala/suiryc/scala/akka/Reaper.scala | Scala | gpl-3.0 | 1,501 |
/**
* Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
* See accompanying LICENSE file.
*/
package kafka
import java.nio.charset.StandardCharsets
import kafka.manager.features.{ClusterFeatures, ClusterFeature}
/**
* @author hiral
*/
package object manager {
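  /** Extracts the last segment of a ZooKeeper-style path, e.g. nodeFromPath("/brokers/ids/1") == "1". */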
def nodeFromPath(s: String) : String = {
val l = s.lastIndexOf("/")
s.substring(l+1)
}
def asString(ba: Array[Byte]) : String = {
new String(ba, StandardCharsets.UTF_8)
}
def asByteArray(str: String) : Array[Byte] = {
str.getBytes(StandardCharsets.UTF_8)
}
}
| xuwei-k/kafka-manager | app/kafka/manager/package.scala | Scala | apache-2.0 | 582 |
package gov.nasa.jpl.analytics.tools.stats
import gov.nasa.jpl.analytics.base.{CliTool, Loggable}
import gov.nasa.jpl.analytics.nutch.SegmentReader
import gov.nasa.jpl.analytics.util.CommonUtil
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.nutch.protocol.Content
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
import org.kohsuke.args4j.Option
/**
* Created by karanjeetsingh on 10/13/16.
*/
class DDStats extends CliTool {
@Option(name = "-m", aliases = Array("--master"))
var sparkMaster: String = "local[*]"
@Option(name = "-s", aliases = Array("--segmentDir"))
var segmentDir: String = ""
@Option(name = "-f", aliases = Array("--segmentFile"))
var segmentFile: String = ""
var sc: SparkContext = _
def init(): Unit = {
val conf = new SparkConf()
conf.setAppName("DDStats")
.setMaster(sparkMaster)
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.set("spark.kryo.classesToRegister", "org.apache.nutch.protocol.Content")
.set("spark.kryoserializer.buffer.max", "2040m")
sc = new SparkContext(conf)
}
override def run(): Unit = {
// Initialize SparkContext
init()
// Generate a list of segment parts
var parts: List[Path] = List()
val config: Configuration = sc.hadoopConfiguration
if (!segmentDir.isEmpty) {
parts = SegmentReader.listFromDir(segmentDir, config)
} else if (!segmentFile.isEmpty) {
parts = SegmentReader.listFromFile(segmentFile)
} else {
println("Please provide Segment Path")
System.exit(1)
}
// Converting all Segment parts to RDDs
var docs: Array[String] = Array()
var rdds: Seq[RDD[Tuple2[String, Content]]] = Seq()
for (part <- parts) {
rdds :+= sc.sequenceFile[String, Content](part.toString)
}
println("Number of Segments to process: " + rdds.length)
// Union of all RDDs
val segRDD:RDD[Tuple2[String, Content]] = sc.union(rdds)
segRDD.saveAsSequenceFile("allSegments")
// Filtering & Operations
    val filteredRDD = segRDD.filter({case(_, content) => SegmentReader.filterUrl(content)})
    val urlRDD = filteredRDD.map({case(url, _) => url}).distinct()
val hostRDD = urlRDD.map(url => CommonUtil.getHost(url)).distinct().collect()
urlRDD.map(url => (CommonUtil.getHost(url), 1))
.reduceByKey(_ + _, 1)
.map(item => item.swap)
.sortByKey(false, 1)
.map(item => item.swap)
.saveAsTextFile("host-url")
println("Number of Hosts: " + hostRDD.length)
    println("Number of Web Pages: " + urlRDD.count())
sc.stop()
}
}
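// Example invocation (hypothetical paths):
//   spark-submit --class gov.nasa.jpl.analytics.tools.stats.DDStats \
//     analytics.jar -m "local[*]" -s /data/crawl/segments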
object DDStats extends Loggable with Serializable {
def main(args: Array[String]) {
new DDStats().run(args)
}
} | USCDataScience/nutch-analytics | src/main/scala/gov/nasa/jpl/analytics/tools/stats/DDStats.scala | Scala | apache-2.0 | 2,847 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive
import monix.eval.{Task, TaskLike}
import monix.execution.cancelables.AssignableCancelable
import monix.execution.{Cancelable, Callback, Scheduler}
import monix.reactive.internal.consumers._
import monix.reactive.observers.Subscriber
/** The `Consumer` is a specification of how to consume an observable.
*
* It is a factory of subscribers with a completion callback attached,
* being effectively a way to transform observables into
* [[monix.eval.Task tasks]] for less error prone consuming of streams.
*/
abstract class Consumer[-In, +R] extends ((Observable[In]) => Task[R])
with Serializable { self =>
/** Builds a new [[monix.reactive.observers.Subscriber Subscriber]]
* that can be subscribed to an [[Observable]] for consuming a stream,
* with a callback that should eventually get called with a materialized
* result.
*
* Notes:
*
* - calling the callback must obey the contract for the
* [[monix.execution.Callback Callback]] type
* - the given callback should always get called, unless the
* upstream gets canceled
* - the given callback can be called when the subscriber is
* finished processing, but not necessarily
* - if the given callback isn't called after the subscriber is
* done processing, then the `Task` returned by [[apply]]
* loses the ability to cancel the stream, as that `Task` will
* complete before the stream is finished
*
* @param cb is the [[monix.execution.Callback Callback]] that will get
* called once the created subscriber is finished.
* @param s is the [[monix.execution.Scheduler Scheduler]] that will
* get used for subscribing to the source observable and to
* process the events.
*
* @return a new subscriber that can be used to consume observables.
*/
def createSubscriber(cb: Callback[Throwable, R], s: Scheduler): (Subscriber[In], AssignableCancelable)
/** Given a source [[Observable]], convert it into a [[monix.eval.Task Task]]
* by piggybacking on [[createSubscriber]].
*/
final def apply(source: Observable[In]): Task[R] =
Task.create { (scheduler, cb) =>
val (out, consumerSubscription) = createSubscriber(cb, scheduler)
// Start consuming the stream
val sourceSubscription = source.subscribe(out)
// Assign the observable subscription to our assignable,
// thus the subscriber can cancel its subscription
consumerSubscription := sourceSubscription
// We might not return the assignable returned by `createSubscriber`
// because it might be a dummy
if (consumerSubscription.isInstanceOf[Cancelable.IsDummy])
sourceSubscription
else
consumerSubscription
}
/** Given a contravariant mapping function, transform
* the source consumer by transforming the input.
*/
final def contramap[In2](f: In2 => In): Consumer[In2, R] =
new ContraMapConsumer[In2,In,R](self, f)
/** Given a function that transforms the input stream, uses it
* to transform the source consumer into one that accepts events
* of the type specified by the transformation function.
*/
final def transformInput[In2](f: Observable[In2] => Observable[In]): Consumer[In2, R] =
new TransformInputConsumer[In2,In,R](self, f)
/** Given a mapping function, when consuming a stream,
* applies the mapping function to the final result,
* thus modifying the output of the source consumer.
*
* Note that for applying the mapping function an
* asynchronous boundary is forced, otherwise it could
* trigger a stack overflow exception. For more efficient
* mapping of the result, it's probably better to `map`
* the resulting `Task` on [[Observable.consumeWith]].
*
* @see [[mapTask]] for a variant that can map the output
* to a `Task` that can be processed asynchronously.
*/
final def map[R2](f: R => R2): Consumer[In, R2] =
new MapConsumer[In,R,R2](self, f)
/** Given a mapping function, when consuming a stream,
* applies the mapping function to the final result,
* thus modifying the output of the source consumer.
*
* The mapping function returns results using a generic `F[_]`
* data type that must implement the `cats.effect.Effect` type
* class. Examples of such classes are `cats.effect.IO` and
* [[monix.eval.Task]], thus being able to do asynchronous
* processing.
*
* See [[mapTask]] for the version that's specialized on `Task`.
*/
final def mapEval[F[_], R2](f: R => F[R2])(implicit F: TaskLike[F]): Consumer[In, R2] =
new MapTaskConsumer[In,R,R2](self, r => F.toTask(f(r)))
/** Given a mapping function, when consuming a stream,
* applies the mapping function to the final result,
* thus modifying the output of the source consumer.
*
* The mapping function returns a [[monix.eval.Task Task]]
* that can be used to process results asynchronously.
*
* Note that for applying the mapping function an
* asynchronous boundary is forced, otherwise it could
* trigger a stack overflow exception. For more efficient
* mapping of the result, it's probably better to `map`
* the resulting `Task` on [[Observable.consumeWith]].
*
* See [[mapEval]] for the version that can work with any
* data type that implements `cats.effect.Effect`.
*/
final def mapTask[R2](f: R => Task[R2]): Consumer[In, R2] =
new MapTaskConsumer[In,R,R2](self, f)
}
/** The companion object of [[Consumer]], defines consumer builders.
*
* @define loadBalanceDesc Creates a consumer that, when consuming
* the stream, will start multiple subscribers corresponding
* and distribute the load between them.
*
* Once each subscriber emits a final result, this consumer will
* return a list of aggregated results.
*
* Has the following rules:
*
* - items are pushed on free subscribers, respecting their
* contract, each item being pushed to the first available
* subscriber in the queue
* - in case no free subscribers are available, then the
* source gets back-pressured until free subscribers are
* available
* - in case of `onComplete` or `onError`, all subscribers
* that are still active will receive the event
* - the `onSuccess` callback of individual subscribers is
* aggregated in a list buffer and once the aggregate contains
* results from all subscribers, the load-balancing consumer
* will emit the aggregate
* - the `onError` callback triggered by individual subscribers will
* signal that error upstream and cancel the streaming for
* every other subscriber
* - in case any of the subscribers cancels its subscription
* (either returning `Stop` in `onNext` or canceling its assigned
* cancelable), it gets excluded from the pool of active
* subscribers, but the other active subscribers will still
* receive notifications
* - if all subscribers canceled (either by returning `Stop`
* or by canceling their assignable cancelable reference),
* then streaming stops as well
*
* In other words the `Task`, created by applying this consumer to
* an observable, will complete once all the subscribers emit a result
* or as soon as an error happens.
*
* @define loadBalanceReturn a list of aggregated results that
* were computed by all of the subscribers as their result
*/
object Consumer {
/** Creates a [[Consumer]] out of the given function.
*
* The function returns an [[Observer]] and takes as input:
*
* - a [[monix.execution.Scheduler Scheduler]] for any asynchronous
* execution needs the returned observer might have
* - a [[monix.execution.Cancelable Cancelable]] that can be used for
* concurrently canceling the stream (in addition to being able to
* return `Stop` from `onNext`)
* - a [[monix.execution.Callback Callback]] that must be called
* to signal the final result, after the observer finished
* processing the stream, or an error if the processing finished
* in error
*
* @param f is the input function with an injected `Scheduler`,
* `Cancelable`, `Callback` and that returns an `Observer`
*/
def create[In,Out](f: (Scheduler, Cancelable, Callback[Throwable, Out]) => Observer[In]): Consumer[In,Out] =
new CreateConsumer[In,Out](f)
/** Given a function taking a `Scheduler` and returning an [[Observer]],
* builds a consumer from it.
*
* You can use the `Scheduler` as the execution context, for working
* with `Future`, for forcing asynchronous boundaries or for executing
* tasks with a delay.
*/
def fromObserver[In](f: Scheduler => Observer[In]): Consumer[In, Unit] =
new FromObserverConsumer[In](f)
/** A consumer that immediately cancels its upstream after subscription. */
def cancel[A]: Consumer.Sync[A, Unit] =
CancelledConsumer
/** A consumer that triggers an error and immediately cancels its
* upstream after subscription.
*/
def raiseError[In, R](ex: Throwable): Consumer.Sync[In,R] =
new RaiseErrorConsumer(ex)
/** Given a fold function and an initial state value, applies the
* fold function to every element of the stream and finally signaling
* the accumulated value.
*
* @param initial is a lazy value that will be fed at first
* in the fold function as the initial state.
* @param f is the function that calculates a new state on each
* emitted value by the stream, for accumulating state
*/
def foldLeft[S,A](initial: => S)(f: (S,A) => S): Consumer.Sync[A,S] =
new FoldLeftConsumer[A,S](initial _, f)
/** Given a fold function and an initial state value, applies the
* fold function to every element of the stream and finally signaling
* the accumulated value.
*
* The given fold function returns an `F[A]` value, where `F` is
* any data type that implements `cats.effect.Effect` (e.g. `Task`,
* `Coeval`), thus able to do asynchronous processing, with
* ordering of calls being guaranteed.
*
* @param initial is a lazy value that will be fed at first
* in the fold function as the initial state.
*
* @param f is the function that calculates a new state on each
* emitted value by the stream, for accumulating state,
* returning a `F[A]` capable of lazy or asynchronous
* execution.
*/
def foldLeftEval[F[_], S, A](initial: => S)(f: (S, A) => F[S])(implicit F: TaskLike[F]): Consumer[A, S] =
new FoldLeftTaskConsumer[A,S](initial _, (s, a) => F.toTask(f(s, a)))
/** Given a fold function and an initial state value, applies the
* fold function to every element of the stream and finally signaling
* the accumulated value.
*
* The given fold function returns a `Task` that can execute an
* asynchronous operation, with ordering of calls being guaranteed.
*
* @param initial is a lazy value that will be fed at first
* in the fold function as the initial state.
* @param f is the function that calculates a new state on each
* emitted value by the stream, for accumulating state,
* returning a `Task` capable of asynchronous execution.
*/
def foldLeftTask[S,A](initial: => S)(f: (S,A) => Task[S]): Consumer[A,S] =
new FoldLeftTaskConsumer[A,S](initial _, f)
/** A consumer that will produce the first streamed value on
* `onNext` after which the streaming gets cancelled.
*
    * In case the stream is empty, and so no `onNext` happens before
    * `onComplete`, a `NoSuchElementException` will be triggered.
*/
def head[A]: Consumer.Sync[A, A] =
new HeadConsumer[A]
/** A consumer that will produce the first streamed value on
* `onNext` after which the streaming gets cancelled.
*
    * In case the stream is empty, and so no `onNext` happens before
    * `onComplete`, `None` is signaled instead of an error.
*/
def headOption[A]: Consumer.Sync[A, Option[A]] =
new HeadOptionConsumer[A]
/** A consumer that will produce a [[Notification]] of the first value
* received (`onNext`, `onComplete` or `onError`), after which the
* streaming gets cancelled.
*
* - [[Notification.OnNext OnNext]] will be signaled on the first `onNext`
* event if it happens and the streaming will be stopped by `Stop`.
* - [[Notification.OnComplete OnComplete]] will be signaled if the stream
* was empty and thus completed without any `onNext`.
* - [[Notification.OnError OnError]] will be signaled if the stream
* was completed in error before the first `onNext` happened.
*/
def firstNotification[A]: Consumer.Sync[A, Notification[A]] =
new FirstNotificationConsumer[A]
/** A simple consumer that consumes all elements of the
* stream and then signals its completion.
*/
def complete[A]: Consumer.Sync[A, Unit] =
CompleteConsumer
/** Builds a consumer that will consume the stream, applying the given
* function to each element and then finally signaling its completion.
*
* @param cb is the function that will be called for each element
*/
def foreach[A](cb: A => Unit): Consumer.Sync[A, Unit] =
new ForeachConsumer[A](cb)
/** Builds a consumer that will consume the stream, applying the given
* function to each element and then finally signaling its completion.
*
* The given callback function returns a `F[A]` value that can
* execute an asynchronous operation, with ordering of calls being
* guaranteed, given that the `F[_]` data type is any type that
* implements `cats.effect.Effect` (e.g. `Task`, `IO`).
*
* @param cb is the function that will be called for each element
*/
def foreachEval[F[_], A](cb: A => F[Unit])(implicit F: TaskLike[F]): Consumer[A, Unit] =
foreachTask(a => F.toTask(cb(a)))
/** Builds a consumer that will consume the stream, applying the given
* function to each element and then finally signaling its completion.
*
* The given callback function returns a `Task` that can execute an
* asynchronous operation, with ordering of calls being guaranteed.
*
* @param cb is the function that will be called for each element
*/
def foreachTask[A](cb: A => Task[Unit]): Consumer[A, Unit] =
new ForeachAsyncConsumer[A](cb)
/** Builds a consumer that will consume the stream, applying the given
* function to each element, in parallel, then finally signaling its
* completion.
*
* @param parallelism is the maximum number of (logical) threads to use
* @param cb is the function that will be called for each element
*/
def foreachParallel[A](parallelism: Int)(cb: A => Unit): Consumer[A, Unit] =
loadBalance(parallelism, foreach(cb)).map(_ => ())
/** Builds a consumer that will consume the stream, applying the given
* function to each element, in parallel, then finally signaling its
* completion.
*
* The given callback function returns a `Task` that can execute an
* asynchronous operation, with ordering of calls being guaranteed
* per subscriber.
*
* @param parallelism is the maximum number of (logical) threads to use
* @param cb is the function that will be called for each element
*/
def foreachParallelTask[A](parallelism: Int)(cb: A => Task[Unit]): Consumer[A, Unit] =
loadBalance(parallelism, foreachTask(cb)).map(_ => ())
/** $loadBalanceDesc
*
* @param parallelism is the number of subscribers that will get
* initialized to process incoming events in parallel.
* @param consumer is the subscriber factory that will initialize
* all needed subscribers, in number equal to the specified
* parallelism and thus that will be fed in parallel
*
* @return $loadBalanceReturn
*/
def loadBalance[A,R](parallelism: Int, consumer: Consumer[A,R]): Consumer[A, List[R]] =
new LoadBalanceConsumer[A,R](parallelism, Array(consumer))
/** $loadBalanceDesc
*
* @param consumers is a list of consumers that will initialize
* the subscribers that will process events in parallel,
* with the parallelism factor being equal to the number
* of consumers specified in this list.
*
* @return $loadBalanceReturn
*/
def loadBalance[A,R](consumers: Consumer[A,R]*): Consumer[A, List[R]] =
new LoadBalanceConsumer[A,R](consumers.length, consumers.toArray)
/** Defines a synchronous [[Consumer]] that builds
* [[monix.reactive.observers.Subscriber.Sync synchronous subscribers]].
*/
trait Sync[-In, +R] extends Consumer[In, R] {
override def createSubscriber(cb: Callback[Throwable, R], s: Scheduler): (Subscriber.Sync[In], AssignableCancelable)
}
}
| ddworak/monix | monix-reactive/shared/src/main/scala/monix/reactive/Consumer.scala | Scala | apache-2.0 | 17,921 |
package net.paploo.orbital.rocket
import net.paploo.orbital.planetarysystem.Planetoid
import net.paploo.orbital.phys.{ PhysVec, State, OrbitalParameters }
import net.paploo.orbital.phys.PhysVec.{ SphericalVec, VecDouble }
import blackbox.BlackBox
import blackbox.{ RocketAnalyzer, StagedRocketAnalyzer }
import net.paploo.orbital.phys.Steppable
object Rocket {
/** Rocket ISP is calculated using this value for the surface gravity. */
val ispSurfaceGravity = 9.8072
/**
   * While the true drag coefficient is a weighted average over parts, ships
   * without deployed parachutes typically come out very nearly 0.2 as of 0.21.
*/
val standardDragCoefficient = 0.2
/** As of 0.21, the cross-sectional area isn't calculated and instead set to 1. */
val crossSectionalArea = 1.0
}
/**
* The Rocket trait.
*
* All rockets are considered immutable, so most values are declared as val
* or lazy val.
*
 * Rockets are Steppables with OrbitalParameters; see those traits for details.
*
* When subclassing, it is important to override the step method from Steppable,
* as well as provide for state, mass, attitude, thrust, and mass flow.
*
*/
trait Rocket[+T <: Rocket[T]] extends Steppable[T] with OrbitalParameters {
this: T =>
val state: State
val mass: Double
val massFlow: Double
val attitude: PhysVec
val thrust: Double
def isFuelStarved: Boolean
lazy val isOnCrashCourse: Boolean = periapsis <= planetoid.radius
lazy val isInStableOrbit: Boolean =
isFuelStarved && planetoid.isAboveAtmosphere(pos) && !isOnCrashCourse
def deltaV: Double
lazy val force: PhysVec = gravForce + dragForce + thrustForce
lazy val acceleration: PhysVec = force / mass
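  /** Newtonian gravitation: F = -(mu * m / |r|^2) * r-hat, pointing toward the planetoid's center. */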
lazy val gravForce: PhysVec =
-((planetoid.mu * mass) / (pos.sq)) * pos.unit
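  /**
   * KSP-style drag: F = 0.5 * rho * v_rel^2 * m * Cd * A, directed opposite the
   * velocity relative to the rotating atmosphere (hence the subtraction of the
   * planetoid's linear velocity).
   */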
lazy val dragForce: PhysVec = {
val relVel = vel - planetoid.linearVelocity(pos)
val magnitude = 0.5 * planetoid.density(pos) * relVel.sq * mass * Rocket.standardDragCoefficient * Rocket.crossSectionalArea
-magnitude * relVel.unit
}
lazy val thrustForce: PhysVec = SphericalVec(thrust, attitude.phi, attitude.th)
def blackBox: BlackBox[T]
override def toString = s"${getClass.getSimpleName}($state)"
}
/** A concrete unpowered Rocket superclass. */
class UnpoweredRocket(override val state: State,
override val mass: Double,
override val blackBox: BlackBox[UnpoweredRocket])
extends Rocket[UnpoweredRocket]
with Steppable[UnpoweredRocket] {
/* Overriding this to remove the thrust component reduces computation time by 25% */
override lazy val force = gravForce + dragForce
override val thrust = 0.0
override val massFlow = 0.0
override val attitude = PhysVec.zero
override val isFuelStarved = true
override val deltaV = 0.0
override def physStep(deltaT: Double): Option[UnpoweredRocket] =
Option(new UnpoweredRocket(state.step(deltaT, acceleration), mass, blackBox))
override def analyzeSteps[U >: UnpoweredRocket <: Steppable[U]](steps: (U, U)): Option[U] = steps match {
case (step: UnpoweredRocket, nextStep: UnpoweredRocket) => {
if (step.blackBox.isNewlyTerminated) None
else {
val eventLog = new RocketAnalyzer(step, nextStep).analyze
if (eventLog.isEmpty) Some(nextStep)
else Option(nextStep ++ eventLog)
}
}
case _ => None
}
def ++(eventLog: BlackBox.EventLog[UnpoweredRocket]): UnpoweredRocket =
new UnpoweredRocket(state, mass, blackBox ++ eventLog)
}
/**
* StagedRockets are PoweredRockets with a list of Stages to burn through.
*
* The simplest powered rocket is merely a staged rocket with one stage.
*/
class StagedRocket(override val state: State,
override val attitude: PhysVec,
val throttle: Double,
val stages: List[Stage],
override val blackBox: BlackBox[StagedRocket])
extends Rocket[StagedRocket]
with Steppable[StagedRocket] {
override lazy val mass = Stage.mass(stages)
lazy val currentStage = stages.head
lazy val atm = planetoid.atm(pos)
override lazy val thrust: Double = currentStage.thrust(atm, throttle)
override lazy val massFlow: Double = currentStage.massFlow(atm, throttle)
override lazy val isFuelStarved: Boolean = stages.forall(_.isEmpty)
override lazy val deltaV = Stage.deltaV(stages)
override def physStep(deltaT: Double): Option[StagedRocket] =
Option(new StagedRocket(
state.step(deltaT, acceleration),
attitude,
throttle,
steppedStages(deltaT),
blackBox
))
override def analyzeSteps[U >: StagedRocket <: Steppable[U]](steps: (U, U)): Option[U] = steps match {
case (step: StagedRocket, nextStep: StagedRocket) => {
if (step.blackBox.isNewlyTerminated) None
else {
val eventLog = new StagedRocketAnalyzer(step, nextStep).analyze
if (eventLog.isEmpty) Option(nextStep)
else Option(nextStep ++ eventLog)
}
}
case _ => None
}
def ++(eventLog: BlackBox.EventLog[StagedRocket]): StagedRocket =
new StagedRocket(
state,
attitude,
throttle,
stages,
blackBox ++ eventLog
)
protected def steppedStages(deltaT: Double): List[Stage] =
if (stages.isEmpty) Nil
else {
val nextStages = if (currentStage.isEmpty && stages.length > 1) stages.tail else stages
nextStages.head.step(deltaT, atm, throttle) :: nextStages.tail
}
}
| paploo/orbital | src/main/scala/net/paploo/orbital/rocket/Rocket.scala | Scala | bsd-3-clause | 5,464
/*
* Copyright 2016 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.common
import java.io.File
import org.scalatest.{FunSuite, Matchers}
final class TestClassUtil extends FunSuite with Matchers {
import fm.common.test.classutil._
private val testDirPath: String = "fm/common/test/classutil"
private val testDirPaths: Seq[String] = Seq(testDirPath, testDirPath+"/", "/"+testDirPath, "/"+testDirPath+"/")
private val testDirFiles: Seq[File] = testDirPaths.map{ new File(_) }
private val testPath: String = "fm/common/test/classutil/lorem %20ipsum.txt"
private val testPaths: Seq[String] = Seq(testPath, "/"+testPath)
private val testFiles: Seq[File] = testPaths.map{ new File(_) }
private val testClasses: Set[String] = Set(
"fm.common.test.classutil.TestClass",
"fm.common.test.classutil.TestClass$",
"fm.common.test.classutil.TestClassExtendsTestTrait",
"fm.common.test.classutil.subpackage.TestSubPackageClass",
"fm.common.test.classutil.TestTrait",
"fm.common.test.classutil.TestJavaAnnotatedClass",
"fm.common.test.classutil.TestObject",
"fm.common.test.classutil.TestObject$",
"fm.common.test.classutil.TestObjectExtendsTestTrait",
"fm.common.test.classutil.TestObjectExtendsTestTrait$"
)
test("classForName") {
ClassUtil.classForName("fm.common.test.classutil.TestClass") shouldBe classOf[TestClass]
ClassUtil.classForName("fm.common.test.classutil.TestClass$") shouldBe TestClass.getClass
}
test("getClassForName") {
ClassUtil.getClassForName("fm.common.test.classutil.TestClass") shouldBe Some(classOf[TestClass])
ClassUtil.getClassForName("fm.common.test.classutil.TestClass$") shouldBe Some(TestClass.getClass)
}
test("companionObject") {
ClassUtil.companionObject(classOf[TestClass]) shouldBe TestClass
}
test("getCompanionObject") {
ClassUtil.getCompanionObject(classOf[TestClass]) shouldBe Some(TestClass)
}
test("isScalaObject") {
ClassUtil.isScalaObject("fm.common.test.classutil.TestObject$") shouldBe true
ClassUtil.isScalaObject(TestObject.getClass) shouldBe true
}
test("getScalaObject") {
ClassUtil.getScalaObject(TestObject.getClass) shouldBe Some(TestObject)
}
test("getScalaObjectAs") {
ClassUtil.getScalaObjectAs[TestClass](TestObject.getClass) shouldBe Some(TestObject)
ClassUtil.getScalaObjectAs(TestObject.getClass, classOf[TestClass]) shouldBe Some(TestObject)
ClassUtil.getScalaObjectAs[TestTrait](TestObject.getClass) shouldBe None
ClassUtil.getScalaObjectAs(TestObject.getClass, classOf[TestTrait]) shouldBe None
}
// classExists
test("classExists - defaultClassLoader - w/package") {
ClassUtil.classExists("fm.common.ClassUtil") shouldBe true
}
test("classExists - defaultClassLoader - w/o package") {
ClassUtil.classExists("ClassUtil") shouldBe false
}
test("classExists - defaultClassLoader - no class") {
ClassUtil.classExists("fm.common.FooBar") shouldBe false
}
//test("classExists - custom classLoader") { }
test("classpathContentLength") {
// Simpler to just hard code the length of the classutil/lorem-ipsum.txt file here
// ls -al lorem-ipsum.txt
// -rw-r--r--@ 1 eric staff 2771 Aug 8 13:27 lorem-ipsum.txt
testPaths.foreach{ ClassUtil.classpathContentLength(_) shouldBe 2771 }
testFiles.foreach{ ClassUtil.classpathContentLength(_) shouldBe 2771 }
}
// classpathDirExists
test("classpathDirExists - directories") {
testDirPaths.foreach { ClassUtil.classpathDirExists(_) shouldBe true }
testDirFiles.foreach { ClassUtil.classpathDirExists(_) shouldBe true }
}
test("classpathDirExists - files") {
testPaths.foreach{ ClassUtil.classpathDirExists(_) shouldBe false }
testFiles.foreach{ ClassUtil.classpathDirExists(_) shouldBe false }
}
// classpathFileExists
test("classpathFileExists - directories") {
// Test Directories
testDirPaths.foreach{ ClassUtil.classpathFileExists(_) shouldBe false }
testDirFiles.foreach{ ClassUtil.classpathFileExists(_) shouldBe false }
}
test("classpathFileExists - files") {
testPaths.foreach { ClassUtil.classpathFileExists(_) shouldBe true }
testFiles.foreach { ClassUtil.classpathFileExists(_) shouldBe true }
}
// classpathFileExists
/*
  // Directory timestamps change every time the files are moved to a new resource directory/jar file/etc.,
  // so this manual test for directories is commented out to keep the tests passing.
test("classpathLastModified - directories") {
    // This is the project-relative path for the directory, and the test assumes it is run from the project home
val f: File = new File("jvm/src/test/resources/test/classutil")
assert(f.isDirectory, s"$f must be a directory (is the working directory the project home?)")
testDirPaths.foreach{ ClassUtil.classpathLastModified(_) shouldBe f.lastModified }
testDirFiles.foreach{ ClassUtil.classpathLastModified(_) shouldBe f.lastModified }
}*/
test("classpathLastModified - files") {
val f: File = new File(s"jvm/src/test/resources/$testPath")
assert(f.isFile, s"$f must be a file (is the working directory the project home?)")
testPaths.foreach { ClassUtil.classpathLastModified(_) shouldBe f.lastModified }
testFiles.foreach { ClassUtil.classpathLastModified(_) shouldBe f.lastModified }
}
test("findAnnotatedClasses") {
ClassUtil.findAnnotatedClasses("fm.common.test.classutil", classOf[java.lang.Deprecated]) shouldBe Set(classOf[TestJavaAnnotatedClass])
}
test("findClassNames") {
ClassUtil.findClassNames("fm.common.test.classutil") shouldBe testClasses
}
test("findClassNames - defaultClassLoader - jar file") {
ClassUtil.findClassNames("scala.collection.immutable") should contain ("scala.collection.immutable.List")
}
// Includes recursive file(s)
test("findClasspathFiles") {
    // Normal Resource Directory + Class Files
val expectedFiles: Set[File] = {
testClasses.map{ _.replace(".", "/") + ".class" } ++ Set(testPath, "fm/common/test/classutil/subdirectory/subfile.txt")
}.map{ new File(_) }
ClassUtil.findClasspathFiles("fm.common.test.classutil") shouldBe expectedFiles
// Empty Paths
ClassUtil.findClasspathFiles("") should not be empty
ClassUtil.findClasspathFiles("/") should not be empty
// Jar Files
ClassUtil.findClasspathFiles("scala.collection") should contain (new File("scala/collection/immutable/List.class"))
}
test("findImplementingObjects") {
ClassUtil.findImplementingObjects("fm.common.test.classutil", classOf[TestClass]) shouldBe Set(TestObject)
ClassUtil.findImplementingObjects("fm.common.test.classutil", TestObject.getClass) shouldBe Set(TestObject)
}
test("findImplementingClasses") {
ClassUtil.findImplementingClasses("fm.common.test.classutil", classOf[TestTrait]) shouldBe Set(classOf[TestClassExtendsTestTrait], TestObjectExtendsTestTrait.getClass)
}
/*
test("def findLoadedClass(cls: String, classLoader: ClassLoader = defaultClassLoader): Option[Class[_]]") { }
test("def isClassLoaded(cls: String, classLoader: ClassLoader = defaultClassLoader): Boolean") { }
*/
// Does NOT include recursive file(s)
test("listClasspathFiles - defaultClassLoader") {
    // Normal Resource Directory + Class Files
val updatedTestClasses: Set[String] = testClasses - "fm.common.test.classutil.subpackage.TestSubPackageClass" // don't include subpackage class
val expectedFiles: Set[File] = {
updatedTestClasses.map{ _.replace(".", "/") + ".class" } ++ Set(testPath, "fm/common/test/classutil/subpackage", "fm/common/test/classutil/subdirectory")
}.map{ new File(_) }
ClassUtil.listClasspathFiles("fm.common.test.classutil") shouldBe expectedFiles
// Empty Paths
ClassUtil.listClasspathFiles("") should not be empty
ClassUtil.listClasspathFiles("/") should not be empty
// Jar Files
ClassUtil.listClasspathFiles("scala.collection") should contain (new File("scala/collection/Seq.class"))
ClassUtil.listClasspathFiles("scala.collection") should not contain (new File("scala/collection/immutable/List.class"))
}
test("requireClass") {
// This shouldn't throw an exception
ClassUtil.requireClass("fm.common.ClassUtil", "ClassUtil must exist")
val msg: String = "my custom exception message"
val caughtException: Exception = intercept[Exception] { ClassUtil.requireClass("ClassUtil", msg) }
    // Error message is something like "Missing Class: ClassUtil - my custom exception message", so just check that it contains the custom msg
caughtException.getMessage should include(msg)
}
}
| frugalmechanic/fm-common | jvm/src/test/scala/fm/common/TestClassUtil.scala | Scala | apache-2.0 | 9,256 |
package codechicken.microblock
import codechicken.multipart.asm.{ScratchBitSet, ASMMixinFactory}
import java.util.BitSet
object MicroblockGenerator extends ASMMixinFactory(classOf[Microblock], classOf[Int]) with ScratchBitSet
{
trait IGeneratedMaterial
{
def addTraits(traits:BitSet, mcrClass:MicroblockClass, client:Boolean)
}
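    /**
     * Builds a microblock for the given material: mixes in the microblock
     * class's base trait (plus its client trait on the client side) and any
     * extra traits a generated material contributes via IGeneratedMaterial,
     * then constructs the instance through the ASM mixin factory.
     */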
def create(mcrClass: MicroblockClass, material: Int, client: Boolean) = {
val bitset = freshBitSet
bitset.set(mcrClass.baseTraitId)
if(client) bitset.set(mcrClass.clientTraitId)
MicroMaterialRegistry.getMaterial(material) match {
case genMat:IGeneratedMaterial => genMat.addTraits(bitset, mcrClass, client)
case _ =>
}
construct(bitset, material:Integer)
}
}
| Chicken-Bones/ForgeMultipart | src/codechicken/microblock/MicroblockGenerator.scala | Scala | lgpl-2.1 | 792 |
package scalikejdbc
import java.time.ZoneId
import com.google.cloud.bigquery.{QueryJobConfiguration, QueryParameterValue}
import scalikejdbc.bigquery.{BqParameter, BqPreparedStatement, Format}
import scala.collection.JavaConverters._
import scala.language.reflectiveCalls
object QueryRequestBuilder {
private val LocalDateEpoch = java.time.LocalDate.ofEpochDay(0)
/**
   * Instantiate a QueryJobConfiguration.Builder with the SQL statement and parameters set.
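   *
   * Example (a minimal sketch; the `sqls` string interpolation comes from
   * scalikejdbc's interpolation support):
   * {{{
   *   val jobConfig = QueryRequestBuilder(sqls"SELECT name FROM users WHERE id = ${42}").build()
   * }}}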
*/
def apply(statement: SQLSyntax): QueryJobConfiguration.Builder = {
val builder = QueryJobConfiguration.newBuilder(statement.value)
val ps = new BqPreparedStatement
// almost same implementation as scalikejdbc.StatementExecutor
statement.rawParameters.zipWithIndex.foreach { case (param, index) =>
param match {
case binder: ParameterBinder =>
binder(ps, index)
case p: BigDecimal => ps.setBigDecimal(index, p.bigDecimal)
case p: BigInt => ps.setBigDecimal(index, new java.math.BigDecimal(p.bigInteger))
case p: Boolean => ps.setBoolean(index, p)
case p: Byte => ps.setByte(index, p)
case p: java.sql.Date => ps.setDate(index, p)
case p: Double => ps.setDouble(index, p)
case p: Float => ps.setFloat(index, p)
case p: Int => ps.setInt(index, p)
case p: Long => ps.setLong(index, p)
case p: Short => ps.setShort(index, p)
case p: String => ps.setString(index, p)
case p: java.sql.Time => ps.setTime(index, p)
case p: java.sql.Timestamp => ps.setTimestamp(index, p)
case p: java.util.Date => ps.setTimestamp(index, p.toSqlTimestamp)
case p: java.time.ZonedDateTime => ps.setTimestamp(index, java.sql.Timestamp.from(p.toInstant))
case p: java.time.OffsetDateTime => ps.setTimestamp(index, java.sql.Timestamp.from(p.toInstant))
case p: java.time.Instant => ps.setTimestamp(index, java.sql.Timestamp.from(p))
case p: java.time.LocalDateTime =>
ps.setTimestamp(index, java.sql.Timestamp.valueOf(p))
case p: java.time.LocalDate =>
ps.setDate(index, java.sql.Date.valueOf(p))
case p: java.time.LocalTime =>
val millis = p.atDate(LocalDateEpoch).atZone(java.time.ZoneId.systemDefault).toInstant.toEpochMilli
val time = new java.sql.Time(millis)
ps.setTime(index, time)
case p =>
param.getClass.getCanonicalName match {
case "org.joda.time.DateTime" =>
val t = p.asInstanceOf[ {def toDate: java.util.Date}].toDate.toSqlTimestamp
ps.setTimestamp(index, t)
case "org.joda.time.LocalDateTime" =>
val t = p.asInstanceOf[ {def toDate: java.util.Date}].toDate.toSqlTimestamp
ps.setTimestamp(index, t)
case "org.joda.time.LocalDate" =>
val t = p.asInstanceOf[ {def toDate: java.util.Date}].toDate.toSqlDate
ps.setDate(index, t)
case "org.joda.time.LocalTime" =>
val millis = p.asInstanceOf[ {def toDateTimeToday: {def getMillis: Long}}].toDateTimeToday.getMillis
ps.setTime(index, new java.sql.Time(millis))
case _ =>
throw new UnsupportedOperationException(
s"unsupported parameter type. index: ${index}, parameter : ${param}, class: ${param.getClass}")
}
}
}
val parameters = ps.parameters.toList
.sortBy { case (parameterIndex, _) => parameterIndex }
.map { case (_, parameter) =>
parameter match {
case BqParameter.Int64(value) =>
QueryParameterValue.int64(value)
case BqParameter.Float64(value) =>
QueryParameterValue.float64(value)
case BqParameter.Bool(value) =>
QueryParameterValue.bool(value)
case BqParameter.String(value) =>
QueryParameterValue.string(value)
case BqParameter.Bytes(value) =>
QueryParameterValue.bytes(value)
case BqParameter.Date(value) =>
QueryParameterValue.date(value.format(Format.date))
case BqParameter.DateTime(value) =>
QueryParameterValue.dateTime(value.format(Format.dateTime))
case BqParameter.Time(value) =>
QueryParameterValue.time(value.format(Format.time))
case BqParameter.Timestamp(value) =>
QueryParameterValue.timestamp(value.withZoneSameInstant(ZoneId.of("UTC")).format(Format.timestamp))
}
}.asJava
builder.setPositionalParameters(parameters)
}
}
| ocadaruma/scalikejdbc-bigquery | src/main/scala/scalikejdbc/QueryRequestBuilder.scala | Scala | apache-2.0 | 4,580 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import com.intel.analytics.bigdl.tensor.Tensor
class ErfSpec extends UnaryOpBaseSpec {
override def getOpName: String = "Erf"
override def getInput: Tensor[_] = Tensor[Float](4, 32, 32, 3).rand()
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/utils/tf/loaders/ErfSpec.scala | Scala | apache-2.0 | 858 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.kafka010.mocks
import java.util.concurrent.TimeUnit
import scala.collection.mutable.PriorityQueue
import kafka.utils.{Scheduler, Time}
/**
* A mock scheduler that executes tasks synchronously using a mock time instance.
* Tasks are executed synchronously when the time is advanced.
* This class is meant to be used in conjunction with MockTime.
*
* Example usage
* <code>
* val time = new MockTime
* time.scheduler.schedule("a task", println("hello world: " + time.milliseconds), delay = 1000)
* time.sleep(1001) // this should cause our scheduled task to fire
* </code>
*
* Incrementing the time to the exact next execution time of a task will result in that task
 * executing (it is as if execution itself takes no time).
*/
private[kafka010] class MockScheduler(val time: Time) extends Scheduler {
/* a priority queue of tasks ordered by next execution time */
var tasks = new PriorityQueue[MockTask]()
def isStarted: Boolean = true
def startup(): Unit = {}
def shutdown(): Unit = synchronized {
tasks.foreach(_.fun())
tasks.clear()
}
/**
* Check for any tasks that need to execute. Since this is a mock scheduler this check only occurs
* when this method is called and the execution happens synchronously in the calling thread.
* If you are using the scheduler associated with a MockTime instance this call
* will be triggered automatically.
*/
def tick(): Unit = synchronized {
val now = time.milliseconds
while(!tasks.isEmpty && tasks.head.nextExecution <= now) {
/* pop and execute the task with the lowest next execution time */
val curr = tasks.dequeue
curr.fun()
/* if the task is periodic, reschedule it and re-enqueue */
if(curr.periodic) {
curr.nextExecution += curr.period
this.tasks += curr
}
}
}
def schedule(
name: String,
fun: () => Unit,
delay: Long = 0,
period: Long = -1,
unit: TimeUnit = TimeUnit.MILLISECONDS): Unit = synchronized {
tasks += MockTask(name, fun, time.milliseconds + delay, period = period)
tick()
}
}
case class MockTask(
val name: String,
val fun: () => Unit,
var nextExecution: Long,
val period: Long) extends Ordered[MockTask] {
def periodic: Boolean = period >= 0
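  // The comparison is reversed on purpose: Scala's PriorityQueue is a max-heap,
  // so reversing makes the task with the smallest nextExecution dequeue first.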
def compare(t: MockTask): Int = {
java.lang.Long.compare(t.nextExecution, nextExecution)
}
}
| bravo-zhang/spark | external/kafka-0-10/src/test/scala/org/apache/spark/streaming/kafka010/mocks/MockScheduler.scala | Scala | apache-2.0 | 3,238 |
/*
Copyright 2016-17, Hasso-Plattner-Institut fuer Softwaresystemtechnik GmbH
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package de.hpi.ingestion.deduplication.similarity
/**
* A similarity measure that computes how close two given numbers are to each other in terms of percentage.
*/
object RelativeNumbersSimilarity extends SimilarityMeasure[String] {
/**
    * Calculates how close two given numbers are to each other in terms of percentage
* @param s double to be compared to t
* @param t double to be compared to s
* @param u has no specific use in here
    * @return a normalized similarity score between 0.0 and 1.0
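    *         (e.g. compare("50", "100") yields 0.5)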
*/
override def compare(s: String, t: String, u: Int = 1) : Double = {
val max = Math.max(s.toDouble, t.toDouble)
val min = Math.min(s.toDouble, t.toDouble)
min / max
}
}
| bpn1/ingestion | src/main/scala/de/hpi/ingestion/deduplication/similarity/RoughlyEqualNumbers.scala | Scala | apache-2.0 | 1,349 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.internal
import com.intel.analytics.bigdl.{Criterion, DataSet}
import com.intel.analytics.bigdl.dllib.feature.dataset._
import com.intel.analytics.bigdl.dllib.nn.Graph._
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, Activity}
import com.intel.analytics.bigdl.dllib.nn.{Container, StaticGraph, Sequential => TSequential}
import com.intel.analytics.bigdl.dllib.optim._
import com.intel.analytics.bigdl.serialization.Bigdl.BigDLModule
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.{LoggerFilter, Shape}
import com.intel.analytics.bigdl.dllib.utils.serializer._
import org.apache.spark.rdd.RDD
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.reflect.ClassTag
abstract class KerasModel[T: ClassTag](implicit ev: TensorNumeric[T])
extends KerasLayer[Activity, Activity, T] {
def getSubModules(): List[AbstractModule[Activity, Activity, T]] = {
    require(this.labor.isInstanceOf[Container[Activity, Activity, T]],
      s"labor should be a container, but we got: $this")
this.labor.asInstanceOf[Container[Activity, Activity, T]].modules.toList
}
private var optimMethod: OptimMethod[T] = null
private var criterion: Criterion[T] = null
private var vMethods: Array[ValidationMethod[T]] = null
/**
* Configure the learning process. Must be called before fit or evaluate.
* @param optimizer Optimization method to be used.
* @param loss Criterion to be used.
* @param metrics Array of validation methods to be used.
*/
// TODO: support checkpoint, summary, etc.
def compile(optimizer: OptimMethod[T],
loss: Criterion[T],
metrics: Array[ValidationMethod[T]] = null): Unit = {
LoggerFilter.redirectSparkInfoLogs()
this.optimMethod = optimizer
this.criterion = loss
this.vMethods = metrics
}
/**
* Alternatively, one can pass in string representations when calling compile.
* For example: optimizer = "sgd", loss = "mse", metrics = Array("accuracy")
*/
def compile(optimizer: String,
loss: String,
metrics: Array[String])
(implicit ev: TensorNumeric[T]): Unit = {
this.compile(KerasUtils.toBigDLOptimMethod[T](optimizer),
KerasUtils.toBigDLCriterion[T](loss),
KerasUtils.toBigDLMetrics[T](metrics))
}
private def toDataSet(x: RDD[Sample[T]], batchSize: Int)
: DataSet[MiniBatch[T]] = {
if (x != null) DataSet.rdd(x) -> SampleToMiniBatch[T](batchSize)
else null
}
/**
* Train a model for a fixed number of epochs on a dataset.
* @param x Training dataset. If x is an instance of LocalDataSet, train in local mode.
* @param nbEpoch Number of iterations to train.
* @param validationData Dataset for validation, or null if validation is not configured.
*/
def fit[D: ClassTag](x: DataSet[D], nbEpoch: Int,
validationData: DataSet[MiniBatch[T]])
(implicit ev: TensorNumeric[T]): Unit = {
require(this.optimMethod != null && this.criterion != null,
"compile must be called before fit")
val optimizer = Optimizer(
model = this,
dataset = x,
criterion = this.criterion)
if (validationData != null) {
require(this.vMethods != null, "Validation metrics haven't been set yet")
optimizer.setValidation(trigger = Trigger.everyEpoch,
dataset = validationData,
vMethods = this.vMethods)
}
optimizer.setOptimMethod(this.optimMethod)
.setEndWhen(Trigger.maxEpoch(nbEpoch))
optimizer.optimize()
}
/**
* Train a model for a fixed number of epochs on a dataset.
* @param x Training dataset, RDD of Sample.
* @param batchSize Number of samples per gradient update.
* @param nbEpoch Number of iterations to train.
* @param validationData RDD of Sample, or null if validation is not configured.
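   *
   * Example (a minimal sketch; assumes `trainRDD: RDD[Sample[Float]]` is already
   * prepared and `compile` has been called on this model):
   * {{{
   *   model.fit(trainRDD, batchSize = 32, nbEpoch = 5)
   * }}}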
*/
def fit(x: RDD[Sample[T]], batchSize: Int = 32, nbEpoch: Int = 10,
validationData: RDD[Sample[T]] = null)
(implicit ev: TensorNumeric[T]): Unit = {
this.fit(toDataSet(x, batchSize), nbEpoch, toDataSet(validationData, batchSize))
}
/**
* Evaluate a model on a given dataset.
* @param x Evaluation dataset, RDD of Sample.
* @param batchSize Number of samples per batch.
*/
def evaluate(x: RDD[Sample[T]],
batchSize: Int)
(implicit ev: TensorNumeric[T]): Array[(ValidationResult, ValidationMethod[T])] = {
require(this.vMethods != null, "Evaluation metrics haven't been set yet")
this.evaluate(x, this.vMethods, Some(batchSize))
}
/**
* Evaluate a model in local mode.
* @param x Evaluation dataset, LocalDataSet.
*/
def evaluate(x: LocalDataSet[MiniBatch[T]])
(implicit ev: TensorNumeric[T]): Array[(ValidationResult, ValidationMethod[T])] = {
require(this.vMethods != null, "Evaluation metrics haven't been set yet")
this.evaluate(x, this.vMethods)
}
/**
* Use a model to do prediction.
* @param x Prediction data, RDD of Sample.
* @param batchSize Number of samples per batch.
*/
def predict(x: RDD[Sample[T]],
batchSize: Int)(implicit ev: TensorNumeric[T]): RDD[Activity] = {
this.predict(x, batchSize, false)
}
/**
* Use a model to do prediction in LOCAL mode.
* @param x Prediction data, LocalDataSet.
*/
def predict(x: LocalDataSet[MiniBatch[T]])(implicit ev: TensorNumeric[T]): Array[Activity] = {
val localPredictor = LocalPredictor(this)
localPredictor.predict(x)
}
}
@deprecated("`Model` is deprecated." +
"com.intel.analytics.bigdl.dllib.nn.internal is deprecated in BigDL 0.11, " +
"and will be removed in future releases", "0.10.0")
class Model[T: ClassTag](private val _inputs : Seq[ModuleNode[T]],
private val _outputs : Seq[ModuleNode[T]])(implicit ev: TensorNumeric[T])
extends KerasModel[T] {
this.labor = doBuild(null)
excludeInvalidLayers(this.labor.asInstanceOf[StaticGraph[T]].
getForwardExecutions().map {_.element})
this.inputShapeValue = Shape(_inputs.map{n => n.element.getInputShape()}.toList)
this.outputShapeValue = Shape(_outputs.map{_.element.getOutputShape()}.toList)
override def isKerasStyle(): Boolean = true
override def computeOutputShape(inputShape: Shape): Shape = {
getOutputShape()
}
override def doBuild(inputShape: Shape): StaticGraph[T] =
new StaticGraph[T](_inputs, _outputs, None, false)
override def build(calcInputShape: Shape): Shape = {
checkWithCurrentInputShape(calcInputShape)
getOutputShape()
}
}
object Model extends KerasLayerSerializable{
/**
* Build multiple inputs, multiple outputs graph container.
   * @param input input nodes
   * @param output output nodes
* @return a graph container
*/
def apply[T: ClassTag](
input : Array[ModuleNode[T]],
output : Array[ModuleNode[T]])(implicit ev: TensorNumeric[T]) : Model[T] = {
new Model[T](input, output)
}
/**
* Build a single input, multiple outputs graph container
* @param input input node
* @param output output nodes
* @return a graph container
*/
def apply[T: ClassTag](input : ModuleNode[T], output : Array[ModuleNode[T]])
(implicit ev: TensorNumeric[T]) : Model[T] = {
new Model[T](Seq(input), output)
}
/**
* Build a multiple inputs, single output graph container
* @param input input nodes
* @param output output node
* @return a graph container
*/
def apply[T: ClassTag](input : Array[ModuleNode[T]], output : ModuleNode[T])
(implicit ev: TensorNumeric[T]) : Model[T] = {
new Model[T](input, Seq(output))
}
/**
* Build a single input, single output graph container
   * @param input input node
   * @param output output node
* @return a graph container
*/
def apply[T: ClassTag](input : ModuleNode[T], output : ModuleNode[T])
(implicit ev: TensorNumeric[T]) : Model[T] = {
new Model[T](Seq(input), Seq(output))
}
override def doSerializeModule[T: ClassTag](context: SerializeContext[T],
builder: BigDLModule.Builder)
(implicit ev: TensorNumeric[T]): Unit = {
val labor = context.moduleData.module.
asInstanceOf[KerasLayer[Activity, Activity, T]].labor
val subModule = ModuleSerializer.serialize(SerializeContext(ModuleData(labor,
new ArrayBuffer[String](), new ArrayBuffer[String]()), context.storages,
context.storageType, _copyWeightAndBias))
builder.addSubModules(subModule.bigDLModule)
}
override def doLoadModule[T: ClassTag](context: DeserializeContext)
(implicit ev: TensorNumeric[T]): AbstractModule[Activity, Activity, T] = {
val subProtoModules = context.bigdlModule.getSubModulesList.asScala
val subModules = subProtoModules.map(module => {
val subModuleData = ModuleSerializer.load(DeserializeContext(module,
context.storages, context.storageType, _copyWeightAndBias))
subModuleData.module
})
val tGraph = subModules(0).asInstanceOf[StaticGraph[T]]
Model(tGraph.inputs.toArray, tGraph.outputs.toArray)
}
}
@deprecated("`Sequential` is deprecated." +
"com.intel.analytics.bigdl.dllib.nn.internal is deprecated in BigDL 0.11, " +
"and will be removed in future releases", "0.10.0")
class Sequential[T: ClassTag]()
(implicit ev: TensorNumeric[T]) extends KerasModel[T] {
private[bigdl] var frozen: Boolean = false
this.labor = doBuild(null)
private def triggerBuilding(module: AbstractModule[_ <: Activity, _ <: Activity, T]): Unit = {
if (!this.isBuilt()) {
if (module.getInputShape() == null) {
throw new RuntimeException("The first layer should explicitly declare inputshape")
} else {
val outputShape = module.build(module.getInputShape())
// The inputShape of Sequential should only be init here.
this.inputShapeValue = module.getInputShape()
this.outputShapeValue = outputShape
}
} else {
val outputShape = module.build(this.getOutputShape())
this.outputShapeValue = outputShape
}
}
/**
* Add a sub-module to the contained `modules`
*
   * @param module module to be added
* @return this container
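   *
   * Example (a minimal sketch; `Dense` is the Keras-style layer from this package):
   * {{{
   *   val model = Sequential[Float]()
   *   model.add(Dense(8, inputShape = Shape(10)))
   * }}}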
*/
def add(module: AbstractModule[_ <: Activity, _ <: Activity, T]): this.type = {
if (frozen) {
throw new RuntimeException(
"This Sequential has been frozen, as it has been added into other container")
}
if (module.isInstanceOf[Sequential[T]]) {
module.asInstanceOf[Sequential[T]].frozen = true
}
validateInput[T](Seq(module))
triggerBuilding(module)
labor.asInstanceOf[TSequential[T]].modules +=
module.asInstanceOf[AbstractModule[Activity, Activity, T]]
checkDuplicate()
this
}
override def computeOutputShape(inputShape: Shape): Shape = {
if (labor.asInstanceOf[TSequential[T]].modules.isEmpty) {
inputShape
} else {
labor.asInstanceOf[TSequential[T]].modules.last.getOutputShape()
}
}
override def doBuild(inputShape: Shape): TSequential[T] = TSequential[T]()
override def build(calcInputShape: Shape): Shape = {
checkWithCurrentInputShape(calcInputShape)
getOutputShape()
}
}
object Sequential extends KerasLayerSerializable{
def apply[@specialized(Float, Double) T: ClassTag]()
(implicit ev: TensorNumeric[T]) : Sequential[T] = {
new Sequential[T]()
}
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/internal/Topology.scala | Scala | apache-2.0 | 12,100 |
package com.github.lavrov.xml
import cats.FlatMap
import cats.data.{Kleisli, Validated, ValidatedNel}
import scala.xml._
import scala.util.control.Exception
package object reader {
type Reader[A] = Kleisli[Result, NodeSeq, A]
case class ReaderError(path: XmlPath, message: String)
type Result[A] = ValidatedNel[ReaderError, A]
def valid[A](a: A): Result[A] = Validated.valid(a)
def invalid(error: String, path: XmlPath = XmlPath.__): Result[Nothing] =
Validated invalidNel ReaderError(path, error)
implicit val resultMonad = new FlatMap[Result] {
override def map[A, B](fa: Result[A])(f: (A) => B): Result[B] = fa match {
case Validated.Valid(a) => Validated.Valid(f(a))
case [email protected](_) => invalid
}
override def flatMap[A, B](fa: Result[A])(f: (A) => Result[B]): Result[B] = fa match {
case Validated.Valid(a) => f(a)
case Validated.Invalid(e) => Validated.invalid(e)
}
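    // tailRecM is deliberately left unimplemented; the readers only rely on
    // map/flatMap, so it is never invoked.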
override def tailRecM[A, B](a: A)(f: (A) => Result[Either[A, B]]): Result[B] = ???
}
implicit val stringReader: Reader[String] = Reader {
case nodeSeq: NodeSeq if nodeSeq.size == 1 =>
nodeSeq.head.child.headOption match {
case Some(Text(value)) => valid(value)
case None => valid("")
case Some(_) => invalid("not a text node")
}
case _ =>
invalid("there must be one node containing Text node inside")
}
implicit val longReader: Reader[Long] = stringReader.andThen(
string =>
Exception.nonFatalCatch.either(string.toLong)
.fold(
_ => invalid(s"'$string' cannot be interpeted as long"),
valid
)
)
implicit val intReader: Reader[Int] = longReader.map(_.toInt)
implicit val booleanReader: Reader[Boolean] = stringReader.flatMapF {
case "true" => valid(true)
case "false" => valid(false)
case string => invalid(s"cannot interpret $string as Boolean")
}
implicit def listReader[A: Reader]: Reader[List[A]] = XmlPath.__.list[A]
implicit def optionReader[A: Reader]: Reader[Option[A]] = XmlPath.__.optional[A]
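  // Example usage (a minimal sketch, exercising the readers defined above):
  //   stringReader.run(<name>John</name>)  // Valid("John")
  //   intReader.run(<age>42</age>)         // Valid(42)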
def attribute(name: String): Reader[String] = Reader(
_ \ s"@$name" match {
case Group(Seq(Text(value))) => valid(value)
case _ => invalid(s"attr $name is missing")
}
)
}
| lavrov/xml-reader | src/main/scala/com/github/lavrov/xml/reader/package.scala | Scala | mit | 2,295 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.math.tanh
import com.intel.analytics.bigdl.tensor._
import scala.reflect.ClassTag
/**
* Applies the Tanh function element-wise to the input Tensor,
* thus outputting a Tensor of the same dimension.
* Tanh is defined as f(x) = (exp(x)-exp(-x))/(exp(x)+exp(-x)).
*/
@SerialVersionUID(9062199894710333035L)
class Tanh[@specialized(Float, Double) T: ClassTag](
implicit ev: TensorNumeric[T]) extends TensorModule[T] {
override def updateOutput(input: Tensor[T]): Tensor[T] = {
output.resizeAs(input)
output.map(input, (_, inputVal) => ev.fromType[Double](tanh(ev.toType[Double](inputVal))))
output
}
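  // Backward pass uses d/dx tanh(x) = 1 - tanh(x)^2, computed from the cached
  // output instead of re-evaluating tanh on the input.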
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
gradInput.resizeAs(gradOutput)
gradInput.copy(gradOutput)
gradInput.map(output, (gradValue, outputValue) => ev.times(
gradValue, ev.minus(ev.fromType[Int](1), ev.times(outputValue, outputValue))))
gradInput
}
}
object Tanh {
def apply[T: ClassTag]()
(implicit ev: TensorNumeric[T]) : Tanh[T] = {
new Tanh[T]()
}
}
| 122689305/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Tanh.scala | Scala | apache-2.0 | 1,849 |
package actor
import akka.actor.{Props, Actor}
import model.Cell
case object StartNew
case class GetResult(id: Int, n: Int)
case object NotFound
class CellManager extends Actor {
private val initialState: Set[Cell] = Set(
Cell(9,10), Cell(10, 10), Cell(11,10), Cell(11, 9), Cell(10, 8),
Cell(20,9), Cell(21, 9), Cell(22,9), Cell(20, 8), Cell(21, 7),
Cell(30,15), Cell(31, 15), Cell(32,15), Cell(30, 14))
// todo initialize
private var lastId = 1
def receive = {
case StartNew =>
context.actorOf(Props(classOf[CellWorker], initialState), lastId.toString)
sender ! lastId
lastId += 1
case GetResult(i, n) => context.child(i.toString).map(_ forward Get(n)).getOrElse(sender ! NotFound)
}
}
| semberal/good-cells | app/actor/CellManager.scala | Scala | mit | 741 |
package com.singlestore.spark
import java.sql.{Connection, PreparedStatement, Statement}
import com.singlestore.spark.SQLGen.{StringVar, VariableList}
import com.singlestore.spark.SinglestoreOptions.{TableKey, TableKeyType}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JdbcUtils}
import org.apache.spark.sql.jdbc.JdbcDialects
import org.apache.spark.sql.types.{StringType, StructType}
import org.apache.spark.sql.{Row, SaveMode}
import scala.util.Try
case class SinglestorePartitionInfo(ordinal: Int, name: String, hostport: String)
object JdbcHelpers extends LazyLogging {
final val SINGLESTORE_CONNECT_TIMEOUT = "10000" // 10 seconds in ms
// register the SinglestoreDialect
JdbcDialects.registerDialect(SinglestoreDialect)
// Connection implicits
implicit class ConnectionHelpers(val conn: Connection) {
def withStatement[T](handle: Statement => T): T =
Loan(conn.createStatement).to(handle)
def withPreparedStatement[T](query: String, handle: PreparedStatement => T): T =
Loan(conn.prepareStatement(query)).to(handle)
}
def getJDBCOptions(conf: SinglestoreOptions, hostPorts: String*): JDBCOptions = {
val url: String = {
val base = s"jdbc:mysql:loadbalance://${hostPorts.mkString(",")}"
conf.database match {
case Some(d) => s"$base/$d"
case None => base
}
}
val sessionVariables = Seq(
"collation_server=utf8_general_ci",
"sql_select_limit=18446744073709551615",
"compile_only=false",
"sql_mode='STRICT_ALL_TABLES,ONLY_FULL_GROUP_BY'"
).mkString(";")
new JDBCOptions(
Map(
JDBCOptions.JDBC_URL -> url,
JDBCOptions.JDBC_TABLE_NAME -> "XXX",
JDBCOptions.JDBC_DRIVER_CLASS -> "org.mariadb.jdbc.Driver",
"user" -> conf.user,
"password" -> conf.password,
"zeroDateTimeBehavior" -> "convertToNull",
"allowLoadLocalInfile" -> "true",
"connectTimeout" -> SINGLESTORE_CONNECT_TIMEOUT,
"sessionVariables" -> sessionVariables
) ++ conf.jdbcExtraOptions
)
}
def getDDLJDBCOptions(conf: SinglestoreOptions): JDBCOptions =
getJDBCOptions(conf, conf.ddlEndpoint)
def getDMLJDBCOptions(conf: SinglestoreOptions): JDBCOptions =
getJDBCOptions(conf, conf.dmlEndpoints: _*)
def executeQuery(conn: Connection, query: String, variables: Any*): Iterator[Row] = {
val statement = conn.prepareStatement(query)
try {
fillStatementJdbc(statement, variables.toList)
val rs = statement.executeQuery()
val schema = JdbcUtils.getSchema(rs, SinglestoreDialect, alwaysNullable = true)
JdbcUtils.resultSetToRows(rs, schema)
} finally {
statement.close()
}
}
def loadSchema(conf: SinglestoreOptions, query: String, variables: VariableList): StructType = {
val conn = JdbcUtils.createConnectionFactory(getDDLJDBCOptions(conf))()
try {
val statement =
conn.prepareStatement(SinglestoreDialect.getSchemaQuery(s"($query) AS q"))
try {
fillStatement(statement, variables)
val rs = statement.executeQuery()
try {
JdbcUtils.getSchema(rs, SinglestoreDialect, alwaysNullable = true)
} finally {
rs.close()
}
} finally {
statement.close()
}
} finally {
conn.close()
}
}
def explainQuery(conf: SinglestoreOptions, query: String, variables: VariableList): String = {
val conn = JdbcUtils.createConnectionFactory(getDDLJDBCOptions(conf))()
try {
val statement = conn.prepareStatement(s"EXPLAIN $query")
try {
fillStatement(statement, variables)
val rs = statement.executeQuery()
try {
var out = List.empty[String]
while (rs.next) {
out = rs.getString(1) :: out
}
          out.reverseIterator.mkString("\n")
} finally {
rs.close()
}
} finally {
statement.close()
}
} finally {
conn.close()
}
}
// explainJSONQuery runs `EXPLAIN JSON` on the query and returns the String
// representing this queries plan as JSON.
def explainJSONQuery(conf: SinglestoreOptions, query: String, variables: VariableList): String = {
val conn = JdbcUtils.createConnectionFactory(getDDLJDBCOptions(conf))()
try {
val statement = conn.prepareStatement(s"EXPLAIN JSON ${query}")
try {
fillStatement(statement, variables)
val rs = statement.executeQuery()
try {
// we only expect one row in the output
if (!rs.next()) { assert(false, "EXPLAIN JSON failed to return a row") }
val json = rs.getString(1)
assert(!rs.next(), "EXPLAIN JSON returned more than one row")
json
} finally {
rs.close()
}
} finally {
statement.close()
}
} finally {
conn.close()
}
}
// partitionHostPorts returns a list of (ordinal, name, host:port) for all master
// partitions in the specified database
def partitionHostPorts(conf: SinglestoreOptions,
database: String): List[SinglestorePartitionInfo] = {
val conn = JdbcUtils.createConnectionFactory(getDDLJDBCOptions(conf))()
try {
val statement = conn.prepareStatement(s"""
SELECT HOST, PORT
FROM INFORMATION_SCHEMA.DISTRIBUTED_PARTITIONS
WHERE DATABASE_NAME = ? AND ROLE = "Master"
ORDER BY ORDINAL ASC
""")
try {
fillStatement(statement, List(StringVar(database)))
val rs = statement.executeQuery()
try {
var out = List.empty[SinglestorePartitionInfo]
var idx = 0
while (rs.next) {
out = SinglestorePartitionInfo(idx,
s"${database}_${idx}",
s"${rs.getString(1)}:${rs.getInt(2)}") :: out
idx += 1
}
out.reverse
} finally {
rs.close()
}
} finally {
statement.close()
}
} finally {
conn.close()
}
}
def fillStatement(stmt: PreparedStatement, variables: VariableList): Unit = {
import SQLGen._
if (variables.isEmpty) { return }
variables.zipWithIndex.foreach {
case (StringVar(v), index) => stmt.setString(index + 1, v)
case (IntVar(v), index) => stmt.setInt(index + 1, v)
case (LongVar(v), index) => stmt.setLong(index + 1, v)
case (ShortVar(v), index) => stmt.setShort(index + 1, v)
case (FloatVar(v), index) => stmt.setFloat(index + 1, v)
case (DoubleVar(v), index) => stmt.setDouble(index + 1, v)
case (DecimalVar(v), index) =>
stmt.setBigDecimal(index + 1, v.toJavaBigDecimal)
case (BooleanVar(v), index) => stmt.setBoolean(index + 1, v)
case (ByteVar(v), index) => stmt.setByte(index + 1, v)
case (DateVar(v), index) => stmt.setDate(index + 1, v)
case (TimestampVar(v), index) => stmt.setTimestamp(index + 1, v)
case (v, _) =>
throw new IllegalArgumentException(
"Unexpected Variable Type: " + v.getClass.getName
)
}
}
def fillStatementJdbc(stmt: PreparedStatement, variables: List[Any]): Unit = {
// here we leave it to JDBC driver to do type conversions
if (variables.isEmpty) { return }
for ((v, index) <- variables.zipWithIndex) {
stmt.setObject(index + 1, v)
}
}
def schemaToString(schema: StructType, tableKeys: List[TableKey]): String = {
// spark should never call any of our code if the schema is empty
assert(schema.length > 0)
val fieldsSql = schema.fields
.map(field => {
val name = SinglestoreDialect.quoteIdentifier(field.name)
val typ = SinglestoreDialect
.getJDBCType(field.dataType)
.getOrElse(
throw new IllegalArgumentException(
s"Can't get JDBC type for ${field.dataType.simpleString}"
)
)
val nullable = if (field.nullable) "" else " NOT NULL"
val collation = if (field.dataType == StringType) " COLLATE UTF8_BIN" else ""
s"${name} ${typ.databaseTypeDefinition}${collation}${nullable}"
})
// we want to default all tables to columnstore, but in 6.8 and below you *must*
// specify a sort key so we just pick the first column arbitrarily for now
var finalTableKeys = tableKeys
// if all the keys are shard keys it means there are no other keys so we can default
if (tableKeys.forall(_.keyType == TableKeyType.Shard)) {
finalTableKeys = TableKey(TableKeyType.Columnstore, columns = schema.head.name) :: tableKeys
}
def keyNameColumnsSQL(key: TableKey) =
s"${key.name.map(SinglestoreDialect.quoteIdentifier).getOrElse("")}(${key.columns})"
val keysSql = finalTableKeys.map {
case key @ TableKey(TableKeyType.Primary, _, _) => s"PRIMARY KEY ${keyNameColumnsSQL(key)}"
case key @ TableKey(TableKeyType.Columnstore, _, _) =>
s"KEY ${keyNameColumnsSQL(key)} USING CLUSTERED COLUMNSTORE"
case key @ TableKey(TableKeyType.Unique, _, _) => s"UNIQUE KEY ${keyNameColumnsSQL(key)}"
case key @ TableKey(TableKeyType.Shard, _, _) => s"SHARD KEY ${keyNameColumnsSQL(key)}"
case key @ TableKey(TableKeyType.Key, _, _) => s"KEY ${keyNameColumnsSQL(key)}"
}
    (fieldsSql ++ keysSql).mkString("(\n  ", ",\n  ", "\n)")
}
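  // Illustrative output shape (a sketch only; column types depend on
  // SinglestoreDialect's JDBC type mapping):
  //   (
  //     `id` BIGINT NOT NULL,
  //     KEY (`id`) USING CLUSTERED COLUMNSTORE
  //   )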
def tableExists(conn: Connection, table: TableIdentifier): Boolean = {
conn.withStatement(
stmt =>
Try {
try {
stmt.execute(SinglestoreDialect.getTableExistsQuery(table.quotedString))
} finally {
stmt.close()
}
}.isSuccess
)
}
def createTable(conn: Connection,
table: TableIdentifier,
schema: StructType,
tableKeys: List[TableKey]): Unit = {
val sql = s"CREATE TABLE ${table.quotedString} ${schemaToString(schema, tableKeys)}"
log.trace(s"Executing SQL:\\n$sql")
conn.withStatement(stmt => stmt.executeUpdate(sql))
}
def truncateTable(conn: Connection, table: TableIdentifier): Unit = {
val sql = s"TRUNCATE TABLE ${table.quotedString}"
log.trace(s"Executing SQL:\\n$sql")
conn.withStatement(stmt => stmt.executeUpdate(sql))
}
def dropTable(conn: Connection, table: TableIdentifier): Unit = {
val sql = s"DROP TABLE ${table.quotedString}"
log.trace(s"Executing SQL:\\n$sql")
conn.withStatement(stmt => stmt.executeUpdate(sql))
}
def isReferenceTable(conf: SinglestoreOptions, table: TableIdentifier): Boolean = {
val jdbcOpts = JdbcHelpers.getDDLJDBCOptions(conf)
val conn = JdbcUtils.createConnectionFactory(jdbcOpts)()
// Assume that either table.database is set or conf.database is set
val databaseName =
table.database
.orElse(conf.database)
.getOrElse(throw new IllegalArgumentException("Database name should be defined"))
val sql = s"using $databaseName show tables extended like '${table.table}'"
log.trace(s"Executing SQL:\\n$sql")
val resultSet = conn.withStatement(stmt => {
Try {
try {
stmt.executeQuery(sql)
} finally {
stmt.close()
conn.close()
}
}
})
resultSet.toOption.fold(false)(resultSet => {
if (resultSet.next()) {
!resultSet.getBoolean("distributed")
} else {
throw new IllegalArgumentException(s"Table `$databaseName.${table.table}` doesn't exist")
}
})
}
def prepareTableForWrite(conf: SinglestoreOptions,
table: TableIdentifier,
mode: SaveMode,
schema: StructType): Unit = {
val jdbcOpts = JdbcHelpers.getDDLJDBCOptions(conf)
val conn = JdbcUtils.createConnectionFactory(jdbcOpts)()
try {
if (JdbcHelpers.tableExists(conn, table)) {
mode match {
case SaveMode.Overwrite =>
conf.overwriteBehavior match {
case Truncate =>
JdbcHelpers.truncateTable(conn, table)
case DropAndCreate =>
JdbcHelpers.dropTable(conn, table)
JdbcHelpers.createTable(conn, table, schema, conf.tableKeys)
case Merge =>
// nothing to do
}
case SaveMode.ErrorIfExists =>
sys.error(
s"Table '${table}' already exists. SaveMode: ErrorIfExists."
)
case SaveMode.Ignore =>
// table already exists, nothing to do
case SaveMode.Append => // continue
}
} else {
JdbcHelpers.createTable(conn, table, schema, conf.tableKeys)
}
} finally {
conn.close()
}
}
}
| memsql/memsql-spark-connector | src/main/scala/com/singlestore/spark/JdbcHelpers.scala | Scala | apache-2.0 | 13,004 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller
* @version 1.2
* @date Tue Nov 29 15:46:51 EST 2011
* @see LICENSE (MIT style license file).
*
* @see www.ece.virginia.edu/~mv/edu/715/lectures/QNet.pdf
* @see hspm.sph.sc.edu/Courses/J716/pdf/716-8%20Queuing%20Theory%20Cookbook.pdf
*/
package scalation.queueingnet
import scalation.linalgebra.{MatrixD, VectorD}
import scalation.linalgebra.MatrixD.eye
import scalation.math.Combinatorics.fac
import scalation.math.double_exp
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `JacksonNet` class is used to solve Jackson Queueing Network problems.
* Each node in the network represents a service station consisting of one queue
* and k_i servers. It is currently limited to networks of M/M/k queues.
* @param p the routing probabilities from node to node
* @param r the external arrival rates for each node
* @param mu the service rates for each node
* @param k the number of servers for each node
*/
class JacksonNet (p: MatrixD, r: VectorD, mu: VectorD, private var k: Array [Int] = null)
{
/** Size of the Jackson network (number of nodes)
*/
private val m = mu.dim
/** Identity matrix
*/
private val ident = eye (m)
/** Effective arrival rates at each node
*/
private val lambda = r * (ident - p).inverse // with routing
// private val lambda = r // no routing
if (k == null) k = Array.fill [Int] (m)(1) // default to M/M/1 queues
/** Utilization factor for each node
*/
private val rho = new VectorD (m)
for (i <- 0 until m) rho(i) = lambda(i) / (mu(i) * k(i).toDouble)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check intermediate results.
*/
def check
{
println ("check queueing network parameters:")
println ("p = " + p) // routing probability matrix
println ("r = " + r) // external rate vector
println ("lambda = " + lambda) // effective arrival rate vector
println ("mu = " + mu) // service rate vector
println ("k = " + k.deep) // number of servers vector
println ("rho = " + rho) // utilization factor vector
} // check
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Calculate the probability of a node being empty, based on its utilization
* factor and number of servers.
* @param ro the utilization factor
* @param kk the number of servers
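     *  For an M/M/k queue: pi_0 = 1 / [ sum_{i=0}^{k-1} (k*rho)^i / i!
     *  + (k*rho)^k / (k! * (1 - rho)) ], exactly as computed below.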
*/
def pi_0 (ro: Double, kk: Int): Double =
{
val rok = ro * kk
val sum = (for (i <- 0 until kk) yield rok~^i / fac (i)).sum
1.0 / (sum + rok~^kk / (fac (kk) * (1.0 - ro)))
} // pi_0
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Calculate the expected number in the queue at the j-th node.
* @param j the j-th node
*/
    def nQueue (j: Int): Double = if (k(j) > 1) nQueue_k (j) else nQueue_1 (j)
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Calculate the expected number in the queue at the j-th node for an M/M/1 queue.
* @param j the j-th node
*/
def nQueue_1 (j: Int): Double = { val ro = rho(j); ro~^2 / (1.0 - ro) }
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Calculate the expected number in the queue at the j-th node for an M/M/k queue.
* @param j the j-th node
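     *  Uses L_Q = pi_0 * k^k * rho^(k+1) / (k! * (1 - rho)^2).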
*/
def nQueue_k (j: Int): Double =
{
val ro = rho(j)
val kk = k(j)
pi_0 (ro, kk) * kk~^kk * ro~^(kk+1) / (fac (kk) * (1.0 - ro)~^2)
} // nQueue
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Report the results.
*/
def report
{
for (j <- 0 until m) {
val lQ = nQueue (j) // expected number waiting in the queue at node j
val lS = rho(j) * k(j) // expected number in service at node j
val lT = lQ + lS // expected number at node j
val lamb_j = lambda(j) // effective arrival rate at node j
println ("\nResults for node " + j + ":")
println ("lQ = %g".format (lQ) + "\twQ = %g".format (lQ / lamb_j))
println ("lS = %g".format (lS) + "\twS = %g".format (lS / lamb_j))
println ("lT = %g".format (lT) + "\twT = %g".format (lT / lamb_j))
} // for
} // report
} // JacksonNet
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `JacksonNetTest` object is used to test the `JacksonNet` class.
*/
object JacksonNetTest extends App
{
val p = new MatrixD ((2, 2), 0.0, 1.0,
0.0, 0.0)
val r = VectorD (5.0, 0.0)
val mu = VectorD (8.0, 10.0)
val jqn = new JacksonNet (p, r, mu)
jqn.check
jqn.report
} // JacksonNetTest object
| mvnural/scalation | src/main/scala/scalation/queueingnet/JacksonNet.scala | Scala | mit | 5,285 |
package se.ramn.bottfarmen.simulation.view
trait BotView {
val id: Int
val commanderId: Int
val row: Int
val col: Int
val hitpoints: Int
}
| ramn/bottfarmen | common/src/main/scala/simulation/view/BotView.scala | Scala | gpl-3.0 | 151 |
package org.bitcoins.core.crypto
import org.bitcoins.core.config.TestNet3
import org.bitcoins.core.gen.CryptoGenerators
import org.scalacheck.{Prop, Properties}
/**
* Created by chris on 7/25/16.
*/
class ECPrivateKeySpec extends Properties("ECPrivateKeySpec") {
property("Serialization symmetry for WIF format") =
Prop.forAll(CryptoGenerators.privateKey) { privKey =>
val wif = privKey.toWIF(TestNet3)
ECPrivateKey.fromWIFToPrivateKey(wif) == privKey
}
property("Serialization symmetry") =
Prop.forAll(CryptoGenerators.privateKey) { privKey =>
ECPrivateKey(privKey.hex) == privKey
}
property("unique key generation") =
Prop.forAll(CryptoGenerators.privateKey, CryptoGenerators.privateKey) { (privKey1, privKey2) =>
privKey1 != privKey2
}
}
| SuredBits/bitcoin-s-sidechains | src/test/scala/org/bitcoins/core/crypto/ECPrivateKeySpec.scala | Scala | mit | 804 |
/*
* Copyright 2008, Mark Harrah
*
* This file is part of Frostbridge.
*
* Frostbridge is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 2.1 of the License, or
* (at your option) any later version.
*
* Frostbridge is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Frostbridge. If not, see <http://www.gnu.org/licenses/>.
*/
package net.frostbridge.out
import javax.xml.stream.XMLStreamWriter
/**
* Writes a sequence of Nodes to XML using Woodstox.
*/
//Notes: writing "" as the prefix will make the root element's namespace the default namespace
object StAXOutput
{
import javax.xml.stream.XMLOutputFactory._
import org.codehaus.stax2.XMLOutputFactory2._
import org.codehaus.stax2.XMLStreamProperties._
import com.ctc.wstx.cfg.OutputConfigFlags
import com.ctc.wstx.stax.WstxOutputFactory
/** The factory for creating an XMLStreamWriter. It is configured to
* automatically generate namespace prefixes and to validate names, content, and structure*/
private val outputFactory =
{
val factory = new WstxOutputFactory
def enable(property: String) = factory.setProperty(property, true)
enable(IS_REPAIRING_NAMESPACES)
enable(P_AUTOMATIC_EMPTY_ELEMENTS)
factory.setProperty(P_AUTOMATIC_NS_PREFIX, "p")
enable(XSP_NAMESPACE_AWARE)
val config = factory.getConfig
config.doValidateContent(true)
config.doValidateNames(true)
config.doValidateStructure(true)
config.doFixContent(true)
config.doValidateAttributes(true)
factory
}
import java.io.{OutputStream,Writer}
def createWriter(writer: Writer) = outputFactory.createXMLStreamWriter(writer)
def createWriter(stream: OutputStream) = outputFactory.createXMLStreamWriter(stream)
import javax.xml.stream.XMLStreamException
def write(nodes: Seq[Node], writer: XMLStreamWriter): Option[XMLStreamException] =
{
try
{
writer.writeStartDocument()
nodes.foreach(_.write(writer))
writer.writeEndDocument()
None
}
catch
{
case e: XMLStreamException => Some(e)
}
finally
{
writer.close()
}
}
}
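/** Illustrative usage sketch (not part of the original source): builds a small
 * tree of Nodes and serializes it to a string. Assumes `QName` (defined
 * elsewhere in this package) can be obtained for a local name; adapt the
 * `qname` parameter to the actual QName API.
 */
object StAXOutputExample
{
	def writeExample(qname: String => QName): Option[javax.xml.stream.XMLStreamException] =
	{
		val root = Element(qname("root"), Seq(Attribute(qname("id"), "1"), Text("hello")))
		val out = new java.io.StringWriter
		StAXOutput.write(Seq(root), StAXOutput.createWriter(out))
	}
}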
sealed trait Node
{
def write(writer: XMLStreamWriter): Unit
}
final case class Attribute(name: QName, value: String) extends Node
{
assume(value != null, "Attribute value cannot be null")
def write(writer: XMLStreamWriter) = writer.writeAttribute("", name.namespaceURI, name.localPart, value)
	override def toString = name.localPart + "=\"" + value + "\""
}
final case class Element(name: QName, content: Seq[Node]) extends Node
{
val (attributes, children) =
{
import scala.collection.mutable.ListBuffer
val attr = new ListBuffer[Attribute]
val children = new ListBuffer[Node]
for(node <- content)
{
node match
{
case a: Attribute => attr += a
case _ => children += node
}
}
(attr.readOnly, children.readOnly)
}
def write(writer: XMLStreamWriter) =
{
writer.writeStartElement("", name.localPart, name.namespaceURI)
attributes.foreach(_.write(writer))
children.foreach(_.write(writer))
writer.writeEndElement()
}
override def toString = "<" + name.localPart + " " + attributes.mkString(" ") + ">" +
children.mkString + "</" + name.localPart + ">"
}
final case class Text(text: String) extends Node
{
assume(text != null, "Text content cannot be null")
def write(writer: XMLStreamWriter) = writer.writeCharacters(text)
override def toString = text
}
final case class ProcessingInstruction(target: String, data: String) extends Node
{
assume(target != null, "Processing instruction target cannot be null.")
assume(data != null, "Processing instruction data cannot be null.")
def write(writer: XMLStreamWriter) = writer.writeProcessingInstruction(target, data)
override def toString = "<?" + target + " " + data + " ?>"
}
final case class Comment(text: String) extends Node
{
assume(text != null, "Comment content cannot be null.")
def write(writer: XMLStreamWriter) = writer.writeComment(text)
override def toString = "<!--" + text + "-->"
}
| yzhhui/frostbridge | src/main/scala/net/frostbridge/out/StAXOutput.scala | Scala | lgpl-3.0 | 4,370 |
/*
* Copyright (C) 2016 DANS - Data Archiving and Networked Services ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.agreement.datafetch
import better.files.File
import javax.naming.directory.BasicAttributes
import nl.knaw.dans.easy.agreement.DepositorId
import nl.knaw.dans.easy.agreement.fixture.{ FileSystemSupport, TestSupportFixture }
import org.apache.commons.io.{ FileUtils, IOUtils }
import org.scalamock.scalatest.MockFactory
import org.scalatest.{ BeforeAndAfterAll, BeforeAndAfterEach }
import scala.collection.JavaConverters._
import scala.language.reflectiveCalls
import scala.util.{ Success, Try }
class DatasetLoaderSpec extends TestSupportFixture
with FileSystemSupport
with MockFactory
with BeforeAndAfterEach
with BeforeAndAfterAll {
private val fedoraMock = mock[Fedora]
private val ldapMock = mock[Ldap]
val (userAttributes, expectedUser) = {
val attrs = new BasicAttributes
attrs.put("displayname", "name")
attrs.put("o", "org")
attrs.put("postaladdress", "addr")
attrs.put("postalcode", "pc")
attrs.put("l", "city")
attrs.put("st", "cntr")
attrs.put("telephonenumber", "phone")
attrs.put("mail", "mail")
val user = EasyUser("name", "org", "addr", "pc", "city", "cntr", "phone", "mail")
(attrs, user)
}
override def beforeEach(): Unit = {
super.beforeEach()
File(getClass.getResource("/datasetloader")).copyTo(testDir / "datasetloader")
}
"getUserById" should "query the user data from ldap for a given user id" in {
ldapMock.query _ expects "testID" returning Success(userAttributes)
val loader = new DatasetLoaderImpl(fedoraMock, ldapMock)
loader.getUserById("testID") should matchPattern { case Success(`expectedUser`) => }
}
it should "default to an empty String if the field is not available in the attributes" in {
ldapMock.query _ expects "testID" returning Success(new BasicAttributes)
val loader = new DatasetLoaderImpl(fedoraMock, ldapMock)
loader.getUserById("testID") should matchPattern { case Success(EasyUser("", "", "", "", "", "", "", "")) => }
}
"getDatasetById" should "return the dataset corresponding to the given identifier" in {
val id = "testID"
val depID = "depID"
val user = EasyUser("name", "org", "addr", "pc", "city", "cntr", "phone", "mail")
val amdStream = IOUtils.toInputStream(<foo><depositorId>{depID}</depositorId></foo>.toString)
val emdStream = FileUtils.openInputStream((testDir / "datasetloader" / "emd.xml").toJava)
fedoraMock.getAMD _ expects id returning Success(amdStream)
fedoraMock.getEMD _ expects id returning Success(emdStream)
val loader = new DatasetLoaderImpl(fedoraMock, ldapMock) {
override def getUserById(depositorId: DepositorId): Try[EasyUser] = {
if (depositorId == depID) Success(user)
else fail(s"not the correct depositorID, was $depositorId, should be $depID")
}
}
loader.getDatasetById(id)
// there is no equals defined for the emd, so I need to unpack here
.map { case Dataset(datasetID, emd, usr) =>
(datasetID, emd.getEmdDescription.getDcDescription.asScala.map(_.getValue), usr)
} should matchPattern { case Success((`id`, Seq("descr foo bar"), `user`)) => }
}
}
| DANS-KNAW/easy-license-creator | src/test/scala/nl/knaw/dans/easy/agreement/datafetch/DatasetLoaderSpec.scala | Scala | apache-2.0 | 3,826 |
package lila
package object relation extends PackageObject {
type Relation = Boolean
val Follow: Relation = true
val Block: Relation = false
private[relation] type ID = String
private[relation] type OnlineStudyingCache = com.github.blemale.scaffeine.Cache[ID, String]
}
| luanlv/lila | modules/relation/src/main/package.scala | Scala | mit | 285 |
package org.jetbrains.bsp.data
import com.intellij.openapi.externalSystem.model.DataNode
import com.intellij.openapi.externalSystem.model.project.ProjectData
import com.intellij.openapi.externalSystem.service.project.IdeModifiableModelsProvider
import com.intellij.openapi.module.Module
import com.intellij.openapi.project.Project
import org.jetbrains.plugins.scala.project.external.ScalaAbstractProjectDataService
import org.jetbrains.sbt.project.module
import org.jetbrains.sbt.project.module.SbtModule.{Build, Imports}
import java.util
import scala.jdk.CollectionConverters.CollectionHasAsScala
/**
* @author Jason Zaugg
*/
final class SbtBuildModuleDataBspService extends ScalaAbstractProjectDataService[SbtBuildModuleDataBsp, Module](SbtBuildModuleDataBsp.Key) {
override def importData(toImport: util.Collection[_ <: DataNode[SbtBuildModuleDataBsp]],
projectData: ProjectData,
project: Project,
modelsProvider: IdeModifiableModelsProvider): Unit = {
for {
moduleNode <- toImport.asScala
module <- modelsProvider.getIdeModuleByNode(moduleNode)
} {
val SbtBuildModuleDataBsp(imports, buildFor) = moduleNode.getData
Imports(module) = imports
buildFor.forEach(uri => Build(module) = uri)
}
}
}
| JetBrains/intellij-scala | bsp/src/org/jetbrains/bsp/data/SbtBuildModuleDataBspService.scala | Scala | apache-2.0 | 1,330 |
package org.scalaide.ui.internal.actions
import org.eclipse.core.resources.IProject
import org.scalaide.core.internal.project.Nature
class AddScalaLibraryAction extends AbstractPopupAction {
def performAction(project: IProject): Unit = {
Nature.addScalaLibAndSave(project)
}
}
| Kwestor/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/ui/internal/actions/AddScalaLibraryAction.scala | Scala | bsd-3-clause | 287 |
package slate
package chrome
import scala.scalajs.js
import scala.scalajs.js.annotation.{JSName, ScalaJSDefined}
import scala.scalajs.js.{UndefOr, native, undefined}
@ScalaJSDefined
class GetAuthTokenOptions(val interactive: UndefOr[Boolean] = undefined,
val account: UndefOr[AccountInfo] = undefined,
val scopes: UndefOr[js.Array[String]] = undefined) extends js.Object
@ScalaJSDefined
class LaunchWebAuthFlowOptions(val url: String,
val interactive: UndefOr[Boolean] = undefined) extends js.Object
@ScalaJSDefined
class RemoveCachedAuthTokenOptions(val token: String) extends js.Object
@ScalaJSDefined
class AccountInfo(val id: String) extends js.Object
@native
@JSName("chrome.identity")
object ChromeIdentity extends js.Object {
@JSName("getAuthToken")
@native
def fetchAuthToken(details: UndefOr[GetAuthTokenOptions] = undefined,
callback: js.Function1[String, Unit]): Unit = native
@native
def launchWebAuthFlow(details: LaunchWebAuthFlowOptions,
callback: js.Function1[String, Unit]): Unit = native
@native
def removeCachedAuthToken(details: RemoveCachedAuthTokenOptions,
callback: js.Function1[Unit, Unit]): Unit = native
}
@ScalaJSDefined
class LastError(val message: UndefOr[String]) extends js.Object
@native
@JSName("chrome.runtime")
object ChromeRuntime extends js.Object {
@native
def lastError: UndefOr[LastError] = native
}
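/**
  * Illustrative usage sketch (not part of the original source): requests a
  * token interactively and checks `ChromeRuntime.lastError` in the callback,
  * since chrome.identity reports failures through lastError rather than by
  * throwing.
  */
object ChromeIdentityExample {
  def fetchToken(onToken: String => Unit): Unit =
    ChromeIdentity.fetchAuthToken(new GetAuthTokenOptions(interactive = true), (token: String) =>
      ChromeRuntime.lastError.toOption match {
        case Some(err) => println("auth failed: " + err.message.getOrElse("unknown"))
        case None => onToken(token)
      }
    )
}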
| edmundnoble/slate | ui/src/main/scala/slate/chrome/ChromeIdentity.scala | Scala | mit | 1,525 |
// tests that a refinement subtype satisfies all constraint
// of its refinemen supertype
class Test3 {
trait A
trait B
class C { type T }
type T1 = C { type T <: A }
type T2 = T1 { type T <: B }
type U1 = C { type T <: B }
type U2 = C { type T <: A }
var x: T2 = _
val y1: U1 = ???
val y2: U2 = ???
  x = y1 // error: U1 bounds T only by B, but T2 requires T <: A as well
  x = y2 // error: U2 bounds T only by A, but T2 requires T <: B as well
}
| AlexSikia/dotty | tests/neg/refinedSubtyping.scala | Scala | bsd-3-clause | 365 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.tools
import java.util.Properties
import kafka.integration.KafkaServerTestHarness
import kafka.server.KafkaConfig
import kafka.utils.{Exit, Logging, TestUtils}
import org.apache.kafka.clients.CommonClientConfigs
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.{BeforeEach, Test, TestInfo}
class GetOffsetShellTest extends KafkaServerTestHarness with Logging {
private val topicCount = 4
private val offsetTopicPartitionCount = 4
override def generateConfigs: collection.Seq[KafkaConfig] = TestUtils.createBrokerConfigs(1, zkConnect)
.map { p =>
p.put(KafkaConfig.OffsetsTopicPartitionsProp, Int.box(offsetTopicPartitionCount))
p
}.map(KafkaConfig.fromProps)
@BeforeEach
override def setUp(testInfo: TestInfo): Unit = {
super.setUp(testInfo)
Range(1, topicCount + 1).foreach(i => createTopic(topicName(i), i))
val props = new Properties()
props.put(CommonClientConfigs.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers())
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[StringSerializer])
// Send X messages to each partition of topicX
val producer = new KafkaProducer[String, String](props)
Range(1, topicCount + 1).foreach(i => Range(0, i*i)
.foreach(msgCount => producer.send(new ProducerRecord[String, String](topicName(i), msgCount % i, null, "val" + msgCount))))
producer.close()
TestUtils.createOffsetsTopic(zkClient, servers)
}
@Test
def testNoFilterOptions(): Unit = {
val offsets = executeAndParse(Array())
assertEquals(expectedOffsetsWithInternal(), offsets)
}
@Test
def testInternalExcluded(): Unit = {
val offsets = executeAndParse(Array("--exclude-internal-topics"))
assertEquals(expectedTestTopicOffsets(), offsets)
}
@Test
def testTopicNameArg(): Unit = {
Range(1, topicCount + 1).foreach(i => {
val offsets = executeAndParse(Array("--topic", topicName(i)))
assertEquals(expectedOffsetsForTopic(i), offsets, () => "Offset output did not match for " + topicName(i))
})
}
@Test
def testTopicPatternArg(): Unit = {
val offsets = executeAndParse(Array("--topic", "topic.*"))
assertEquals(expectedTestTopicOffsets(), offsets)
}
@Test
def testPartitionsArg(): Unit = {
val offsets = executeAndParse(Array("--partitions", "0,1"))
assertEquals(expectedOffsetsWithInternal().filter { case (_, partition, _) => partition <= 1 }, offsets)
}
@Test
def testTopicPatternArgWithPartitionsArg(): Unit = {
val offsets = executeAndParse(Array("--topic", "topic.*", "--partitions", "0,1"))
assertEquals(expectedTestTopicOffsets().filter { case (_, partition, _) => partition <= 1 }, offsets)
}
@Test
def testTopicPartitionsArg(): Unit = {
val offsets = executeAndParse(Array("--topic-partitions", "topic1:0,topic2:1,topic(3|4):2,__.*:3"))
assertEquals(
List(
("__consumer_offsets", 3, Some(0)),
("topic1", 0, Some(1)),
("topic2", 1, Some(2)),
("topic3", 2, Some(3)),
("topic4", 2, Some(4))
),
offsets
)
}
@Test
def testTopicPartitionsArgWithInternalExcluded(): Unit = {
val offsets = executeAndParse(Array("--topic-partitions",
"topic1:0,topic2:1,topic(3|4):2,__.*:3", "--exclude-internal-topics"))
assertEquals(
List(
("topic1", 0, Some(1)),
("topic2", 1, Some(2)),
("topic3", 2, Some(3)),
("topic4", 2, Some(4))
),
offsets
)
}
@Test
def testTopicPartitionsNotFoundForNonExistentTopic(): Unit = {
assertExitCodeIsOne(Array("--topic", "some_nonexistent_topic"))
}
@Test
  def testTopicPartitionsNotFoundForNonMatchingTopicPartitionPattern(): Unit = {
    assertExitCodeIsOne(Array("--topic-partitions", "some_nonexistent_topic:*"))
  }
  @Test
  def testTopicPartitionsNotFoundForExcludedInternalTopic(): Unit = {
    assertExitCodeIsOne(Array("--topic-partitions", "__consumer_offsets", "--exclude-internal-topics"))
}
@Test
def testTopicPartitionsFlagWithTopicFlagCauseExit(): Unit = {
assertExitCodeIsOne(Array("--topic-partitions", "__consumer_offsets", "--topic", "topic1"))
}
@Test
def testTopicPartitionsFlagWithPartitionsFlagCauseExit(): Unit = {
assertExitCodeIsOne(Array("--topic-partitions", "__consumer_offsets", "--partitions", "0"))
}
private def expectedOffsetsWithInternal(): List[(String, Int, Option[Long])] = {
Range(0, offsetTopicPartitionCount).map(i => ("__consumer_offsets", i, Some(0L))).toList ++ expectedTestTopicOffsets()
}
private def expectedTestTopicOffsets(): List[(String, Int, Option[Long])] = {
Range(1, topicCount + 1).flatMap(i => expectedOffsetsForTopic(i)).toList
}
private def expectedOffsetsForTopic(i: Int): List[(String, Int, Option[Long])] = {
val name = topicName(i)
Range(0, i).map(p => (name, p, Some(i.toLong))).toList
}
private def topicName(i: Int): String = "topic" + i
private def assertExitCodeIsOne(args: Array[String]): Unit = {
var exitStatus: Option[Int] = None
Exit.setExitProcedure { (status, _) =>
exitStatus = Some(status)
throw new RuntimeException
}
try {
GetOffsetShell.main(addBootstrapServer(args))
} catch {
      case _: RuntimeException => // expected: thrown by the exit procedure stub above
} finally {
Exit.resetExitProcedure()
}
assertEquals(Some(1), exitStatus)
}
private def executeAndParse(args: Array[String]): List[(String, Int, Option[Long])] = {
val output = executeAndGrabOutput(args)
output.split(System.lineSeparator())
.map(_.split(":"))
.filter(_.length >= 2)
.map { line =>
val topic = line(0)
val partition = line(1).toInt
        val offset = if (line.length == 2 || line(2).isEmpty) None else Some(line(2).toLong)
        (topic, partition, offset)
}
.toList
}
private def executeAndGrabOutput(args: Array[String]): String = {
TestUtils.grabConsoleOutput(GetOffsetShell.main(addBootstrapServer(args)))
}
private def addBootstrapServer(args: Array[String]): Array[String] = {
args ++ Array("--bootstrap-server", bootstrapServers())
}
}
| TiVo/kafka | core/src/test/scala/kafka/tools/GetOffsetShellTest.scala | Scala | apache-2.0 | 7,208 |
/*
* ============= Ryft-Customized BSD License ============
* Copyright (c) 2015, Ryft Systems, Inc.
* All rights reserved.
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software must display the following acknowledgement:
* This product includes software developed by Ryft Systems, Inc.
* 4. Neither the name of Ryft Systems, Inc. nor the names of its contributors may be used
* to endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY RYFT SYSTEMS, INC. ''AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL RYFT SYSTEMS, INC. BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* ============
*/
package com.ryft.spark.connector.util
import java.sql.{Timestamp, Date}
import com.ryft.spark.connector.query.value.model.Format
import org.joda.time.DateTime
import org.joda.time.format.DateTimeFormat
/**
* A simple converter to/from Apache Spark SQL Timestamp format
*/
private[connector] object SparkSqlFormatConverter {
private val sparkSqlDateFormat = "yyyy-MM-dd"
private val sparkSqlTimestampFormat = "yyyy-MM-dd HH:mm:ss.SS"
def toSparkSqlNumber(num: String, parameters: Map[String, String]): String = {
val decimal = parameters.getOrElse("decimal", ".")
val subitizer = parameters.getOrElse("subitizer", "")
num
.filter(_ != subitizer.toCharArray.headOption.getOrElse('\0'))
.replace(decimal.toCharArray.headOption.getOrElse('\0'), '.')
}
def toSparkSqlTimestamp(ts: String, format: String): Timestamp = {
val dateTime = DateTime.parse(ts take format.length, DateTimeFormat.forPattern(format))
new Timestamp(dateTime.getMillis)
}
def toSparkSqlDate(date: String, format: String): Date = {
val dateTime = DateTime.parse(date take format.length, DateTimeFormat.forPattern(format))
new Date(dateTime.getMillis)
}
def fromSparkSqlTimestamp(ts: String, format: Format): String = {
val dateTime = DateTime.parse(ts, DateTimeFormat.forPattern(sparkSqlTimestampFormat))
dateTime.toString(format.value)
}
def fromSparkSqlDate(date: String, format: Format): String = {
val dateTime = DateTime.parse(date, DateTimeFormat.forPattern(sparkSqlDateFormat))
dateTime.toString(format.value)
}
  // FIXME: only works when the date portion is at the start of the format string
def dateFormat(format: String): Option[Format] = {
val trimmed = format.trim
if ((trimmed contains "MM") && (trimmed contains "dd")) {
if (trimmed contains "yyyy") Some(Format(trimmed take 10))
else if (trimmed contains "yy") Some(Format(trimmed take 8))
else None
} else None
}
def timeFormat(format: String): Option[Format] = {
val trimmed = format.trim
    // FIXME: splitting on ' ' is not a general solution
if ((trimmed contains "MM") && (trimmed contains "dd")) {
if (trimmed contains "yyyy") {
val fmt = trimmed drop 11
Some(Format(fmt.split(' ')(0)))
}
else if (trimmed contains "yy") {
val fmt = trimmed drop 9
Some(Format(fmt.split(' ')(0)))
}
else None
}
else None
}
def toRyftTimeFormat(format: Format): Format = {
if (format.value contains "S") Format("HH:MM:SS:ss")
else Format("HH:MM:SS")
}
def toRyftDateFormat(format: Format): Format = Format(format.value.toUpperCase)
}
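// Illustrative sketch (not part of the original source) of the conversions above:
// toSparkSqlNumber("1.234,56", Map("decimal" -> ",", "subitizer" -> ".")) yields
// "1234.56", and toSparkSqlTimestamp("2015-06-01 12:00:00.00", "yyyy-MM-dd HH:mm:ss.SS")
// yields the corresponding java.sql.Timestamp.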
| getryft/spark-ryft-connector | spark-ryft-connector/src/main/scala/com/ryft/spark/connector/util/SparkSqlFormatConverter.scala | Scala | bsd-3-clause | 4,444 |
package org.example.usage
import org.example.declaration.{Random => Random42, X}
import scala.util._
object UsageOtherPackage1_Renamed {
def main(args: Array[String]): Unit = {
println(this.getClass)
println(classOf[Random42])
println(Properties.versionString)
println()
org.example.declaration.UsageSameSourcePackage1.main(args)
org.example.declaration.data.UsageSameTargetPackage1.main(args)
val x: X = ???
}
}
| JetBrains/intellij-scala | scala/scala-impl/testdata/moveScala213/moveClass_NameClashesWithOtherNamesImportedFromOtherPackageWithWithWildcard/before/org/example/usage/UsageOtherPackage1_Renamed.scala | Scala | apache-2.0 | 449 |
package org.sgine.ui
import render.ArrayBuffer
import org.powerscala.property.Property
import com.badlogic.gdx.Gdx
import com.badlogic.gdx.graphics.{GL10, Texture}
/**
* ShapeComponent is a base class for Components that need to render vertices and textures to the
* screen.
*
* @author Matt Hicks <[email protected]>
*/
class ShapeComponent extends RenderableComponent {
private val arrayBuffer = new ArrayBuffer(false)
protected[ui] val _vertices = Property[List[Double]]("_vertices", Nil)
protected[ui] val _texture = Property[Texture]("_texture", null)
protected[ui] val _textureCoordinates = Property[List[Double]]("_textureCoordinates", Nil)
private var verticesLength = 0
onUpdate(_vertices, _textureCoordinates) {
arrayBuffer.data = _vertices() ::: _textureCoordinates()
verticesLength = _vertices().length
}
protected def draw() = {
val texture = _texture()
val textureCoordinates = _textureCoordinates()
if (texture != null) {
texture.bind()
} else {
Gdx.gl11.glBindTexture(GL10.GL_TEXTURE_2D, 0)
}
arrayBuffer.bind()
if (!textureCoordinates.isEmpty) {
arrayBuffer.bindTextureCoordinates(verticesLength)
}
arrayBuffer.drawVertices(0, verticesLength / 3)
}
}
| Axiometry/sgine | ui/src/main/scala/org/sgine/ui/ShapeComponent.scala | Scala | bsd-3-clause | 1,258 |
package controllers
import java.io.{File, FileOutputStream}
import akka.ConfigurationException
import org.joda.time.{DateTime, DateTimeZone}
import play.api.Play.current
import play.api._
import play.api.libs.json.{JsObject, JsString, JsValue, Json}
import play.api.mvc._
import play.api.Logger
import buildinfo.BuildInfo
import scala.reflect.io.{Directory, Path}
import scala.sys.process._
import scala.util.Random
object Application extends Controller {
val playbooks = Directory(Play.configuration.getString("ansible.playbooks").get)
if (!playbooks.isDirectory) throw new ConfigurationException(s"$playbooks is not a directory")
val ansible = Play.configuration.getString("ansible.command").get
val vaultPassword = Play.configuration.getString("ansible.vault_password").get
val verbose = Play.configuration.getBoolean("ansible.verbose").getOrElse(false)
val passwordFile = createTempVaultPassFile()
val startedAt = DateTime.now.toDateTime(DateTimeZone.UTC)
val ansibleVersion = s"$ansible --version" !!
val random = Random
val inventoryMap = Play.configuration.getConfig("inventoryMap").get
private val defaultInventory = Play.configuration.getString("defaultInventory").get
case class Version(version: String)
implicit val versionFormat = Json.format[Version]
def index = Action {
Ok(views.html.index(playbooks, ansible, ansibleVersion, startedAt))
}
def play: Action[JsValue] = Action(parse.json) { request =>
val buildId = Math.abs(random.nextInt).toString
val refId = escapeJson(request.getQueryString("refId").getOrElse(""))
val branch = request.getQueryString("inventory")
val playbookName = request.getQueryString("playbook")
val inventoryName = branch map (inventoryMap.getString(_).getOrElse(defaultInventory))
val start = DateTime.now().getMillis
val inventory = inventoryName map (playbooks / _)
val playbook = playbookName map (n => playbooks / (n + ".yaml"))
val versionJson = request.body
val version = (versionJson.asOpt[Version] map (_.version))
def resultJson(status: Boolean, message: Option[JsValue]): JsValue = {
val json = JsObject(Seq(
"buildId" β JsString(buildId),
"refId" -> JsString(refId),
"inventory" β JsString(inventoryName.getOrElse("N/A")),
"playbook" β JsString(playbookName.getOrElse("N/A")),
"version" -> JsString(version.getOrElse("N/A")),
"status" β JsString(if (status) "success" else "failed"),
"execTime" β JsString(execTime)
))
val result = message match {
case Some(m) => json + ("message" β message.get)
case None => json
}
JsObject(Seq("result" β result))
}
def execTime: String = s"PT${(DateTime.now.getMillis - start + 500) / 1000}S"
def reportBadRequest(message: JsValue): Option[Result] = {
val json = resultJson(status = false, message = Some(message))
Logger.warn(json.toString)
Some(BadRequest(json))
}
def reportServiceUnavailable(message: JsObject): Option[Result] = {
val json = resultJson(status = false, message = Some(message))
Logger.warn(json.toString)
Some(ServiceUnavailable(json))
}
def checkPath(file: Option[Path], hint: String): Option[Result] = {
file match {
case Some(f) =>
if (f.exists) {
None
} else {
val json = resultJson(status = false, message = Some(JsString(s"File not found: $hint file: $f")))
Logger.warn(json.toString())
Some(NotFound(json))
}
case None =>
val json = resultJson(status = false, message = Some(JsString(s"$hint not set")))
Logger.warn(json.toString)
Some(BadRequest(json))
}
}
def appendLine(builder: StringBuilder, line: String): Unit = {
      builder.append(s"$line\n")
}
def gitPull: Option[Result] = {
val cmd = Seq("git", "-C", playbooks.toString, "pull")
runCommand(cmd) match {
case (0, _) =>
None
case (code, message) =>
reportServiceUnavailable(message)
}
}
def runCommand(cmd: Seq[String]) = {
val stdout = new StringBuilder
val stderr = new StringBuilder
val code = cmd ! ProcessLogger(appendLine(stdout, _), appendLine(stderr, _))
val message = JsObject(Seq(
"stdout" β JsString(stdout.toString()),
"stderr" β JsString(stderr.toString())
))
if (code == 0)
Logger.debug(s"""CmdExecution=ok, code=0, cmd="${cmd.mkString(" ")}"""")
else
Logger.warn(s"""CmdExecution=failed code=$code, cmd="${cmd.mkString(" ")}", message="$message"""")
(code, message)
}
Logger.info(
JsObject(Seq(
"request" β JsObject(Seq(
"buildId" β JsString(buildId),
"refId" β JsString(refId),
"inventory" β JsString(inventoryName.getOrElse("N/A")),
"playbook" β JsString(playbookName.getOrElse("N/A")),
"version" -> JsString(version.getOrElse("N/A")),
"remoteAddress" β JsString(request.remoteAddress)
)))).toString()
)
(checkPath(inventory, "inventory") orElse {
checkPath(playbook, "playbook")
} orElse gitPull orElse {
// Run ansible
val cmdPre = Seq(ansible,
"-i", inventory.getOrElse("N/A").toString,
"-e", versionJson.toString,
"--vault-password-file", passwordFile,
playbook.getOrElse("N/A").toString)
val cmd = if (verbose) {
(cmdPre :+ "-v")
} else {
cmdPre
}
Logger.debug(JsObject(Seq(
"buildId" β JsString(buildId),
"refId" β JsString(refId),
"command" β JsString(cmd.mkString(" "))
)).toString)
version match {
case Some(v) =>
runCommand(cmd) match {
case (0, message) =>
              Logger.trace(resultJson(status = true, message = Some(message)).toString)
val json = resultJson(status = true, message = None)
Logger.info(json.toString)
Some(Ok(json))
case (_, message) =>
reportServiceUnavailable(message)
}
case _ => reportBadRequest(JsString("must give version"))
}
}).get
}
def createTempVaultPassFile(): String = {
val passwordFile = File.createTempFile("rocannon-", ".tmp")
passwordFile.deleteOnExit()
val stream = new FileOutputStream(passwordFile)
stream.write(vaultPassword.getBytes)
stream.close()
passwordFile.getCanonicalPath
}
def escapeJson(input: String): String = {
    input.replace("\"", "^").replace("\'", "^").replace("\\", "/").replace("\n", "\\n")
}
def ping = Action {
Ok(Json.obj(
"name" -> JsString(BuildInfo.name),
"version" -> JsString(BuildInfo.version)
)).withHeaders(CACHE_CONTROL -> "no-cache")
}
}
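// Illustrative interaction (not part of the original source), assuming the routes
// file maps POST /play to Application.play:
//   POST /play?playbook=deploy&inventory=master&refId=build-7
//   body: {"version": "1.2.3"}
// On success the response is a JSON object of the form
//   {"result": {"buildId": ..., "refId": ..., "status": "success", "execTime": "PT12S", ...}}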
| strawpay/rocannon | app/controllers/Application.scala | Scala | mit | 7,031 |
package wandou.math.random
import java.util.Random
/**
* Discrete, uniformly distributed random sequence. Generates
* values between the specified minimum and maximum values (inclusive).
* @author Daniel Dyer
*/
class DiscreteUniformGenerator(minValue: Int, maxValue: Int, rng: Random) extends NumberGenerator[Int] {
private val range = maxValue - minValue + 1
def nextValue: Int = rng.nextInt(range) + minValue
}
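/**
 * Illustrative sketch (not part of the original source): simulating a fair
 * six-sided die, drawing values uniformly from [1, 6].
 */
object DiceExample extends App {
  val die = new DiscreteUniformGenerator(1, 6, new Random)
  println(Seq.fill(5)(die.nextValue).mkString(", "))
}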
| wandoulabs/wandou-math | wandou-math/src/main/scala/wandou/math/random/DiscreteUniformGenerator.scala | Scala | apache-2.0 | 426 |
package com.ruimo.recoeng
import play.api.Logger
import play.api.libs.json._
import play.api.libs.functional.syntax._
import org.joda.time.DateTime
import java.util.concurrent.atomic.AtomicLong
import com.ruimo.recoeng.json.Desc
import com.ruimo.recoeng.json.SortOrder
import com.ruimo.recoeng.json.ScoredItem
import com.ruimo.recoeng.json.JsonRequestPaging
import com.ruimo.recoeng.json.OnSalesJsonRequest
import com.ruimo.recoeng.json.RecommendByItemJsonRequest
import com.ruimo.recoeng.json.OnSalesJsonResponse
import com.ruimo.recoeng.json.JsonRequestHeader
import com.ruimo.recoeng.json.TransactionMode
import com.ruimo.recoeng.json.TransactionSalesMode
import com.ruimo.recoeng.json.SalesItem
import com.ruimo.recoeng.json.JsonResponseHeader
import com.ruimo.recoeng.json.RecommendByItemJsonResponse
import play.api._
object SequenceNumber {
private val seed = new AtomicLong
def apply(): Long = seed.incrementAndGet
}
trait RecoEngApi {
def onSales(
requestTime: Long = System.currentTimeMillis,
sequenceNumber: Long = SequenceNumber(),
transactionMode: TransactionMode,
transactionTime: Long,
userCode: String,
itemTable: Seq[SalesItem]
): JsResult[OnSalesJsonResponse]
def recommendByItem(
requestTime: Long = System.currentTimeMillis,
sequenceNumber: Long = SequenceNumber(),
salesItems: Seq[SalesItem],
sort: SortOrder = Desc("score"),
paging: JsonRequestPaging
): JsResult[RecommendByItemJsonResponse]
}
class RecoEngApiImpl(
plugin: RecoEngPlugin,
serverFactory: RecoEngPlugin => (String, JsValue) => JsValue =
(p: RecoEngPlugin) => JsonServer.jsServer(p.config)
) extends RecoEngApi {
val logger = Logger(getClass)
def server: (String, JsValue) => JsValue = serverFactory.apply(plugin)
implicit val pagingWrites = Writes[JsonRequestPaging] { p =>
Json.obj(
"offset" -> Json.toJson(p.offset),
"limit" -> Json.toJson(p.limit)
)
}
implicit val requestHeaderWrites = Writes[JsonRequestHeader] { req =>
Json.obj(
"dateTime" -> Json.toJson(req.dateTimeInYyyyMmDdHhMmSs),
"sequenceNumber" -> Json.toJson(req.sequenceNumber)
)
}
implicit val salesItemWrites = Writes[SalesItem] { it =>
Json.obj(
"storeCode" -> Json.toJson(it.storeCode),
"itemCode" -> Json.toJson(it.itemCode),
"quantity" -> Json.toJson(it.quantity)
)
}
implicit val onSalesJsonRequestWrites = Writes[OnSalesJsonRequest] { req =>
Json.obj(
"header" -> Json.toJson(req.header),
"transactionMode" -> Json.toJson(req.mode),
"dateTime" -> Json.toJson(req.tranDateInYyyyMmDdHhMmSs),
"userCode" -> Json.toJson(req.userCode),
"salesItems" -> Json.toJson(req.salesItems)
)
}
implicit val recommendByItemJsonRequestWrites = Writes[RecommendByItemJsonRequest] { req =>
Json.obj(
"header" -> Json.toJson(req.header),
"salesItems" -> Json.toJson(req.salesItems),
"sort" -> Json.toJson(req.sort),
"paging" -> Json.toJson(req.paging)
)
}
implicit val responseHeaderWrites: Writes[JsonResponseHeader] = (
(__ \\ "sequenceNumber").write[String] and
(__ \\ "statusCode").write[String] and
(__ \\ "message").write[String]
)(unlift(JsonResponseHeader.unapply))
implicit val onSalesResponseWrites = Writes[OnSalesJsonResponse] { resp =>
Json.obj("header" -> Json.toJson(resp.header))
}
implicit val responseHeaderReads: Reads[JsonResponseHeader] = (
(JsPath \\ "sequenceNumber").read[String] and
(JsPath \\ "statusCode").read[String] and
(JsPath \\ "message").read[String]
)(JsonResponseHeader.apply _)
implicit val onSalesJsonResponseReads: Reads[OnSalesJsonResponse] =
(JsPath \\ "header").read[JsonResponseHeader] map OnSalesJsonResponse.apply
implicit val scoredItemReads: Reads[ScoredItem] = (
(JsPath \\ "storeCode").read[String] and
(JsPath \\ "itemCode").read[String] and
(JsPath \\ "score").read[Double]
)(ScoredItem.apply _)
implicit val jsonRequestPagingReads: Reads[JsonRequestPaging] = (
(JsPath \\ "offset").read[Int] and
(JsPath \\ "limit").read[Int]
)(JsonRequestPaging.apply _)
implicit val recommendByItemJsonResponseReads: Reads[RecommendByItemJsonResponse] = (
(JsPath \\ "header").read[JsonResponseHeader] and
(JsPath \\ "salesItems").read[Seq[ScoredItem]] and
(JsPath \\ "sort").read[String] and
(JsPath \\ "paging").read[JsonRequestPaging]
)(RecommendByItemJsonResponse.apply _)
def onSales(
requestTime: Long,
sequenceNumber: Long,
transactionMode: TransactionMode,
transactionTime: Long,
userCode: String,
itemTable: Seq[SalesItem]
): JsResult[OnSalesJsonResponse] = {
val req = OnSalesJsonRequest(
header = JsonRequestHeader(
dateTime = new DateTime(requestTime),
sequenceNumber = sequenceNumber.toString
),
mode = TransactionSalesMode.asString,
dateTime = new DateTime(transactionTime),
userCode = userCode,
salesItems = itemTable
)
sendJsonRequest("/onSales", "onSales", Json.toJson(req), _.validate[OnSalesJsonResponse])
}
def recommendByItem(
requestTime: Long,
sequenceNumber: Long,
salesItems: Seq[SalesItem],
sort: SortOrder,
paging: JsonRequestPaging
): JsResult[RecommendByItemJsonResponse] = {
val req = RecommendByItemJsonRequest(
header = JsonRequestHeader(
dateTime = new DateTime(requestTime),
sequenceNumber = sequenceNumber.toString
),
salesItems = salesItems,
sort = sort.toString,
paging = paging
)
sendJsonRequest(
"/recommendByItem",
"recommendByItem", Json.toJson(req), _.validate[RecommendByItemJsonResponse]
)
}
def sendJsonRequest[T](
contextPath: String, apiName: String, jsonRequest: JsValue, resultValidator: JsValue => JsResult[T]
): JsResult[T] = {
val jsonResponse: JsValue = server(contextPath, jsonRequest)
val result: JsResult[T] = resultValidator(jsonResponse)
result match {
case JsError(error) =>
logger.error(
"Sending recommend " + apiName + " request. error: " + error +
", req: " + jsonRequest + ", resp: " + jsonResponse
)
case _ =>
}
result
}
}
class RecoEngPlugin(val app: Application) extends Plugin {
val logger = Logger(getClass)
val config: Option[RecoEngConfig] = RecoEngConfig.get(app.configuration)
val api: RecoEngApi = new RecoEngApiImpl(this)
override def onStart() {
logger.info("RecoEng Plugin started " + config)
}
override def onStop() {
logger.info("RecoEng Plugin stopped...")
}
}
object RecoEngPlugin {
def api(implicit app: Application): RecoEngApi = app.plugin[RecoEngPlugin] match {
case Some(plugin) => plugin.api
    case None => throw new Error("No recoeng plugin found in this application.")
}
}
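/**
 * Illustrative sketch (not part of the original source): issuing a
 * recommendByItem request through the plugin API. The SalesItem field order
 * (storeCode, itemCode, quantity) is assumed from the Writes defined above.
 */
object RecoEngApiExample {
  def recommendFor(api: RecoEngApi): Unit = {
    api.recommendByItem(
      salesItems = Seq(SalesItem("store01", "item42", 1)),
      paging = JsonRequestPaging(offset = 0, limit = 10)
    ) match {
      case JsSuccess(resp, _) => println(resp.salesItems)
      case JsError(errors) => println("recommend failed: " + errors)
    }
  }
}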
| ruimo/recoengPlugin | app/com/ruimo/recoeng/RecoEngPlugin.scala | Scala | apache-2.0 | 6,876 |
package org.scalawiki.bots
import org.scalawiki.bots.TextLib._
import org.specs2.mutable.Specification
class ReplaceSpec extends Specification {
"parse params" should {
def parse(args: Seq[String]) = Replace.parse(args)
"empty" in {
parse(Nil) === ReplaceConfig()
}
"replacements" in {
parse(Seq("old", "new")).replacements === Map("old" -> "new")
parse(Seq("old1", "new1", "old2", "new2")).replacements === Map("old1" -> "new1", "old2" -> "new2")
}
"replacement and cat" in {
val args = parse(Seq("--cat", "category name", "old", "new"))
args.replacements === Map("old" -> "new")
args.pages.cat === Seq("category name")
}
"replacements and cat" in {
val args = parse(Seq("--cat", "category name", "old1", "new1", "old2", "new2"))
args.pages.cat === Seq("category name")
args.replacements === Map("old1" -> "new1", "old2" -> "new2")
}
"regex" in {
parse(Seq("--regex")).regex === true
parse(Seq()).regex === false
}
}
"replaceExcept" should {
"no replace" in {
replaceExcept("12345678", "x", "y") === "12345678"
}
"simple replace" in {
replaceExcept("AxB", "x", "y") === "AyB"
replaceExcept("AxxB", "x", "y") === "AyyB"
replaceExcept("AxyxB", "x", "y") === "AyyyB"
}
"regex replace" in {
replaceExcept("A123B", "\\\\d", "x") === "AxxxB"
replaceExcept("A123B", "\\\\d+", "x") === "AxB"
replaceExcept("A123B", "A(\\\\d)2(\\\\d)B", "A$1x$2B") === "A1x3B"
replaceExcept("", "(a?)", "$1B") === "B"
replaceExcept("abc", "x*", "-") === "-a-b-c-"
replaceExcept("", "(a)?", "$1$1") === ""
replaceExcept("A123B", "A(?<a>\\\\d)2(?<b>\\\\d)B", "A${a}x${b}B") === "A1x3B"
replaceExcept("A123B", "A(?<a>\\\\d)2(\\\\d)B", "A${a}x$2B") === "A1x3B"
// test regex with lookbehind.
replaceExcept("A behindB C", "(?<=behind)\\\\w", "Z") === "A behindZ C"
// test regex with lookbehind and groups.
replaceExcept("A behindB C D", "(?<=behind)\\\\w( )", "$1Z") === "A behind ZC D"
// test regex with lookahead.
replaceExcept("A Bahead C", "\\\\w(?=ahead)", "Z") === "A Zahead C"
// test regex with lookahead and groups.
replaceExcept("A Bahead C D", "( )\\\\w(?=ahead)", "Z$1") === "AZ ahead C D"
}
"case sensitivity" in {
replaceExcept("AxB", "x", "y", ignoreCase = false) === "AyB"
replaceExcept("AxB", "X", "y", ignoreCase = false) === "AxB"
replaceExcept("AxB", "x", "y", ignoreCase = true) === "AyB"
replaceExcept("AxB", "X", "y", ignoreCase = true) === "AyB"
}
"replace with marker" in {
replaceExcept("AxyxB", "x", "y", marker = Some(".")) === "Ayyy.B"
replaceExcept("AxyxB", "1", "y", marker = Some(".")) === "AxyxB."
}
"overlapping replace" in {
replaceExcept("1111", "11", "21") === "2121"
// allow overlap not supported
//replaceExcept("1111", "11", "21") === "2221"
}
"replacing not inside a specific regex" in {
replaceExcept("123x123", "123", "000") === "000x000"
replaceExcept("123x123", "123", "000", exceptions = Seq("\\\\w123".r)) === "000x123"
}
}
}
| intracer/scalawiki | scalawiki-bots/src/test/scala/org/scalawiki/bots/ReplaceSpec.scala | Scala | apache-2.0 | 3,200 |
/*
* MOIS: Variable Types
* Copyright (C) 2014 University of Edinburgh School of Informatics
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package uk.ac.ed.inf.mois
import spire.algebra.{Rig, Ring, Field}
abstract class ImmutableVar[T] extends Var[T] {
def update(value: T) {}
}
class RigVarIsRig[T: Rig] extends Rig[Var[T]] {
def zero = new ImmutableVar[T] {
val meta = VarMeta("", Rig[T])
def value = Rig[T].zero
}
def one = new ImmutableVar[T] {
val meta = VarMeta("", Rig[T])
def value = Rig[T].one
}
def plus(x: Var[T], y: Var[T]) = new ImmutableVar[T] {
val meta = VarMeta("", Rig[T])
def value = Rig[T].plus(x.value, y.value)
}
def times(x: Var[T], y: Var[T]) = new ImmutableVar[T] {
val meta = VarMeta("", Rig[T])
def value = Rig[T].times(x.value, y.value)
}
}
class RingVarIsRing[T: Ring] extends RigVarIsRig[T] with Ring[Var[T]] {
def negate(x: Var[T]) = new ImmutableVar[T] {
val meta = VarMeta("", Ring[T])
def value = Ring[T].negate(x.value)
}
}
class FieldVarIsField[T: Field] extends RingVarIsRing[T] with Field[Var[T]] {
def gcd(a: Var[T], b: Var[T]) = new ImmutableVar[T] {
val meta = VarMeta("", Field[T])
def value = Field[T].gcd(a.value, b.value)
}
def mod(a: Var[T], b: Var[T]) = new ImmutableVar[T] {
val meta = VarMeta("", Field[T])
def value = Field[T].mod(a.value, b.value)
}
def quot(a: Var[T], b: Var[T]) = new ImmutableVar[T] {
val meta = VarMeta("", Field[T])
def value = Field[T].quot(a.value, b.value)
}
def div(a: Var[T], b: Var[T]) = new ImmutableVar[T] {
val meta = VarMeta("", Field[T])
def value = Field[T].div(a.value, b.value)
}
}
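/**
 * Illustrative sketch (not part of the original source): combining constant
 * variables through the Rig instance, assuming spire's standard Double
 * algebra is in scope via `import spire.std.double._`.
 */
object VarAlgebraExample {
  import spire.std.double._
  val rig = new RigVarIsRig[Double]
  val two = rig.plus(rig.one, rig.one) // an immutable Var whose value is 2.0
}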
| edinburgh-rbm/mois | src/main/scala/uk/ac/ed/inf/mois/Types.scala | Scala | gpl-3.0 | 2,306 |
package com.mesosphere.cosmos.model
import com.mesosphere.universe.PackageDetailsVersion
case class InstallResponse(
packageName: String,
packageVersion: PackageDetailsVersion,
appId: AppId
)
| movicha/cosmos | cosmos-model/src/main/scala/com/mesosphere/cosmos/model/InstallResponse.scala | Scala | apache-2.0 | 200 |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.express.flow
import com.google.common.base.Objects
import org.kiji.annotations.ApiAudience
import org.kiji.annotations.ApiStability
import org.kiji.annotations.Inheritance
import org.kiji.schema.KijiColumnName
import org.kiji.schema.KijiInvalidNameException
/**
* A request for data from a Kiji table. Provides access to options common to all types of column
* input specs. There are two types of column input specs:
* <ul>
* <li>
* [[org.kiji.express.flow.QualifiedColumnInputSpec]] - Requests versions of cells from an
* fully-qualified column.
* </li>
* <li>
* [[org.kiji.express.flow.ColumnFamilyInputSpec]] - Requests versions of cells from columns in
* a column family.
* </li>
* </ul>
*
* Requested data will be represented as a sequence of flow cells (`Seq[FlowCell[T] ]`).
*
* To see more information about reading data from a Kiji table, see
* [[org.kiji.express.flow.KijiInput]].
*
* Note: Subclasses of `ColumnInputSpec` are case classes that override its abstract methods
* (e.g., `schemaSpec`) with `val`s.
*/
@ApiAudience.Public
@ApiStability.Stable
sealed trait ColumnInputSpec {
/**
* Maximum number of cells to retrieve starting from the most recent cell. By default, only the
* most recent cell is retrieved.
*
* @return the maximum number of cells to retrieve.
*/
def maxVersions: Int
/**
* Filter that a cell must pass in order to be retrieved. If NoFilterSpec, no filter is used.
*
* @return the column filter specification
*/
def filterSpec: ColumnFilterSpec
/**
* Specifies the maximum number of cells to maintain in memory when paging through a column.
*
* @return the paging specification for this column.
*/
def pagingSpec: PagingSpec
/**
* Specifies the schema that should be applied to the requested data.
*
* @return the schema that should be used for reading.
*/
def schemaSpec: SchemaSpec
/**
* The [[org.kiji.schema.KijiColumnName]] of the requested data.
*
* @return the column name of the requested data.
*/
def columnName: KijiColumnName
}
/**
* Provides convenience factory methods for creating [[org.kiji.express.flow.ColumnInputSpec]]
* instances.
*/
@ApiAudience.Public
@ApiStability.Stable
@Inheritance.Sealed
object ColumnInputSpec {
/** Constants for default parameters. */
val DEFAULT_MAX_VERSIONS = latest
val DEFAULT_PAGING_SPEC = PagingSpec.Off
val DEFAULT_SCHEMA_SPEC = SchemaSpec.Writer
val DEFAULT_COLUMN_FILTER_SPEC = ColumnFilterSpec.NoFilter
/**
* A request for data from a Kiji table column. The input spec will be for a qualified column if
* the column parameter contains a ':', otherwise the input will assumed to be for a column family
* (column family names cannot contain ';' characters).
*
* @param column name of the requested data.
* @param maxVersions to read back from the requested column (default is only most recent).
* @param filterSpec to use when reading back cells (default is NoColumnFilterSpec).
* @param pagingSpec options specifying the maximum number of cells to retrieve from Kiji
* per page.
* @param schemaSpec specifies the schema to use when reading cells. Defaults to
* [[org.kiji.express.flow.SchemaSpec.Writer]].
* @return a new column input spec with supplied options.
*/
private[express] def apply(
column: String,
maxVersions: Int = DEFAULT_MAX_VERSIONS,
filterSpec: ColumnFilterSpec = DEFAULT_COLUMN_FILTER_SPEC,
pagingSpec: PagingSpec = DEFAULT_PAGING_SPEC,
schemaSpec: SchemaSpec = DEFAULT_SCHEMA_SPEC
): ColumnInputSpec = {
column.split(':') match {
case Array(family, qualifier) =>
QualifiedColumnInputSpec(
family,
qualifier,
maxVersions,
filterSpec,
pagingSpec,
schemaSpec
)
case Array(family) =>
ColumnFamilyInputSpec(
family,
maxVersions,
filterSpec,
pagingSpec,
schemaSpec
)
case _ => throw new IllegalArgumentException("column name must contain 'family:qualifier'" +
" for a group-type, or 'family' for a map-type column.")
}
}
}
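// Illustrative note (not part of the original source): the factory above
// dispatches on the presence of ':' in the column name, e.g.
//   ColumnInputSpec("info:name") // a QualifiedColumnInputSpec for "info:name"
//   ColumnInputSpec("info") // a ColumnFamilyInputSpec for the "info" family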
/**
* Specifies a request for versions of cells from a fully-qualified column.
*
* Basic example that reads data into generic records using the schema with which they were written:
* {{{
* // Request the latest version of data stored in the "info:name" column.
* val myColumnSpec: QualifiedColumnInputSpec =
* QualifiedColumnInputSpec.builder
* .withColumn("info", "name")
* .withMaxVersions(1)
* .build
* }}}
*
* Paging can be enabled on a column input specification causing blocks of cells to be retrieved
* from Kiji at a time:
* {{{
* // Request cells from the "info:status" column retrieving 1000 cells per block.
* val myPagedColumn: QualifiedColumnInputSpec =
* QualifiedColumnInputSpec.builder
* .withColumn("info", "status")
* .withMaxVersions(flow.all)
* .withPagingSpec(PagingSpec.Cells(1000))
* .build
* }}}
*
* If compiled avro classes are being used, a compiled record class can be specified. Data read from
* this column will be of the specified type:
* {{{
* // Request cells from the "info:user" column containing User records.
* val myColumnSpec: QualifiedColumnInputSpec =
* QualifiedColumnInputSpec.builder
* .withColumn("info", "user")
* .withMaxVersions(1)
* .withSchemaSpec(SchemaSpec.Specific(classOf[User]))
* .build
* }}}
*
* Note: Columns containing no values will be replaced with an empty sequence unless all requested
* columns are empty in which case the entire row will be skipped.
*
* To see more information about reading data from a Kiji table, see
* [[org.kiji.express.flow.KijiInput]].
*
* @param family of columns the requested data belongs to.
* @param qualifier of the column the requested data belongs to.
* @param maxVersions to read back from the requested column (default is only most recent).
* @param filterSpec to use when reading back cells (default is NoColumnFilterSpec).
* @param pagingSpec options specifying the maximum number of cells to retrieve from Kiji per page.
* @param schemaSpec specifies the schema to use when reading cells. Defaults to
* [[org.kiji.express.flow.SchemaSpec.Writer]].
*/
@ApiAudience.Public
@ApiStability.Stable
final class QualifiedColumnInputSpec private(
val family: String,
val qualifier: String,
val maxVersions: Int = ColumnInputSpec.DEFAULT_MAX_VERSIONS,
val filterSpec: ColumnFilterSpec = ColumnInputSpec.DEFAULT_COLUMN_FILTER_SPEC,
val pagingSpec: PagingSpec = ColumnInputSpec.DEFAULT_PAGING_SPEC,
val schemaSpec: SchemaSpec = ColumnInputSpec.DEFAULT_SCHEMA_SPEC
) extends ColumnInputSpec with Serializable {
override def columnName: KijiColumnName = new KijiColumnName(family, qualifier)
override def toString: String = Objects.toStringHelper(classOf[QualifiedColumnInputSpec])
.add("family", family)
.add("qualifier", qualifier)
.add("max_versions", maxVersions)
.add("filter_spec", filterSpec)
.add("paging_spec", pagingSpec)
.add("schema_spec", schemaSpec)
.toString
override def hashCode: Int =
Objects.hashCode(
family,
qualifier,
maxVersions: java.lang.Integer,
filterSpec,
pagingSpec,
schemaSpec)
override def equals(obj: Any): Boolean = obj match {
case other: QualifiedColumnInputSpec => {
family == other.family &&
qualifier == other.qualifier &&
maxVersions == other.maxVersions &&
filterSpec == other.filterSpec &&
pagingSpec == other.pagingSpec &&
schemaSpec == other.schemaSpec
}
case _ => false
}
}
/**
* Provides factory functions for creating [[org.kiji.express.flow.QualifiedColumnInputSpec]]
* instances.
*/
@ApiAudience.Public
@ApiStability.Stable
object QualifiedColumnInputSpec {
/**
* Convenience function for creating a [[org.kiji.express.flow.QualifiedColumnInputSpec]] with
* a generic Avro type specified by a [[org.apache.avro.Schema]].
*
* @param family of columns the requested data belongs to.
* @param qualifier of the column the requested data belongs to.
* @param maxVersions to read back from the requested column (default is only most recent).
* @param filterSpec to use when reading back cells (default is NoColumnFilterSpec).
* @param pagingSpec options specifying the maximum number of cells to retrieve
* from Kiji per page.
* @param schemaSpec specification with which to read data.
* @return a new column input spec with supplied options.
*/
private[express] def apply(
family: String,
qualifier: String,
maxVersions: Int = ColumnInputSpec.DEFAULT_MAX_VERSIONS,
filterSpec: ColumnFilterSpec = ColumnInputSpec.DEFAULT_COLUMN_FILTER_SPEC,
pagingSpec: PagingSpec = ColumnInputSpec.DEFAULT_PAGING_SPEC,
schemaSpec: SchemaSpec = ColumnInputSpec.DEFAULT_SCHEMA_SPEC
): QualifiedColumnInputSpec = {
new QualifiedColumnInputSpec(
family,
qualifier,
maxVersions,
filterSpec,
pagingSpec,
schemaSpec)
}
/**
* Decompose the given object into its constituent parts if it is an instance of
* QualifiedColumnInputSpec.
*
* @param target object to decompose if it is a QualifiedColumnInputSpec.
* @return the fields used to construct the target.
* (family, qualifier, maxVersions, filterSpec, pagingSpec, schemaSpec)
*/
private[express] def unapply(
target: Any
): Option[(
String,
String,
Int,
ColumnFilterSpec,
PagingSpec,
SchemaSpec)] = PartialFunction.condOpt(target) {
case qcis: QualifiedColumnInputSpec => (
qcis.family,
qcis.qualifier,
qcis.maxVersions,
qcis.filterSpec,
qcis.pagingSpec,
qcis.schemaSpec)
}
/**
* A request for data from a fully qualified Kiji table column.
* This construct method is used by Java builders for ColumnInputSpec.
* Scala users ought to use the Builder APIs.
*
* @param column is the fully qualified column name of the requested data.
* @param maxVersions to read back from the requested column (default is only most recent).
* @param filterSpec to use when reading back cells. Defaults to
* [[org.kiji.express.flow.ColumnFilterSpec.NoFilter]].
* @param pagingSpec options specifying the maximum number of cells to retrieve from Kiji
* per page. Defaults to [[org.kiji.express.flow.PagingSpec.Off]].
* @param schemaSpec specifies the schema to use when reading cells. Defaults to
* [[org.kiji.express.flow.SchemaSpec.Writer]].
* @return a new column input spec with supplied options.
*/
private[express] def construct(
column: KijiColumnName,
maxVersions: java.lang.Integer,
filterSpec: ColumnFilterSpec,
pagingSpec: PagingSpec,
schemaSpec: SchemaSpec
): QualifiedColumnInputSpec = {
// Construct QualifiedColumnInputSpec
new QualifiedColumnInputSpec(
column.getFamily(),
column.getQualifier(),
Option(maxVersions) match {
case None => ColumnInputSpec.DEFAULT_MAX_VERSIONS
case _ => maxVersions
},
Option(filterSpec).getOrElse(ColumnInputSpec.DEFAULT_COLUMN_FILTER_SPEC),
Option(pagingSpec).getOrElse(ColumnInputSpec.DEFAULT_PAGING_SPEC),
Option(schemaSpec).getOrElse(ColumnInputSpec.DEFAULT_SCHEMA_SPEC)
)
}
/**
* Create a new QualifiedColumnInputSpec.Builder.
*
* @return a new QualifiedColumnInputSpec.Builder.
*/
def builder: Builder = Builder()
/**
* Create a new QualifiedColumnInputSpec.Builder as a copy of the given Builder.
*
* @param other Builder to copy.
* @return a new QualifiedColumnInputSpec.Builder as a copy of the given Builder.
*/
def builder(other: Builder): Builder = Builder(other)
/**
* Builder for QualifiedColumnInputSpec.
*
* @param mFamily optional family with which to initialize this builder.
* @param mQualifier optional qualifier with which to initialize this builder.
* @param mMaxVersions optional maxVersions with which to initialize this builder.
* @param mFilterSpec optional FilterSpec with which to initialize this builder.
* @param mPagingSpec optional PagingSpec with which to initialize this builder.
* @param mSchemaSpec optional SchemaSpec with which to initialize this builder.
*/
@ApiAudience.Public
@ApiStability.Stable
final class Builder private(
private[this] var mFamily: Option[String],
private[this] var mQualifier: Option[String],
private[this] var mMaxVersions: Option[Int],
private[this] var mFilterSpec: Option[ColumnFilterSpec],
private[this] var mPagingSpec: Option[PagingSpec],
private[this] var mSchemaSpec: Option[SchemaSpec]
) {
/** protects read and write access to private var fields. */
private val monitor = new AnyRef
/**
* Configure the input spec to read the given Kiji column.
*
* @param column into which to read the values
* @return this builder.
*/
def withColumn(column: KijiColumnName): Builder = monitor.synchronized {
require(column != null, "Input column may not be null.")
require(column.isFullyQualified, "Input column must be fully qualified.")
require(mFamily.isEmpty, "Input column family already set to: " + mFamily.get)
require(mQualifier.isEmpty, "Input column qualifier already set to: " + mQualifier.get)
mFamily = Some(column.getFamily)
mQualifier = Some(column.getQualifier)
this
}
/**
* Configure the input spec to read the given Kiji column.
*
* @param family of the column from which to read.
* @param qualifier of the column from which to read.
* @return this builder.
*/
def withColumn(family: String, qualifier: String): Builder = monitor.synchronized {
require(family != null, "Input column family may not be null.")
require(qualifier != null, "Input column qualifier may not be null.")
require(mFamily.isEmpty, "Input column family already set to: " + mFamily.get)
require(mQualifier.isEmpty, "Input column qualifier already set to: " + mQualifier.get)
mFamily = Some(family)
mQualifier = Some(qualifier)
this
}
/**
* Configure the input spec to read from the given Kiji column family. Must also call
* [[org.kiji.express.flow.QualifiedColumnInputSpec.Builder.withQualifier()]] before calling
* [[org.kiji.express.flow.QualifiedColumnInputSpec.Builder.build]].
*
* @param family of the column from which to read.
* @return this builder.
*/
def withFamily(family: String): Builder = monitor.synchronized {
require(family != null, "Input column family may not be null.")
require(mFamily.isEmpty, "Input column family already set to: " + mFamily.get)
mFamily = Some(family)
this
}
/**
* Configure the input spec to read from the given Kiji column qualifier. Must also call
* [[org.kiji.express.flow.QualifiedColumnInputSpec.Builder.withFamily()]] before calling
* [[org.kiji.express.flow.QualifiedColumnInputSpec.Builder.build]].
*
* @param qualifier of the column from which to read.
* @return this builder.
*/
def withQualifier(qualifier: String): Builder = monitor.synchronized {
require(qualifier != null, "Input column qualifier may not be null.")
require(mQualifier.isEmpty, "Input column qualifier already set to: " + mQualifier.get)
mQualifier = Some(qualifier)
this
}
/**
* Name of the Kiji column family from which to read.
*
* @return the name of the Kiji column family from which to read.
*/
def family: Option[String] = monitor.synchronized(mFamily)
/**
* Name of the Kiji column qualifier from which to read.
*
* @return the name of the Kiji column qualifier from which to read.
*/
def qualifier: Option[String] = monitor.synchronized(mQualifier)
/**
* Configure the input spec to read the specified maximum versions.
*
* @param maxVersions to read back from the requested column (default is only most recent).
* @return this builder.
*/
def withMaxVersions(maxVersions: Int): Builder = monitor.synchronized {
require(mMaxVersions.isEmpty, "Max versions already set to: " + mMaxVersions.get)
require(0 < maxVersions, "Max versions must be strictly positive, but got " + maxVersions)
mMaxVersions = Some(maxVersions)
this
}
/**
* The maximum number of versions requested for reading.
*
* @return the maximum versions to read back from requested column.
*/
def maxVersions: Option[Int] = monitor.synchronized(mMaxVersions)
/**
* Configure the input spec to read using the given FilterSpec.
*
* @param filterSpec defining the filter which will be used to read this column.
* @return this builder.
*/
def withFilterSpec(filterSpec: ColumnFilterSpec): Builder = monitor.synchronized {
require(filterSpec != null, "Filter spec may not be null.")
require(mFilterSpec.isEmpty, "Filter spec already set to: " + mFilterSpec.get)
mFilterSpec = Some(filterSpec)
this
}
/**
* Specification of the filter to use when reading this column.
*
* @return a specification of the filter to use when reading this column.
*/
def filterSpec: Option[ColumnFilterSpec] = monitor.synchronized(mFilterSpec)
/**
* Configure the input spec to page the read data according to the given specification.
*
* @param pagingSpec options specifying the maximum number of cells to retrieve from Kiji.
* @return this builder.
*/
def withPagingSpec(pagingSpec: PagingSpec): Builder = monitor.synchronized {
require(pagingSpec != null, "Paging spec may not be null.")
require(mPagingSpec.isEmpty, "Paging spec already set to: " + mPagingSpec.get)
mPagingSpec = Some(pagingSpec)
this
}
/**
* Paging specification containing the maximum number of cells to retrieve from Kiji.
*
* @return paging specification containing the maximum number of cells to retrieve from Kiji.
*/
def pagingSpec: Option[PagingSpec] = monitor.synchronized(mPagingSpec)
/**
* Configure the input spec to read using the given SchemaSpec.
*
* @param schemaSpec defining the Schema which will be used to read this column.
* @return this builder.
*/
def withSchemaSpec(schemaSpec: SchemaSpec): Builder = monitor.synchronized {
require(schemaSpec != null, "Schema spec may not be null.")
require(mSchemaSpec.isEmpty, "Schema spec already set to: " + mSchemaSpec.get)
mSchemaSpec = Some(schemaSpec)
this
}
/**
* Specification of the Schema to use when reading this column.
*
* @return a specification of the Schema to use when reading this column.
*/
def schemaSpec: Option[SchemaSpec] = monitor.synchronized(mSchemaSpec)
/**
* Build a new QualifiedColumnInputSpec from the values stored in this builder.
*
* @throws IllegalStateException if the builder is not in a valid state to be built.
* @return a new QualifiedColumnInputSpec from the values stored in this builder.
*/
def build: QualifiedColumnInputSpec = monitor.synchronized {
new QualifiedColumnInputSpec(
mFamily.getOrElse(
throw new IllegalStateException("Input column family must be specified.")),
mQualifier.getOrElse(
throw new IllegalStateException("Input column qualifier must be specified.")),
mMaxVersions.getOrElse(ColumnInputSpec.DEFAULT_MAX_VERSIONS),
mFilterSpec.getOrElse(ColumnInputSpec.DEFAULT_COLUMN_FILTER_SPEC),
mPagingSpec.getOrElse(ColumnInputSpec.DEFAULT_PAGING_SPEC),
mSchemaSpec.getOrElse(ColumnInputSpec.DEFAULT_SCHEMA_SPEC)
)
}
override def toString: String = monitor.synchronized {
Objects.toStringHelper(classOf[Builder])
.add("family", mFamily)
.add("qualifier", mQualifier)
.add("max_versions", mMaxVersions)
.add("filter_spec", mFilterSpec)
.add("paging_spec", mPagingSpec)
.add("schema_spec", mSchemaSpec)
.toString
}
}
/**
* Companion object providing factory methods for creating new instances of
* [[org.kiji.express.flow.QualifiedColumnInputSpec.Builder]].
*/
@ApiAudience.Public
@ApiStability.Stable
object Builder {
/**
* Create a new empty QualifiedColumnInputSpec.Builder.
*
* @return a new empty QualifiedColumnInputSpec.Builder.
*/
private[express] def apply(): Builder = new Builder(None, None, None, None, None, None)
/**
* Create a new QualifiedColumnInputSpec.Builder as a copy of the given Builder.
*
* @param other Builder to copy.
* @return a new QualifiedColumnInputSpec.Builder as a copy of the given Builder.
*/
private[express] def apply(other: Builder): Builder = other.monitor.synchronized {
// synchronize to get a consistent snapshot of other
new Builder(other.family,
other.qualifier,
other.maxVersions,
other.filterSpec,
other.pagingSpec,
other.schemaSpec)
}
}
}
/**
* Specifies a request for versions of cells from a column family.
*
* Basic column family example:
* {{{
* // Request the latest version of data stored in the "matrix" column family.
* val myColumnFamilySpec: ColumnFamilyInputSpec =
* ColumnFamilyInputSpec.builder
* .withFamily("matrix")
* .withMaxVersions(1)
* .build
* }}}
*
* Filters can be applied to the column qualifier of cells in a column family.
* {{{
* // Request cells from the "hits" column that are from columns with qualifiers that begin with
* // the string "http://www.wibidata.com/".
* val myFilteredColumnSpec: ColumnFamilyInputSpec =
* ColumnFamilyInputSpec.builder
* .withFamily("hits")
* .withMaxVersions(flow.all)
* .withFilterSpec(RegexQualifierFilterSpec("http://www\.wibidata\.com/.*"))
* .build
* }}}
*
* Paging can be enabled on a column input specification causing blocks of cells to be retrieved
* from Kiji at a time:
* {{{
* // Request cells from the "metadata" column family retrieving 1000 cells per block.
* val myPagedColumn: ColumnFamilyInputSpec =
* ColumnFamilyInputSpec.builder
* .withFamily("metadata")
* .withMaxVersions(flow.all)
* .withPagingSpec(PagingSpec.Cells(1000))
* .build
* }}}
*
* If compiled avro classes are being used, a class that data should be read as can be specified:
* {{{
* // Request cells from the "users" column family containing User records.
* val myColumnSpec: ColumnFamilyInputSpec =
* ColumnFamilyInputSpec.builder
* .withFamily("users")
* .withMaxVersions(1)
* .withSchemaSpec(SchemaSpec.Specific(classOf[User]))
* .build
* }}}
*
* To see more information about reading data from a Kiji table, see
* [[org.kiji.express.flow.KijiInput]].
*
* @param family of columns the requested data belongs to.
* @param maxVersions to read back from the requested column family (default is only most recent).
* @param filterSpec to use when reading back cells (default is ColumnFilterSpec.NoFilter).
* @param pagingSpec options specifying the maximum number of cells to retrieve from Kiji per page.
* @param schemaSpec specifies the schema to use when reading cells. Defaults to
* [[org.kiji.express.flow.SchemaSpec.Writer]].
*/
@ApiAudience.Public
@ApiStability.Stable
final class ColumnFamilyInputSpec private(
val family: String,
val maxVersions: Int = ColumnInputSpec.DEFAULT_MAX_VERSIONS,
val filterSpec: ColumnFilterSpec = ColumnInputSpec.DEFAULT_COLUMN_FILTER_SPEC,
val pagingSpec: PagingSpec = ColumnInputSpec.DEFAULT_PAGING_SPEC,
val schemaSpec: SchemaSpec = ColumnInputSpec.DEFAULT_SCHEMA_SPEC
) extends ColumnInputSpec with Serializable {
if (family.contains(':')) {
throw new KijiInvalidNameException("Cannot have a ':' in family name for column family request")
}
override def columnName: KijiColumnName = new KijiColumnName(family)
override def toString: String = Objects.toStringHelper(classOf[ColumnFamilyInputSpec])
.add("family", family)
.add("max_versions", maxVersions)
.add("filter_spec", filterSpec)
.add("paging_spec", pagingSpec)
.add("schema_spec", schemaSpec)
.toString
override def hashCode: Int =
Objects.hashCode(
family,
maxVersions: java.lang.Integer,
filterSpec,
pagingSpec,
schemaSpec)
override def equals(obj: Any): Boolean = obj match {
case other: ColumnFamilyInputSpec => {
family == other.family &&
maxVersions == other.maxVersions &&
filterSpec == other.filterSpec &&
pagingSpec == other.pagingSpec &&
schemaSpec == other.schemaSpec
}
case _ => false
}
}
/**
* Provides factory functions for creating [[org.kiji.express.flow.ColumnFamilyInputSpec]]
* instances.
*/
@ApiAudience.Public
@ApiStability.Stable
object ColumnFamilyInputSpec {
/**
* Create a new ColumnFamilyInputSpec from the given parameters.
*
* @param family of columns the requested data belongs to.
* @param maxVersions to read back from the requested column (default is only most recent).
* @param filterSpec to use when reading back cells (default is ColumnFilterSpec.NoFilter).
* @param pagingSpec options specifying the maximum number of cells to retrieve
* from Kiji per page.
* @param schemaSpec specification with which to read data.
* @return a new column input spec with supplied options.
*/
private[express] def apply(
family: String,
maxVersions: Int = ColumnInputSpec.DEFAULT_MAX_VERSIONS,
filterSpec: ColumnFilterSpec = ColumnInputSpec.DEFAULT_COLUMN_FILTER_SPEC,
pagingSpec: PagingSpec = ColumnInputSpec.DEFAULT_PAGING_SPEC,
schemaSpec: SchemaSpec = ColumnInputSpec.DEFAULT_SCHEMA_SPEC
): ColumnFamilyInputSpec = {
new ColumnFamilyInputSpec(
family,
maxVersions,
filterSpec,
pagingSpec,
schemaSpec)
}
/**
* Decompose the given object into its constituent parts if it is an instance of
* ColumnFamilyInputSpec.
*
* @param target object to decompose if it is a ColumnFamilyInputSpec.
* @return the fields used to construct the target.
* (family, maxVersions, filterSpec, pagingSpec, schemaSpec)
*/
private[express] def unapply(
target: Any
): Option[(
String,
Int,
ColumnFilterSpec,
PagingSpec,
SchemaSpec)] = PartialFunction.condOpt(target) {
case cfis: ColumnFamilyInputSpec => (
cfis.family,
cfis.maxVersions,
cfis.filterSpec,
cfis.pagingSpec,
cfis.schemaSpec)
}
/**
* A request for data from a Kiji table column family.
* This construct method is used by Java builders for ColumnInputSpec.
* Scala users ought to use the Builder APIs.
*
* @param column family name of the requested data.
* @param maxVersions to read back from the requested column (default is only most recent).
* @param filterSpec to use when reading back cells (default is ColumnFilterSpec.NoFilter).
* @param pagingSpec options specifying the maximum number of cells to retrieve from Kiji per
* page. Defaults to [[org.kiji.express.flow.PagingSpec.Off]].
* @param schemaSpec specifies the schema to use when reading cells. Defaults to
* [[org.kiji.express.flow.SchemaSpec.Writer]].
* @return a new column input spec with supplied options.
*/
private[express] def construct(
column: KijiColumnName,
maxVersions: java.lang.Integer,
filterSpec: ColumnFilterSpec,
pagingSpec: PagingSpec,
schemaSpec: SchemaSpec
): ColumnFamilyInputSpec = {
// Construct ColumnFamilyInputSpec
ColumnFamilyInputSpec(
column.getFamily(),
Option(maxVersions) match {
case None => ColumnInputSpec.DEFAULT_MAX_VERSIONS
case _ => maxVersions
},
Option(filterSpec).getOrElse(ColumnInputSpec.DEFAULT_COLUMN_FILTER_SPEC),
Option(pagingSpec).getOrElse(ColumnInputSpec.DEFAULT_PAGING_SPEC),
Option(schemaSpec).getOrElse(ColumnInputSpec.DEFAULT_SCHEMA_SPEC)
)
}
/**
* Create a new ColumnFamilyInputSpec.Builder.
*
* @return a new ColumnFamilyInputSpec.Builder.
*/
def builder: Builder = Builder()
/**
* Create a new ColumnFamilyInputSpec.Builder as a copy of the given Builder.
*
* @param other Builder to copy.
* @return a new ColumnFamilyInputSpec.Builder as a copy of the given Builder.
*/
def builder(other: Builder): Builder = Builder(other)
/**
* Builder for ColumnFamilyInputSpec.
*
* @param mFamily optional family with which to initialize this builder.
* @param mMaxVersions optional maxVersions with which to initialize this builder.
* @param mFilterSpec optional FilterSpec with which to initialize this builder.
* @param mPagingSpec optional PagingSpec with which to initialize this builder.
* @param mSchemaSpec optional SchemaSpec with which to initialize this builder.
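*
* A minimal usage sketch (the "metrics" family here is illustrative):
* {{{
* val spec: ColumnFamilyInputSpec =
* ColumnFamilyInputSpec.builder
* .withFamily("metrics")
* .withPagingSpec(PagingSpec.Cells(100))
* .build
* }}}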
*/
@ApiAudience.Public
@ApiStability.Stable
final class Builder private(
private[this] var mFamily: Option[String],
private[this] var mMaxVersions: Option[Int],
private[this] var mFilterSpec: Option[ColumnFilterSpec],
private[this] var mPagingSpec: Option[PagingSpec],
private[this] var mSchemaSpec: Option[SchemaSpec]
) {
/** protects read and write access to private var fields. */
private val monitor = new AnyRef
/**
* Configure the input spec to read the given Kiji column family.
*
* @param column family from which to read values.
* @return this builder.
*/
def withColumn(column: KijiColumnName): Builder = monitor.synchronized {
require(column != null, "Input column may not be null.")
require(!column.isFullyQualified, "Input column may not be fully qualified.")
require(mFamily.isEmpty, "Input column already set to: " + mFamily.get)
mFamily = Some(column.getFamily)
this
}
/**
* Configure the input spec to read from the given Kiji column family.
*
* @param family of the column from which to read.
* @return this builder.
*/
def withFamily(family: String): Builder = monitor.synchronized {
require(family != null, "Input column family may not be null.")
require(mFamily.isEmpty, "Input column family already set to: " + mFamily.get)
mFamily = Some(family)
this
}
/**
* Name of the Kiji column family from which to read.
*
* @return the name of the Kiji column family from which to read.
*/
def family: Option[String] = monitor.synchronized(mFamily)
/**
* Configure the input spec to read the specified maximum versions.
*
* @param maxVersions to read back from the requested column (default is only most recent).
* @return this builder.
*/
def withMaxVersions(maxVersions: Int): Builder = monitor.synchronized {
require(mMaxVersions.isEmpty, "Max versions already set to: " + mMaxVersions.get)
require(0 < maxVersions, "Max versions must be strictly positive, but got " + maxVersions)
mMaxVersions = Some(maxVersions)
this
}
/**
* The maximum number of versions requested for reading.
*
* @return the maximum versions to read back from requested column.
*/
def maxVersions: Option[Int] = monitor.synchronized(mMaxVersions)
/**
* Configure the input spec to read using the given FilterSpec.
*
* @param filterSpec defining the filter which will be used to read this column.
* @return this builder.
*/
def withFilterSpec(filterSpec: ColumnFilterSpec): Builder = monitor.synchronized {
require(filterSpec != null, "Filter spec may not be null.")
require(mFilterSpec.isEmpty, "Filter spec already set to: " + mFilterSpec.get)
mFilterSpec = Some(filterSpec)
this
}
/**
* Specification of the filter to use when reading this column.
*
* @return a specification of the filter to use when reading this column.
*/
def filterSpec: Option[ColumnFilterSpec] = monitor.synchronized(mFilterSpec)
/**
* Configure the input spec to page the read data according to the given specification.
*
* @param pagingSpec options specifying the maximum number of cells to retrieve from Kiji.
* @return this builder.
*/
def withPagingSpec(pagingSpec: PagingSpec): Builder = monitor.synchronized {
require(pagingSpec != null, "Paging spec may not be null.")
require(mPagingSpec.isEmpty, "Paging spec already set to: " + mPagingSpec.get)
mPagingSpec = Some(pagingSpec)
this
}
/**
* Paging specification containing the maximum number of cells to retrieve from Kiji.
*
* @return paging specification containing the maximum number of cells to retrieve from Kiji.
*/
def pagingSpec: Option[PagingSpec] = monitor.synchronized(mPagingSpec)
/**
* Configure the input spec to read using the given SchemaSpec.
*
* @param schemaSpec defining the Schema which will be used to read this column.
* @return this builder.
*/
def withSchemaSpec(schemaSpec: SchemaSpec): Builder = monitor.synchronized {
require(schemaSpec != null, "Schema spec may not be null.")
require(mSchemaSpec.isEmpty, "Schema spec already set to: " + mSchemaSpec.get)
mSchemaSpec = Some(schemaSpec)
this
}
/**
* Specification of the Schema to use when reading this column.
*
* @return a specification of the Schema to use when reading this column.
*/
def schemaSpec: Option[SchemaSpec] = monitor.synchronized(mSchemaSpec)
/**
* Build a new ColumnFamilyInputSpec from the values stored in this builder.
*
* @throws IllegalStateException if the builder is not in a valid state to be built.
* @return a new ColumnFamilyInputSpec from the values stored in this builder.
*/
def build: ColumnFamilyInputSpec = monitor.synchronized {
new ColumnFamilyInputSpec(
mFamily.getOrElse(
throw new IllegalStateException("Input column family must be specified.")),
mMaxVersions.getOrElse(ColumnInputSpec.DEFAULT_MAX_VERSIONS),
mFilterSpec.getOrElse(ColumnInputSpec.DEFAULT_COLUMN_FILTER_SPEC),
mPagingSpec.getOrElse(ColumnInputSpec.DEFAULT_PAGING_SPEC),
mSchemaSpec.getOrElse(ColumnInputSpec.DEFAULT_SCHEMA_SPEC)
)
}
override def toString: String = monitor.synchronized {
Objects.toStringHelper(classOf[Builder])
.add("family", mFamily)
.add("max_versions", mMaxVersions)
.add("filter_spec", mFilterSpec)
.add("paging_spec", mPagingSpec)
.add("schema_spec", mSchemaSpec)
.toString
}
}
/**
* Companion object providing factory methods for creating new instances of
* [[org.kiji.express.flow.ColumnFamilyInputSpec.Builder]].
*/
@ApiAudience.Public
@ApiStability.Stable
object Builder {
/**
* Create a new empty ColumnFamilyInputSpec.Builder.
*
* @return a new empty ColumnFamilyInputSpec.Builder.
*/
private[express] def apply(): Builder = new Builder(None, None, None, None, None)
/**
* Create a new ColumnFamilyInputSpec.Builder as a copy of the given Builder.
*
* @param other Builder to copy.
* @return a new ColumnFamilyInputSpec.Builder as a copy of the given Builder.
*/
private[express] def apply(other: Builder): Builder = other.monitor.synchronized {
// synchronize to get a consistent snapshot of other
new Builder(other.family,
other.maxVersions,
other.filterSpec,
other.pagingSpec,
other.schemaSpec)
}
}
}
| kijiproject/kiji-express | kiji-express/src/main/scala/org/kiji/express/flow/ColumnInputSpec.scala | Scala | apache-2.0 | 37,583 |
package com.criteo.sre.storage.sgrastar.singularity
package cassandra
import java.nio.ByteBuffer
import java.util.{Set => JSet}
import lucene.{MetricsIndex, MetricsIndexManager}
import org.apache.cassandra.config.ColumnDefinition
import org.apache.cassandra.cql3.CFDefinition
import org.apache.cassandra.db._
import org.apache.cassandra.db.index._
import org.apache.cassandra.dht.LongToken
import org.apache.cassandra.dht.Murmur3Partitioner
import org.apache.lucene.analysis.Token
import org.slf4j.LoggerFactory
import scala.collection.JavaConversions._
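/** A per-row secondary index that mirrors Cassandra row inserts and deletes
* into a shared, per-keyspace Lucene metrics index, rather than storing
* index data in a Cassandra column family.
*/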
class RowIndex
extends PerRowSecondaryIndex {
private val log = LoggerFactory.getLogger(classOf[RowIndex])
private var columnDefinition: ColumnDefinition = null
private var tableDefinition: CFDefinition = null
private var keyspaceName = ""
private var tableName = ""
private var indexName = ""
private var fullyQualifiedIndexName = ""
private var luceneIndex: MetricsIndex = null
private var cassandraSearcher: RowIndexSearcher = null
private val murmur3 = new Murmur3Partitioner()
override def getIndexCfs(): ColumnFamilyStore =
// We're not using CFs in order to store index data
null
override def getIndexName(): String =
indexName
/** Called by C* after setting the columnDefs, but before setting the baseCfs.
*/
override def validateOptions() =
assert(columnDefs != null && columnDefs.size() == 1)
override def init() = synchronized {
validateOptions()
assert(baseCfs != null)
log.info("Initializing index")
columnDefinition = columnDefs.iterator.next
tableDefinition = baseCfs.metadata.getCfDef
keyspaceName = baseCfs.metadata.ksName
tableName = baseCfs.name
indexName = columnDefinition.getIndexName
fullyQualifiedIndexName = keyspaceName + "." + tableName + "." + indexName
luceneIndex = MetricsIndexManager.getOrCreateKeyspaceIndex(keyspaceName)
cassandraSearcher = new RowIndexSearcher(baseCfs, luceneIndex, columnDefinition.name)
log.info(
"Index initialized with ks={}, cf={}, name={}",
keyspaceName,
tableName,
indexName
)
}
override def indexes(col: ByteBuffer): Boolean =
// We will not be indexing any column
false
/** Called upon CF updates (insert/delete of rows).
*/
override def index(key: ByteBuffer, cf: ColumnFamily) = {
val metric = Metric fromCompositeKey key
if (cf.isMarkedForDelete) {
luceneIndex.delete(metric)
} else {
val token = murmur3.getToken(key).token
luceneIndex.insert(metric, token)
}
}
/** Called when dropping a whole row during cleanup.
*/
override def delete(dk: DecoratedKey) = {
val metric = Metric fromCompositeKey dk.key
luceneIndex.delete(metric)
}
override def forceBlockingFlush() =
luceneIndex.commit(true)
override def getLiveSize(): Long =
luceneIndex.size
override protected def createSecondaryIndexSearcher(columns: JSet[ByteBuffer]): SecondaryIndexSearcher =
cassandraSearcher
/** Called upon index removal,
* no-op because we share indices and this should not happen.
*/
override def invalidate() =
log.info("{} - Invalidate (no-op)", fullyQualifiedIndexName)
/** Called upon index alteration (column addition/removal),
* no-op because we share indices and this should not happen.
*/
override def reload() =
log.info("{} - Reload (no-op)", fullyQualifiedIndexName)
/** Called upon removing a column index,
* no-op because we share indices and this should not happen.
*/
override def removeIndex(column: ByteBuffer) =
log.info("{} - RemoveIndex (no-op)", fullyQualifiedIndexName)
/** Called upon truncating a CF,
* no-op because we share indices and this should not happen.
*/
override def truncateBlocking(truncatedAt: Long) =
log.info("{} - Truncate (no-op)", fullyQualifiedIndexName)
}
| dpanth3r/cassandra-graphite-poc | src/main/scala/sgrastar/singularity/cassandra/RowIndex.scala | Scala | apache-2.0 | 3,910 |
package priv
package object sp {
def owner = 0
def opponent = 1
val playerIds: List[Int] = List(owner, opponent)
val nbSlots = 6
val baseSlotRange = 0 until nbSlots
val baseSlotList = baseSlotRange.toList
val some0 = Option(0)
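/** The slots adjacent to n that fall inside the valid slot range. */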
def adjacents(n: Int) = List(n - 1, n + 1).filter(inSlotRange _)
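/** The inclusive slot interval from n to m, clamped to the valid slot bounds. */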
def slotInterval(n: Int, m: Int) = (math.max(0, n) to math.min(5, m))
type PlayerId = Int
def other(id: PlayerId) = if (id == owner) opponent else owner
def inSlotRange(n: Int) = n > -1 && n < 6
}
| illim/freespectrogdx | core/src/main/scala/priv/sp/package.scala | Scala | gpl-3.0 | 519 |
/*
* This file is part of the diffson project.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package diffson
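/** Signals a failure while applying a patch. */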
case class PatchException(msg: String) extends Exception(msg)
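/** Signals an invalid or unresolvable pointer. */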
class PointerException(msg: String) extends Exception(msg)
| gnieh/diffson | core/src/main/scala/diffson/exceptions.scala | Scala | apache-2.0 | 731 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.scheduler.cluster
import java.util.concurrent.Semaphore
import java.util.concurrent.atomic.AtomicBoolean
import scala.concurrent.Future
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.deploy.{ApplicationDescription, Command}
import org.apache.spark.deploy.client.{StandaloneAppClient, StandaloneAppClientListener}
import org.apache.spark.internal.{config, Logging}
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.launcher.{LauncherBackend, SparkAppHandle}
import org.apache.spark.resource.ResourceUtils
import org.apache.spark.rpc.RpcEndpointAddress
import org.apache.spark.scheduler._
import org.apache.spark.util.Utils
/**
* A [[SchedulerBackend]] implementation for Spark's standalone cluster manager.
*/
private[spark] class StandaloneSchedulerBackend(
scheduler: TaskSchedulerImpl,
sc: SparkContext,
masters: Array[String])
extends CoarseGrainedSchedulerBackend(scheduler, sc.env.rpcEnv)
with StandaloneAppClientListener
with Logging {
private var client: StandaloneAppClient = null
private val stopping = new AtomicBoolean(false)
private val launcherBackend = new LauncherBackend() {
override protected def conf: SparkConf = sc.conf
override protected def onStopRequest(): Unit = stop(SparkAppHandle.State.KILLED)
}
@volatile var shutdownCallback: StandaloneSchedulerBackend => Unit = _
@volatile private var appId: String = _
private val registrationBarrier = new Semaphore(0)
private val maxCores = conf.get(config.CORES_MAX)
private val totalExpectedCores = maxCores.getOrElse(0)
override def start(): Unit = {
super.start()
// SPARK-21159. The scheduler backend should only try to connect to the launcher when in client
// mode. In cluster mode, the code that submits the application to the Master needs to connect
// to the launcher instead.
if (sc.deployMode == "client") {
launcherBackend.connect()
}
// The endpoint for executors to talk to us
val driverUrl = RpcEndpointAddress(
sc.conf.get(config.DRIVER_HOST_ADDRESS),
sc.conf.get(config.DRIVER_PORT),
CoarseGrainedSchedulerBackend.ENDPOINT_NAME).toString
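// The {{...}} tokens below are placeholders that the Worker substitutes when
// launching the executor process.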
val args = Seq(
"--driver-url", driverUrl,
"--executor-id", "{{EXECUTOR_ID}}",
"--hostname", "{{HOSTNAME}}",
"--cores", "{{CORES}}",
"--app-id", "{{APP_ID}}",
"--worker-url", "{{WORKER_URL}}")
val extraJavaOpts = sc.conf.get(config.EXECUTOR_JAVA_OPTIONS)
.map(Utils.splitCommandString).getOrElse(Seq.empty)
val classPathEntries = sc.conf.get(config.EXECUTOR_CLASS_PATH)
.map(_.split(java.io.File.pathSeparator).toSeq).getOrElse(Nil)
val libraryPathEntries = sc.conf.get(config.EXECUTOR_LIBRARY_PATH)
.map(_.split(java.io.File.pathSeparator).toSeq).getOrElse(Nil)
// When testing, expose the parent class path to the child. This is processed by
// compute-classpath.{cmd,sh} and makes all needed jars available to child processes
// when the assembly is built with the "*-provided" profiles enabled.
val testingClassPath =
if (sys.props.contains(IS_TESTING.key)) {
sys.props("java.class.path").split(java.io.File.pathSeparator).toSeq
} else {
Nil
}
// Start executors with a few necessary configs for registering with the scheduler
val sparkJavaOpts = Utils.sparkJavaOpts(conf, SparkConf.isExecutorStartupConf)
val javaOpts = sparkJavaOpts ++ extraJavaOpts
val command = Command("org.apache.spark.executor.CoarseGrainedExecutorBackend",
args, sc.executorEnvs, classPathEntries ++ testingClassPath, libraryPathEntries, javaOpts)
val webUrl = sc.ui.map(_.webUrl).getOrElse("")
val coresPerExecutor = conf.getOption(config.EXECUTOR_CORES.key).map(_.toInt)
// If we're using dynamic allocation, set our initial executor limit to 0 for now.
// ExecutorAllocationManager will send the real initial limit to the Master later.
val initialExecutorLimit =
if (Utils.isDynamicAllocationEnabled(conf)) {
Some(0)
} else {
None
}
val executorResourceReqs = ResourceUtils.parseResourceRequirements(conf,
config.SPARK_EXECUTOR_PREFIX)
val appDesc = ApplicationDescription(sc.appName, maxCores, sc.executorMemory, command,
webUrl, sc.eventLogDir, sc.eventLogCodec, coresPerExecutor, initialExecutorLimit,
resourceReqsPerExecutor = executorResourceReqs)
client = new StandaloneAppClient(sc.env.rpcEnv, masters, appDesc, this, conf)
client.start()
launcherBackend.setState(SparkAppHandle.State.SUBMITTED)
waitForRegistration()
launcherBackend.setState(SparkAppHandle.State.RUNNING)
}
override def stop(): Unit = {
stop(SparkAppHandle.State.FINISHED)
}
override def connected(appId: String): Unit = {
logInfo("Connected to Spark cluster with app ID " + appId)
this.appId = appId
notifyContext()
launcherBackend.setAppId(appId)
}
override def disconnected(): Unit = {
notifyContext()
if (!stopping.get) {
logWarning("Disconnected from Spark cluster! Waiting for reconnection...")
}
}
override def dead(reason: String): Unit = {
notifyContext()
if (!stopping.get) {
launcherBackend.setState(SparkAppHandle.State.KILLED)
logError("Application has been killed. Reason: " + reason)
try {
scheduler.error(reason)
} finally {
// Ensure the application terminates, as we can no longer run jobs.
sc.stopInNewThread()
}
}
}
override def executorAdded(fullId: String, workerId: String, hostPort: String, cores: Int,
memory: Int): Unit = {
logInfo("Granted executor ID %s on hostPort %s with %d core(s), %s RAM".format(
fullId, hostPort, cores, Utils.megabytesToString(memory)))
}
override def executorRemoved(
fullId: String, message: String, exitStatus: Option[Int], workerLost: Boolean): Unit = {
val reason: ExecutorLossReason = exitStatus match {
case Some(code) => ExecutorExited(code, exitCausedByApp = true, message)
case None => SlaveLost(message, workerLost = workerLost)
}
logInfo("Executor %s removed: %s".format(fullId, message))
removeExecutor(fullId.split("/")(1), reason)
}
override def workerRemoved(workerId: String, host: String, message: String): Unit = {
logInfo("Worker %s removed: %s".format(workerId, message))
removeWorker(workerId, host, message)
}
override def sufficientResourcesRegistered(): Boolean = {
totalCoreCount.get() >= totalExpectedCores * minRegisteredRatio
}
override def applicationId(): String =
Option(appId).getOrElse {
logWarning("Application ID is not initialized yet.")
super.applicationId
}
/**
* Request executors from the Master by specifying the total number desired,
* including existing pending and running executors.
*
* @return whether the request is acknowledged.
*/
protected override def doRequestTotalExecutors(requestedTotal: Int): Future[Boolean] = {
Option(client) match {
case Some(c) => c.requestTotalExecutors(requestedTotal)
case None =>
logWarning("Attempted to request executors before driver fully initialized.")
Future.successful(false)
}
}
/**
* Kill the given list of executors through the Master.
* @return whether the kill request is acknowledged.
*/
protected override def doKillExecutors(executorIds: Seq[String]): Future[Boolean] = {
Option(client) match {
case Some(c) => c.killExecutors(executorIds)
case None =>
logWarning("Attempted to kill executors before driver fully initialized.")
Future.successful(false)
}
}
private def waitForRegistration() = {
registrationBarrier.acquire()
}
private def notifyContext() = {
registrationBarrier.release()
}
private def stop(finalState: SparkAppHandle.State): Unit = {
if (stopping.compareAndSet(false, true)) {
try {
super.stop()
if (client != null) {
client.stop()
}
val callback = shutdownCallback
if (callback != null) {
callback(this)
}
} finally {
launcherBackend.setState(finalState)
launcherBackend.close()
}
}
}
}
| jkbradley/spark | core/src/main/scala/org/apache/spark/scheduler/cluster/StandaloneSchedulerBackend.scala | Scala | apache-2.0 | 9,152 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package collection
package mutable
import generic._
/**
* Base trait for mutable sorted set.
*
* @define Coll `mutable.SortedSet`
* @define coll mutable sorted set
*
* @author Lucien Pereira
*
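* Example (a small sketch):
* {{{
* val s = SortedSet(3, 1, 2)
* s += 0 // s now iterates in order: 0, 1, 2, 3
* }}}
*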
*/
trait SortedSet[A] extends scala.collection.SortedSet[A] with scala.collection.SortedSetLike[A,SortedSet[A]]
with mutable.Set[A] with mutable.SetLike[A, SortedSet[A]] {
/** Needs to be overridden in subclasses. */
override def empty: SortedSet[A] = SortedSet.empty[A]
}
/**
* A template for mutable sorted set companion objects.
*
* @define Coll `mutable.SortedSet`
* @define coll mutable sorted set
* @define factoryInfo
* This object provides a set of operations needed to create sorted sets of type mutable.SortedSet.
* @define sortedSetCanBuildFromInfo
* Standard `CanBuildFrom` instance for sorted sets.
*
* @author Lucien Pereira
*
*/
object SortedSet extends MutableSortedSetFactory[SortedSet] {
def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = new SortedSetCanBuildFrom[A]
def empty[A](implicit ord: Ordering[A]): SortedSet[A] = TreeSet.empty[A]
// Force a declaration here so that BitSet (which does not inherit from SortedSetFactory) can be more specific
override implicit def newCanBuildFrom[A](implicit ord : Ordering[A]): CanBuildFrom[Coll, A, SortedSet[A]] = super.newCanBuildFrom
}
/** Explicit instantiation of the `SortedSet` trait to reduce class file size in subclasses. */
abstract class AbstractSortedSet[A] extends scala.collection.mutable.AbstractSet[A] with SortedSet[A]
| felixmulder/scala | src/library/scala/collection/mutable/SortedSet.scala | Scala | bsd-3-clause | 2,114 |
package org.jetbrains.plugins.scala.editor.selectioner
import java.util
import com.intellij.codeInsight.editorActions.ExtendWordSelectionHandlerBase
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.util.TextRange
import com.intellij.psi.{PsiElement, TokenType}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScBlockExpr
/**
* @author yole
*/
class ScalaCodeBlockSelectioner extends ExtendWordSelectionHandlerBase {
def canSelect(e: PsiElement): Boolean = e.isInstanceOf[ScBlockExpr]
override def select(e: PsiElement, editorText: CharSequence, cursorOffset: Int, editor: Editor): util.List[TextRange] = {
var firstChild = e.getNode.getFirstChildNode
var lastChild = e.getNode.getLastChildNode
if (firstChild.getElementType == ScalaTokenTypes.tLBRACE && lastChild.getElementType == ScalaTokenTypes.tRBRACE) {
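// Advance the start past any whitespace immediately following the opening brace.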
while(firstChild.getTreeNext != null && firstChild.getTreeNext.getElementType == TokenType.WHITE_SPACE) {
firstChild = firstChild.getTreeNext
}
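// Pull the end back before any whitespace immediately preceding the closing brace.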
while(lastChild.getTreePrev != null && lastChild.getTreePrev.getElementType == TokenType.WHITE_SPACE) {
lastChild = lastChild.getTreePrev
}
val start = firstChild.getTextRange.getEndOffset
val end = lastChild.getTextRange.getStartOffset
if (start >= end) new util.ArrayList[TextRange]() // '{ }' case
else ExtendWordSelectionHandlerBase.expandToWholeLine(editorText, new TextRange(start, end))
} else new util.ArrayList[TextRange]
}
} | ilinum/intellij-scala | src/org/jetbrains/plugins/scala/editor/selectioner/ScalaCodeBlockSelectioner.scala | Scala | apache-2.0 | 1,570 |
package scala.tools.nsc
package ast
trait Positions extends scala.reflect.internal.Positions {
self: Global =>
class ValidatingPosAssigner extends PosAssigner {
var pos: Position = _
override def traverse(t: Tree) {
if (t eq EmptyTree) ()
else if (t.pos == NoPosition) super.traverse(t setPos pos)
else if (globalPhase.id <= currentRun.picklerPhase.id) {
// When we prune due to encountering a position, traverse the
// pruned children so we can warn about those lacking positions.
t.children foreach { c =>
if (!c.canHaveAttrs) ()
else if (c.pos == NoPosition) {
reporter.warning(t.pos, " Positioned tree has unpositioned child in phase " + globalPhase)
inform("parent: " + treeSymStatus(t))
inform(" child: " + treeSymStatus(c) + "\\n")
}
}
}
}
}
override protected[this] lazy val posAssigner: PosAssigner =
if (settings.Yrangepos && settings.debug || settings.Yposdebug) new ValidatingPosAssigner
else new DefaultPosAssigner
}
| felixmulder/scala | src/compiler/scala/tools/nsc/ast/Positions.scala | Scala | bsd-3-clause | 1,082 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.v2
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions.{Attribute, GenericRowWithSchema}
import org.apache.spark.sql.catalyst.util.StringUtils
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.NamespaceHelper
import org.apache.spark.sql.connector.catalog.SupportsNamespaces
/**
* Physical plan node for showing namespaces.
*/
case class ShowNamespacesExec(
output: Seq[Attribute],
catalog: SupportsNamespaces,
namespace: Option[Seq[String]],
pattern: Option[String])
extends V2CommandExec {
override protected def run(): Seq[InternalRow] = {
val namespaces = namespace.map { ns =>
if (ns.nonEmpty) {
catalog.listNamespaces(ns.toArray)
} else {
catalog.listNamespaces()
}
}
.getOrElse(catalog.listNamespaces())
val rows = new ArrayBuffer[InternalRow]()
val encoder = RowEncoder(schema).resolveAndBind()
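// Keep only namespaces matching the optional pattern (all of them when no pattern is given).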
namespaces.map(_.quoted).foreach { ns =>
if (pattern.forall(StringUtils.filterPattern(Seq(ns), _).nonEmpty)) {
rows += encoder
.toRow(new GenericRowWithSchema(Array(ns), schema))
.copy()
}
}
rows
}
}
| jkbradley/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowNamespacesExec.scala | Scala | apache-2.0 | 2,179 |
package net.walend.disentangle.graph
/**
* A graph with zero or one undirected edges between any pair of nodes.
*
* @author dwalend
* @since v0.2.1
*/
trait Undigraph[Node] extends Graph[Node] {
trait UndigraphInnerNodeTrait extends InnerNodeTrait {
def innerEdges:Set[InnerEdgeType]
def outerEdges:Set[OuterEdgeType]
}
/**
* The type of InnerNodeTrait for this digraph representation
*/
type InnerNodeType <: UndigraphInnerNodeTrait
trait UndigraphInnerEdgeTrait extends InnerEdgeTrait {
def nodePair: NodePair[InnerNodeType]
override def selfEdge: Boolean = nodePair._1 == nodePair._2
override def other(node: InnerNodeType): InnerNodeType = nodePair.other(node)
}
type InnerEdgeType <: UndigraphInnerEdgeTrait
}
/**
* A pair of interchangeable nodes, often used in Undigraph. Order of the nodes doesn't matter.
*
* @author dwalend
* @since v0.2.1
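*
* For example (a small sketch):
* {{{
* val pair = NodePair("a", "b")
* pair.other("a") // "b"
* pair == NodePair("b", "a") // true: node order is ignored
* }}}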
*/
case class NodePair[+A](_1: A, _2: A) {
def other[B >: A](node:B):A = {
if(node == _1) _2
else if (node == _2) _1
else throw new IllegalArgumentException(s"This NodePair contains ${_1} and ${_2}, not $node.")
}
def contains[B >: A](elem: B): Boolean =
elem == _1 || elem == _2
override def equals(that: Any): Boolean =
that match {
case that: NodePair[_] =>
(that canEqual this) &&
(((this._1 == that._1) &&
(this._2 == that._2)) ||
((this._1 == that._2) &&
(this._2 == that._1)))
case _ => false
}
override def hashCode:Int = _1.hashCode + _2.hashCode
} | dwalend/Disentangle | graph/shared/src/main/scala/net/walend/disentangle/graph/Undigraph.scala | Scala | mit | 1,586 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.fs.mount
import slamdata.Predef._
import quasar._
import quasar.common.JoinType
import quasar.contrib.pathy._
import quasar.contrib.scalaz.eitherT._
import quasar.effect.{Failure, KeyValueStore, MonotonicSeq}
import quasar.fp._, free._
import quasar.fs._, InMemory.InMemState
import quasar.fs.mount.cache.{VCache, ViewCache}, VCache.VCacheKVS
import quasar.frontend.logicalplan.{Free => _, free => _, _}
import quasar.sql._, ExprArbitrary._
import quasar.std._, IdentityLib.Squash, StdLib._, set._
import java.time.Instant
import scala.concurrent.duration._
import eu.timepit.refined.auto._
import matryoshka._
import matryoshka.data.Fix
import matryoshka.implicits._
import monocle.macros.Lenses
import pathy.{Path => PPath}, PPath._
import pathy.scalacheck.PathyArbitrary._
import scalaz.{Failure => _, Node => _, _}, Scalaz._
class ViewFileSystemSpec extends quasar.Qspec with TreeMatchers {
import TraceFS._
import FileSystemError._
import Mounting.PathTypeMismatch
val lpf = new LogicalPlanR[Fix[LogicalPlan]]
val query = QueryFile.Ops[FileSystem]
val read = ReadFile.Ops[FileSystem]
val write = WriteFile.Ops[FileSystem]
val manage = ManageFile.Ops[FileSystem]
val mounting = Mounting.Ops[ViewFileSystem]
@Lenses
case class VS(
seq: Long, handles: view.State.ViewHandles, vcache: Map[AFile, ViewCache],
mountConfigs: Map[APath, MountConfig], fs: InMemState)
object VS {
def empty = VS(0, Map.empty, Map.empty, Map.empty, InMemState.empty)
def emptyWithViews(views: Map[AFile, Fix[Sql]]) =
mountConfigs.set(views.map { case (p, expr) =>
p -> MountConfig.viewConfig(ScopedExpr(expr, Nil), Variables.empty)
})(empty)
}
// TODO[scalaz]: Shadow the scalaz.Monad.monadMTMAB SI-2712 workaround
import StateT.stateTMonadState
type Errs = MountingError \\/ PathTypeMismatch
type VSF[F[_], A] = StateT[F, VS, A]
type VST[A] = VSF[Trace, A]
type ErrsT[F[_], A] = EitherT[F, Errs, A]
type Traced[A] = ErrsT[VST, A]
type VSS[A] = State[VS, A]
type VFS[A] = ErrsT[VSS, A]
def runMounting[F[_]](implicit F: MonadState[F, VS]): Mounting ~> F =
free.foldMapNT(KeyValueStore.impl.toState[F](VS.mountConfigs)) compose Mounter.trivial[MountConfigs]
def runVCache[F[_]](implicit F: MonadState[F, VS]): VCacheKVS ~> F =
KeyValueStore.impl.toState[F](VS.vcache)
def runViewFileSystem[F[_]](
runFileSystem: FileSystem ~> F
)(implicit
F0: MonadState[F, VS],
F1: MonadError[F, Errs]
): ViewFileSystem ~> F =
ViewFileSystem.interpret[F](
runMounting[F],
Failure.toError[F, Errs] compose Failure.mapError[PathTypeMismatch, Errs](_.right),
Failure.toError[F, Errs] compose Failure.mapError[MountingError, Errs](_.left),
KeyValueStore.impl.toState[F](VS.handles),
KeyValueStore.impl.toState[F](VS.vcache),
MonotonicSeq.toState[F](VS.seq),
runFileSystem)
def traceViewFs(paths: Map[ADir, Set[Node]]): ViewFileSystem ~> Traced =
runViewFileSystem[Traced](
liftMT[VST, ErrsT] compose
liftMT[Trace, VSF] compose
interpretFileSystem[Trace](qfTrace(paths), rfTrace, wfTrace, mfTrace))
case class ViewInterpResultTrace[A](renderedTrees: Vector[RenderedTree], vs: VS, result: Errs \\/ A)
def viewInterpTrace[A](views: Map[AFile, Fix[Sql]], paths: Map[ADir, Set[Node]], t: Free[FileSystem, A])
: ViewInterpResultTrace[A] =
viewInterpTrace(views, Map.empty[AFile, ViewCache], List.empty[AFile], paths, t)
def viewInterpTrace[A](vcache: Map[AFile, ViewCache], t: Free[FileSystem, A])
: ViewInterpResultTrace[A] =
viewInterpTrace(Map.empty[AFile, Fix[Sql]], vcache, List.empty[AFile], Map.empty[ADir, Set[Node]], t)
def viewInterpTrace[A](
views: Map[AFile, Fix[Sql]], vcache: Map[AFile, ViewCache], files: List[AFile], paths: Map[ADir, Set[Node]], t: Free[FileSystem, A])
: ViewInterpResultTrace[A] = {
val mountViews: Free[ViewFileSystem, Unit] =
views.toList.traverse_ { case (loc, expr) => mounting.mountView(loc, ScopedExpr(expr, Nil), Variables.empty) }
val initVCache: Free[ViewFileSystem, Unit] =
vcache.toList.traverse_ { case (f, vc) => VCacheKVS.Ops[ViewFileSystem].put(f, vc) }
val toBeTraced: Free[ViewFileSystem, A] =
mountViews *> initVCache *> t.flatMapSuspension(view.fileSystem[ViewFileSystem])
val (renderedTrees, (vs, r)) =
toBeTraced.foldMap(traceViewFs(paths))
.run.run(VS(0, Map.empty, Map.empty, Map.empty, InMemState.empty)).run
ViewInterpResultTrace(renderedTrees, vs, r)
}
case class ViewInterpResult[A](vs: VS, result: Errs \\/ A)
def viewInterp[A](
views: Map[AFile, Fix[Sql]],
files: List[AFile],
f: Free[FileSystem, A]
) : ViewInterpResult[A] = {
val viewfs: ViewFileSystem ~> VFS =
runViewFileSystem[VFS](liftMT[VSS, ErrsT] compose zoomNT[Id](VS.fs) compose InMemory.fileSystem)
val memState = InMemState.fromFiles(files.strengthR(Vector[Data]()).toMap)
val (vs, r) =
f.foldMap(free.foldMapNT(viewfs) compose view.fileSystem[ViewFileSystem])
.run.run(VS.fs.set(memState)(VS.emptyWithViews(views)))
ViewInterpResult(vs, r)
}
implicit val RenderedTreeRenderTree = new RenderTree[RenderedTree] {
def render(t: RenderedTree) = t
}
"ReadFile.open" should {
"translate simple read to query" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from `/zips`"
val lp = queryPlan(expr, Variables.empty, rootDir, 0L, None).run.run._2
.valueOr(e => scala.sys.error("Unexpected semantic errors during compilation: " + e.shows))
val views = Map(p -> expr)
val f = (for {
h <- read.unsafe.open(p, 0L, None)
_ <- read.unsafe.read(h)
_ <- EitherT.rightT(read.unsafe.close(h))
} yield ()).run
val exp = (for {
h <- query.unsafe.eval(lp)
_ <- query.transforms.fsErrToExec(query.unsafe.more(h))
_ <- query.transforms.fsErrToExec(EitherT.rightT(query.unsafe.close(h)))
} yield ()).run.run
viewInterpTrace(views, Map(), f).renderedTrees must beTree(traceInterp(exp, Map())._1)
}
"translate limited read to query" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from `/zips`"
val views = Map(p -> expr)
val f = (for {
h <- read.unsafe.open(p, 5L, Some(10L))
_ <- read.unsafe.read(h)
_ <- EitherT.rightT(read.unsafe.close(h))
} yield ()).run
val expQ =
Fix(Take(
Fix(Drop(
Fix(Squash(lpf.read(rootDir </> file("zips")))),
lpf.constant(Data.Int(5)))),
lpf.constant(Data.Int(10))))
val exp = (for {
h <- query.unsafe.eval(expQ)
_ <- query.transforms.fsErrToExec(
query.unsafe.more(h))
_ <- query.transforms.fsErrToExec(
EitherT.rightT(query.unsafe.close(h)))
} yield ()).run.run
viewInterpTrace(views, Map(), f).renderedTrees must beTree(traceInterp(exp, Map())._1)
}
"translate read with view-view reference" in {
val p0 = rootDir[Sandboxed] </> dir("view") </> file("view0")
val p1 = rootDir[Sandboxed] </> dir("view") </> file("view1")
val views = Map(
p0 -> sqlE"select * from `/zips`",
p1 -> sqlE"select * from view0")
val f = (for {
h <- read.unsafe.open(p1, 0L, None)
_ <- read.unsafe.read(h)
_ <- EitherT.rightT(read.unsafe.close(h))
} yield ()).run
val expQ = Fix(Squash(lpf.read(rootDir </> file("zips"))))
val exp = (for {
h <- query.unsafe.eval(expQ)
_ <- query.transforms.fsErrToExec(
query.unsafe.more(h))
_ <- query.transforms.fsErrToExec(
EitherT.rightT(query.unsafe.close(h)))
} yield ()).run.run
viewInterpTrace(views, Map(), f).renderedTrees must beTree(traceInterp(exp, Map())._1)
}
"read from closed handle (error)" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from zips"
val views = Map(p -> expr)
val f = (for {
h <- read.unsafe.open(p, 0L, None)
_ <- EitherT.rightT(read.unsafe.close(h))
_ <- read.unsafe.read(h)
} yield ()).run
viewInterpTrace(views, Map(), f).result must_=== \\/.right(\\/.left((unknownReadHandle(ReadFile.ReadHandle(p, 0)))))
}
"double close (no-op)" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from zips"
val views = Map(p -> expr)
val f = (for {
h <- read.unsafe.open(p, 0L, None)
_ <- EitherT.rightT(read.unsafe.close(h))
_ <- EitherT.rightT(read.unsafe.close(h))
} yield ()).run
viewInterpTrace(views, Map(), f).result must_=== \\/.right(\\/.right(()))
}
}
"WriteFile.open" should {
"fail with view path" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from zips"
val views = Map(p -> expr)
val f = write.unsafe.open(p).run
viewInterpTrace(views, Map(), f) must_=== ViewInterpResultTrace(
Vector.empty,
VS.emptyWithViews(views),
\\/.right(\\/.left(FileSystemError.pathErr(PathError.invalidPath(p, "Cannot write to a view.")))))
}
}
"ManageFile.move" should {
import ManageFile._, PathPair._, MoveSemantics._
val srcPath = rootDir </> dir("view") </> file("simpleZips")
val dstPath = rootDir </> dir("foo") </> file("bar")
val expr = sqlE"select * from zips"
def moveShouldSucceed(views: Map[AFile, Fix[Sql]], files: List[AFile], moveSemantic: MoveSemantics) = {
val f = manage.move(fileToFile(srcPath, dstPath), moveSemantic).run
viewInterp(views, files, f) must_=== ViewInterpResult(
VS.emptyWithViews(Map(dstPath -> expr)),
\\/.right(\\/.right(())))
}
def moveShouldFail
(views: Map[AFile, Fix[Sql]], files: List[AFile], moveSemantic: MoveSemantics, pathError: PathError) = {
val f = manage.move(fileToFile(srcPath, dstPath), moveSemantic).run
viewInterp(views, files, f) must_=== ViewInterpResult(
VS.fs.set(InMemState.fromFiles(files.strengthR(Vector[Data]()).toMap))(VS.emptyWithViews(views)),
\\/.right(\\/.left(FileSystemError.pathErr(pathError))))
}
"succeed when destination view exists and semantic is Overwrite" in
moveShouldSucceed(Map(srcPath -> expr, dstPath -> expr), Nil, Overwrite)
"succeed when destination file exists and semantic is Overwrite" in
moveShouldSucceed(Map(srcPath -> expr), List(dstPath), Overwrite)
"succeed when destination doesn't exist and semantic is Overwrite" in
moveShouldSucceed(Map(srcPath -> expr), Nil, Overwrite)
"succeed when destination doesn't exist and semantic is FailIfExists" in
moveShouldSucceed(Map(srcPath -> expr), Nil, FailIfExists)
"fail when destination view exists and semantic is FailIfExists" in
moveShouldFail(Map(srcPath -> expr, dstPath -> expr), Nil, FailIfExists, PathError.pathExists(dstPath))
"fail when destination file exists and semantic is FailIfExists" in
moveShouldFail(Map(srcPath -> expr), List(dstPath), FailIfExists, PathError.pathExists(dstPath))
"succeed when destination view exists and semantic is FailIfMissing" in
moveShouldSucceed(Map(srcPath -> expr, dstPath -> expr), Nil, FailIfMissing)
"succeed when destination file exists and semantic is FailIfMissing" in
moveShouldSucceed(Map(srcPath -> expr), List(dstPath), FailIfMissing)
"fail when destination doesn't exist and semantic is FailIfMissing" in
moveShouldFail(Map(srcPath -> expr), Nil, FailIfMissing, PathError.pathNotFound(dstPath))
"succeed when src and dst directory is outside the underlying filesystem" >> {
val v1 = rootDir[Sandboxed] </> dir("view") </> file("viewA")
val v2 = rootDir[Sandboxed] </> dir("view") </> file("viewB")
val destDir = rootDir[Sandboxed] </> dir("zoo")
val expr = sqlE"select * from zips"
val f = manage.move(dirToDir(rootDir </> dir("view"), destDir), MoveSemantics.FailIfExists).run
viewInterp(Map(v1 -> expr, v2 -> expr), Nil, f) must_=== ViewInterpResult(
VS.emptyWithViews(Map((destDir </> file("viewA")) -> expr, (destDir </> file("viewB")) -> expr)),
\\/.right(\\/.right(())))
}
"move view and file subpaths" in {
val srcDir = rootDir[Sandboxed] </> dir("view")
val destDir = rootDir[Sandboxed] </> dir("zoo")
val viewFile = file("simpleZips")
val dataFile = file("complexFile")
val expr = sqlE"select * from zips"
val f = manage.move(dirToDir(srcDir, destDir), MoveSemantics.FailIfExists).run
viewInterp(Map((srcDir </> viewFile) -> expr), List(srcDir </> dataFile), f) must_=== ViewInterpResult(
VS.emptyWithViews(Map((destDir </> viewFile) -> expr))
.copy(fs = InMemState.fromFiles(List(destDir </> dataFile).map(_ -> Vector[Data]()).toMap)),
\\/.right(\\/.right(())))
}
"move view cache" >> prop { (f1: AFile, f2: AFile) =>
val expr = sqlB"Ξ±"
val viewCache = ViewCache(
MountConfig.ViewConfig(expr, Variables.empty), None, None, 0, None, None,
600L, Instant.ofEpochSecond(0), ViewCache.Status.Pending, None, f1, None)
val vc = Map(f1 -> viewCache)
val f = manage.move(fileToFile(f1, f2), MoveSemantics.FailIfExists).run
viewInterpTrace(vc, f) must_=== ViewInterpResultTrace(
Vector.empty,
VS.empty.copy(vcache = Map(f2 -> viewCache)),
\\/.right(\\/.right(())))
}
}
"ManageFile.delete" should {
"delete with view path" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from zips"
val views = Map(p -> expr)
val f = manage.delete(p).run
viewInterpTrace(views, Map(), f) must_=== ViewInterpResultTrace(Vector.empty, VS.empty, \\/.right(\\/.right(())))
}
"delete with view subpath" in {
val vp = rootDir[Sandboxed] </> dir("view")
val p = vp </> file("simpleZips")
val expr = sqlE"select * from zips"
val views = Map(p -> expr)
val f = manage.delete(vp).run
viewInterpTrace(views, Map(), f) must_=== ViewInterpResultTrace(
traceInterp(f, Map())._1,
VS.empty,
\\/.right(\\/.right(())))
}
"delete with view cache" >> prop { (p: AFile) =>
val expr = sqlB"Ξ±"
val viewCache = ViewCache(
MountConfig.ViewConfig(expr, Variables.empty), None, None, 0, None, None,
600L, Instant.ofEpochSecond(0), ViewCache.Status.Pending, None, p, None)
val vc = Map(p -> viewCache)
val f = manage.delete(p).run
viewInterpTrace(vc, f) must_=== ViewInterpResultTrace(
traceInterp(f, Map())._1,
VS.empty,
\\/.right(\\/.right(())))
}
}
"QueryFile.exec" should {
"handle simple query" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from `/zips`"
val views = Map(p -> expr)
val f = query.execute(lpf.read(rootDir </> dir("view") </> file("simpleZips")), rootDir </> file("tmp")).run.run
val exp = query.execute(Fix(Squash(lpf.read(rootDir </> file("zips")))), rootDir </> file("tmp")).run.run
viewInterpTrace(views, Map(), f).renderedTrees must beTree(traceInterp(exp, Map())._1)
}
}
"QueryFile.eval" should {
"handle simple query" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from `/zips`"
val views = Map(p -> expr)
val f = (for {
h <- query.unsafe.eval(lpf.read(rootDir </> dir("view") </> file("simpleZips")))
_ <- query.transforms.fsErrToExec(
query.unsafe.more(h))
_ <- query.transforms.toExec(
query.unsafe.close(h))
} yield ()).run.run
val exp = (for {
h <- query.unsafe.eval(Fix(Squash(lpf.read(rootDir </> file("zips")))))
_ <- query.transforms.fsErrToExec(
query.unsafe.more(h))
_ <- query.transforms.toExec(
query.unsafe.close(h))
} yield ()).run.run
viewInterpTrace(views, Map(), f).renderedTrees must beTree(traceInterp(exp, Map())._1)
}
}
"QueryFile.explain" should {
"handle simple query" in {
val p = rootDir[Sandboxed] </> dir("view") </> file("simpleZips")
val expr = sqlE"select * from `/zips`"
val views = Map(p -> expr)
val f = query.explain(lpf.read(rootDir </> dir("view") </> file("simpleZips"))).run.run
val exp = query.explain(Fix(Squash(lpf.read(rootDir </> file("zips"))))).run.run
viewInterpTrace(views, Map(), f).renderedTrees must beTree(traceInterp(exp, Map())._1)
}
}
"QueryFile.ls" should {
def twoNodes(aDir: ADir) =
Map(aDir -> Set[Node](Node.Data(FileName("afile")), Node.ImplicitDir(DirName("adir"))))
"preserve files and dirs in the presence of non-conflicting views" >> prop { (aDir: ADir) =>
val expr = sqlE"select * from zips"
val views = Map(
(aDir </> file("view1")) -> expr,
(aDir </> dir("views") </> file("view2")) -> expr)
val f = query.ls(aDir).run
viewInterpTrace(views, twoNodes(aDir), f) must_=== ViewInterpResultTrace(
traceInterp(f, twoNodes(aDir))._1,
VS.emptyWithViews(views),
\/.right(\/.right(Set(
Node.Data(FileName("afile")),
Node.ImplicitDir(DirName("adir")),
Node.View(FileName("view1")),
Node.ImplicitDir(DirName("views"))))))
}
"overlay files and dirs with conflicting paths" >> prop { (aDir: ADir) =>
val expr = sqlE"select * from zips"
val views = Map(
(aDir </> file("afile")) -> expr,
(aDir </> dir("adir") </> file("view1")) -> expr)
val f = query.ls(aDir).run
viewInterpTrace(views, twoNodes(aDir), f) must_=== ViewInterpResultTrace(
traceInterp(f, twoNodes(aDir))._1,
VS.emptyWithViews(views),
\/.right(\/.right(Set(
Node.View(FileName("afile")),
Node.Data(FileName("afile")),
Node.ImplicitDir(DirName("adir")))))) // no conflict with same dir
}
"preserve empty dir result" >> prop { (aDir: ADir) =>
val views = Map[AFile, Fix[Sql]]()
val f = query.ls(aDir).run
viewInterpTrace(views, Map(aDir -> Set()), f) must_=== ViewInterpResultTrace(
traceInterp(f, Map(aDir -> Set()))._1,
VS.emptyWithViews(views),
\/.right(\/.right((Set()))))
}
"preserve error for non-existent dir" >> prop { (aDir: ADir) =>
(aDir =/= rootDir) ==> {
val views = Map[AFile, Fix[Sql]]()
val f = query.ls(aDir).run
viewInterpTrace(views, Map(), f) must_=== ViewInterpResultTrace(
traceInterp(f, Map())._1,
VS.emptyWithViews(views),
\/.right(\/.left(FileSystemError.pathErr(PathError.pathNotFound(aDir)))))
}
}
"preserve empty dir result at root" in {
val views = Map[AFile, Fix[Sql]]()
val f = query.ls(rootDir).run
viewInterpTrace(views, Map(), f) must_=== ViewInterpResultTrace(
traceInterp(f, Map())._1,
VS.emptyWithViews(views),
\/.right(\/.right((Set()))))
}
}
"QueryFile.fileExists" should {
"behave as underlying interpreter" >> prop { file: AFile =>
val program = query.fileExists(file)
val ops = traceInterp(program, Map())._1
val hasFile = {
val paths = Map(fileParent(file) -> Set[Node](Node.Data(fileName(file))))
viewInterpTrace(Map(), paths, program) must_=== ViewInterpResultTrace(ops, VS.empty, \/.right(true))
}
val noFile = {
viewInterpTrace(Map(), Map(), program) must_=== ViewInterpResultTrace(ops, VS.empty, \/.right(false))
}
hasFile and noFile
}
"return true if there is a view at that path" >> prop { (file: AFile, expr: Fix[Sql]) =>
val views = Map(file -> expr)
val program = query.fileExists(file)
viewInterp(views, Nil, program) must_=== ViewInterpResult(
VS.emptyWithViews(views),
\/.right(true))
}
}
"resolveViewRefs" >> {
def unsafeParse(sqlQry: String): Fix[Sql] =
sql.fixParser.parseExpr(sqlQry).valueOr(_ => scala.sys.error("Expected sql query to parse but it did not"))
type Eff[A] = Coproduct[Mounting, VCacheKVS, A]
def resolvedRefsVC[A](
views: Map[AFile, Fix[Sql]], vcache: Map[AFile, ViewCache], lp: Fix[LogicalPlan]
): FileSystemError \/ Fix[LogicalPlan] =
view.resolveViewRefs[Eff](lp).run
.foldMap(runMounting[State[VS, ?]] :+: runVCache[State[VS, ?]])
.eval(VS.emptyWithViews(views).copy(vcache = vcache))
def resolvedRefs[A](views: Map[AFile, Fix[Sql]], lp: Fix[LogicalPlan]): FileSystemError \/ Fix[LogicalPlan] =
resolvedRefsVC(views, Map.empty, lp)
val nineteenSixty = Instant.parse("1960-01-01T00:00:00.00Z")
"no match" >> {
resolvedRefs(Map(), lpf.read(rootDir </> file("zips"))) must
beRightDisjunction.like { case r => r must beTreeEqual(lpf.read(rootDir </> file("zips"))) }
}
"trivial read" >> {
val p = rootDir </> dir("view") </> file("justZips")
val vs = Map[AFile, Fix[Sql]](p -> sqlE"select * from `/zips`")
resolvedRefs(vs, lpf.read(p)) must beRightDisjunction.like {
case r => r must beTreeEqual(
Fix(Squash(lpf.read(rootDir </> file("zips"))))
)
}
}
"trivial view cache read" >> {
val fa = rootDir </> file("a")
val fb = rootDir </> file("b")
val viewCache =
ViewCache(
MountConfig.ViewConfig(sqlB"Ξ±", Variables.empty), None, None, 0, None, None,
4.seconds.toSeconds, nineteenSixty, ViewCache.Status.Successful, None, fb, None)
resolvedRefsVC(Map.empty, Map(fa -> viewCache), lpf.read(fa)) must beRightDisjunction.like {
case r => r must beTreeEqual(
lpf.read(rootDir </> file("b")))
}
}
"trivial read with relative path" >> {
val p = rootDir </> dir("foo") </> file("justZips")
val vs = Map[AFile, Fix[Sql]](p -> sqlE"select * from zips")
resolvedRefs(vs, lpf.read(p)) must beRightDisjunction.like {
case r => r must beTreeEqual(
Fix(Squash(lpf.read(rootDir </> dir("foo") </> file("zips"))))
)
}
}
"non-trivial" >> {
val inner = sqlE"select city, state from `/zips` order by state"
val p = rootDir </> dir("view") </> file("simpleZips")
val outer =
Take(
Drop(
lpf.read(p),
lpf.constant(Data.Int(5))).embed,
lpf.constant(Data.Int(10))).embed
val innerLP =
quasar.precompile[Fix[LogicalPlan]](inner, Variables.empty, fileParent(p)).run.value.toOption.get
val vs = Map[AFile, Fix[Sql]](p -> inner)
val exp = quasar.preparePlan(Take(
Drop(
innerLP,
lpf.constant(Data.Int(5))).embed,
lpf.constant(Data.Int(10))).embed).run.value.toOption.get
resolvedRefs(vs, outer) must beRightDisjunction.like {
case r => r must beTreeEqual(exp)
}
}
"multi-level" >> {
val vs = Map[AFile, Fix[Sql]](
(rootDir </> dir("view") </> file("view1")) ->
sqlE"select * from `/zips`",
(rootDir </> dir("view") </> file("view2")) ->
sqlE"select * from view1")
resolvedRefs(vs, lpf.read(rootDir </> dir("view") </> file("view2"))) must
beRightDisjunction.like { case r => r must beTreeEqual(
Squash(lpf.read(rootDir </> file("zips"))).embed)
}
}
"multi-level with view cache" >> {
val vs = Map[AFile, Fix[Sql]](
(rootDir </> file("view")) ->
sqlE"select * from vcache")
val dest = rootDir </> file("dest")
val vcache = Map[AFile, ViewCache](
(rootDir </> file("vcache")) -> ViewCache(
MountConfig.ViewConfig(sqlB"Ξ±", Variables.empty), None, None, 0, None, None,
4.seconds.toSeconds, nineteenSixty, ViewCache.Status.Successful, None, dest, None))
resolvedRefsVC(vs, vcache, lpf.read(rootDir </> file("view"))) must beRightDisjunction.like {
case r => r must beTreeEqual(
Squash(lpf.read(dest)).embed)
}
}
// Several tests for edge cases with view references:
"multiple references" >> {
// NB: joining a view to itself means two expanded reads. The main point is
// that these references should not be mistaken for a circular reference.
val vp = rootDir </> dir("view") </> file("view1")
val zp = rootDir </> file("zips")
val vs = Map[AFile, Fix[Sql]](
vp -> sqlE"select * from `/zips`")
val q = lpf.join(
lpf.read(vp),
lpf.read(vp),
JoinType.Inner,
JoinCondition('__leftJoin0, '__rightJoin1, lpf.constant(Data.Bool(true))))
val exp = lpf.join(
Squash(lpf.read(zp)).embed,
Squash(lpf.read(zp)).embed,
JoinType.Inner,
JoinCondition('__leftJoin2, '__rightJoin3, lpf.constant(Data.Bool(true))))
resolvedRefs(vs, q) must beRightDisjunction.like { case r => r must beTreeEqual(exp) }
}
"self reference" >> {
// NB: resolves to a read on the underlying collection, allowing a view
// to act like a filter or decorator for an existing collection.
val p = rootDir </> dir("foo") </> file("bar")
val q = unsafeParse(s"select * from `${posixCodec.printPath(p)}` limit 10")
val qlp =
quasar.queryPlan(q, Variables.empty, rootDir, 0L, None)
.run.value.valueOr(e => scala.sys.error("Unexpected error compiling sql query: " + e.shows))
val vs = Map[AFile, Fix[Sql]](p -> q)
resolvedRefs(vs, lpf.read(p)) must beRightDisjunction.like { case r => r must beTreeEqual(qlp) }
}
"circular reference" >> {
// NB: this situation probably results from user error, but since this is
// now the _only_ way the view definitions can be ill-formed, it seems
// like a shame to introduce `\/` just to handle this case. Instead,
// the inner reference is treated the same way as self-references, and
// left un-expanded. That means the user will see an error when the query
// is evaluated and there turns out to be no actual file called "view2".
val v1p = rootDir </> dir("view") </> file("view1")
val v2p = rootDir </> dir("view") </> file("view2")
val vs = Map[AFile, Fix[Sql]](
v1p -> unsafeParse(s"select * from `${posixCodec.printPath(v2p)}` offset 5"),
v2p -> unsafeParse(s"select * from `${posixCodec.printPath(v1p)}` limit 10"))
resolvedRefs(vs, lpf.read(v2p)) must beRightDisjunction.like {
case r => r must beTreeEqual(
Take(
Squash(Drop(
Squash(lpf.read(v2p)).embed,
lpf.constant(Data.Int(5))).embed).embed,
lpf.constant(Data.Int(10))).embed
)
}
}
}
}
| jedesah/Quasar | core/src/test/scala/quasar/fs/mount/ViewFileSystemSpec.scala | Scala | apache-2.0 | 28,000 |
package controllers
import play.api.i18n.{Lang, Messages, MessagesProvider}
import helpers.Forms._
import java.time.Instant
import models._
import java.util.Locale
import play.api.data.Forms._
import java.sql.Connection
import javax.inject.{Inject, Singleton}
import constraints.FormConstraints
import controllers.NeedLogin.Authenticated
import play.api.data.Form
import play.api.db.Database
import play.api.mvc.{AnyContent, MessagesAbstractController, MessagesControllerComponents}
import play.api.data.validation.Constraints._
@Singleton
class ProfileMaintenance @Inject() (
cc: MessagesControllerComponents,
fc: FormConstraints,
authenticated: Authenticated,
userMetadataMaintenance: UserMetadataMaintenance,
implicit val modifyUserProfileRepo: ModifyUserProfileRepo,
implicit val db: Database,
implicit val entryUserRegistrationRepo: EntryUserRegistrationRepo,
implicit val storeUserRepo: StoreUserRepo,
implicit val shoppingCartItemRepo: ShoppingCartItemRepo
) extends MessagesAbstractController(cc) with I18n {
def changeProfileForm(implicit mp: MessagesProvider) = Form(
mapping(
"firstName" -> text.verifying(fc.firstNameConstraint: _*),
"middleName" -> optional(text),
"lastName" -> text.verifying(fc.lastNameConstraint: _*),
"email" -> email.verifying(fc.emailConstraint: _*),
"password" -> text.verifying(fc.passwordConstraint: _*),
"photoUrl" -> optional(text.verifying(maxLength(1024))),
"firstNameKana" -> optional(text.verifying(maxLength(64))),
"middleNameKana" -> optional(text.verifying(maxLength(64))),
"lastNameKana" -> optional(text.verifying(maxLength(64))),
"telNo0" -> optional(text.verifying(maxLength(64))),
"telNo1" -> optional(text.verifying(maxLength(64))),
"telNo2" -> optional(text.verifying(maxLength(64))),
"joinedDate" -> optional(instant(Messages("joind.date.format"))),
"birthMonthDay" -> optional(text.verifying(userMetadataMaintenance.birthMonthDayConstraint)),
"profileComment" -> optional(text.verifying(maxLength(8192)))
)(ModifyUserProfile.apply)(ModifyUserProfile.unapply)
)
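// NB: `instant(...)` above is a custom field mapping from helpers.Forms (imported at
// the top of this file); presumably it binds an Instant using the supplied localized
// date pattern.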
def index() = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
implicit val login = request.login
Ok(views.html.profileMaintenance())
}
def changeProfile() = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
implicit val login: LoginSession = request.login
val form = changeProfileForm.fill(modifyUserProfileRepo(login))
request.acceptLanguages.head match {
case `japanese` =>
Ok(views.html.changeUserProfileJa(form))
case `japan` =>
Ok(views.html.changeUserProfileJa(form))
case _ =>
Ok(views.html.changeUserProfileJa(form))
}
}
def doChangeProfile() = authenticated { implicit request: AuthMessagesRequest[AnyContent] =>
implicit val login: LoginSession = request.login
changeProfileForm.bindFromRequest.fold(
formWithErrors => {
BadRequest(views.html.changeUserProfileJa(formWithErrors))
},
updated => {
if (login.storeUser.passwordMatch(updated.password)) {
db.withConnection { implicit conn =>
updated.save(login)
}
Redirect(routes.Application.index).flashing("message" -> Messages("userProfileUpdated"))
}
else {
BadRequest(
views.html.changeUserProfileJa(
changeProfileForm.fill(updated).withError(
"password", "currentPasswordNotMatch"
)
)
)
}
}
)
}
}
| ruimo/store2 | app/controllers/ProfileMaintenance.scala | Scala | apache-2.0 | 3,667 |
package com.github.tminglei.slickpg
import java.sql.{Timestamp, Time, Date}
import java.util.UUID
import org.scalatest.FunSuite
import scala.collection.mutable.Buffer
import slick.driver.PostgresDriver
import slick.jdbc.GetResult
import scala.concurrent.Await
import scala.concurrent.duration._
class PgArraySupportSuite extends FunSuite {
import utils.SimpleArrayUtils._
//-- additional definitions
case class Institution(value: Long)
case class MarketFinancialProduct(value: String)
object MyPostgresDriver1 extends PostgresDriver with PgArraySupport {
override val api = new API with ArrayImplicits with MyArrayImplicitsPlus {}
///
trait MyArrayImplicitsPlus {
implicit val simpleLongBufferTypeMapper = new SimpleArrayJdbcType[Long]("int8").to(_.toBuffer)
implicit val simpleStrVectorTypeMapper = new SimpleArrayJdbcType[String]("text").to(_.toVector)
implicit val institutionListTypeWrapper = new SimpleArrayJdbcType[Institution]("int8")
.basedOn[Long](_.value, new Institution(_)).to(_.toList)
implicit val marketFinancialProductWrapper = new SimpleArrayJdbcType[MarketFinancialProduct]("text")
.basedOn[String](_.value, new MarketFinancialProduct(_)).to(_.toList)
///
implicit val advancedStringListTypeMapper = new AdvancedArrayJdbcType[String]("text",
fromString(identity)(_).orNull, mkString(identity))
}
}
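// A note on the mappers above (informal, not asserted by the suite):
// `SimpleArrayJdbcType[T](sqlType)` maps a Postgres array column to a Scala collection
// of T, `.basedOn(to, from)` reuses an existing element codec for a wrapper type such
// as Institution, and `.to(...)` selects the target collection type.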
//////////////////////////////////////////////////////////////////////////
import MyPostgresDriver1.api._
val db = Database.forURL(url = dbUrl, driver = "org.postgresql.Driver")
case class ArrayBean(
id: Long,
intArr: List[Int],
longArr: Buffer[Long],
shortArr: List[Short],
strList: List[String],
strArr: Option[Vector[String]],
uuidArr: List[UUID],
institutions: List[Institution],
mktFinancialProducts: Option[List[MarketFinancialProduct]]
)
class ArrayTestTable(tag: Tag) extends Table[ArrayBean](tag, "ArrayTest") {
def id = column[Long]("id", O.AutoInc, O.PrimaryKey)
def intArr = column[List[Int]]("intArray", O.Default(Nil))
def longArr = column[Buffer[Long]]("longArray")
def shortArr = column[List[Short]]("shortArray")
def strList = column[List[String]]("stringList")
def strArr = column[Option[Vector[String]]]("stringArray")
def uuidArr = column[List[UUID]]("uuidArray")
def institutions = column[List[Institution]]("institutions")
def mktFinancialProducts = column[Option[List[MarketFinancialProduct]]]("mktFinancialProducts")
def * = (id, intArr, longArr, shortArr, strList, strArr, uuidArr, institutions, mktFinancialProducts) <> (ArrayBean.tupled, ArrayBean.unapply)
}
val ArrayTests = TableQuery[ArrayTestTable]
//------------------------------------------------------------------------------
val uuid1 = UUID.randomUUID()
val uuid2 = UUID.randomUUID()
val uuid3 = UUID.randomUUID()
val testRec1 = ArrayBean(33L, List(101, 102, 103), Buffer(1L, 3L, 5L, 7L), List(1,7), List("robert}; drop table students--"),
Some(Vector("str1", "str3")), List(uuid1, uuid2), List(Institution(113)), None)
val testRec2 = ArrayBean(37L, List(101, 103), Buffer(11L, 31L, 5L), Nil, List(""),
Some(Vector("str11", "str3")), List(uuid1, uuid2, uuid3), List(Institution(579)), Some(List(MarketFinancialProduct("product1"))))
val testRec3 = ArrayBean(41L, List(103, 101), Buffer(11L, 5L, 31L), List(35,77), Nil,
Some(Vector("(s)", "str5", "str3")), List(uuid1, uuid3), Nil, Some(List(MarketFinancialProduct("product3"), MarketFinancialProduct("product x"))))
test("Array Lifted support") {
Await.result(db.run(
DBIO.seq(
(ArrayTests.schema) create,
ArrayTests forceInsertAll List(testRec1, testRec2, testRec3)
).andThen(
DBIO.seq(
ArrayTests.sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3) === r)
),
// any
ArrayTests.filter(101.bind === _.intArr.any).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3) === r)
),
// all
ArrayTests.filter(5L.bind <= _.longArr.all).sortBy(_.id).to[List].result.map(
r => assert(List(testRec2, testRec3) === r)
),
// @>
ArrayTests.filter(_.strArr @> Vector("str3")).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3) === r)
),
ArrayTests.filter(_.strArr @> Vector("str3").bind).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3) === r)
),
// <@
ArrayTests.filter(Vector("str3").bind <@: _.strArr).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3) === r)
),
// &&
ArrayTests.filter(_.longArr @& Buffer(5L, 17L).bind).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1, testRec2, testRec3) === r)
),
// length
ArrayTests.filter(_.longArr.length() > 3.bind).sortBy(_.id).to[List].result.map(
r => assert(List(testRec1) === r)
),
// unnest
ArrayTests.filter(5L.bind <= _.longArr.all).map(_.strArr.unnest).to[List].result.map(
r => assert((testRec2.strArr.get ++ testRec3.strArr.get).toList === r.map(_.orNull))
),
// concatenate
ArrayTests.filter(_.id === 33L.bind).map(_.intArr ++ List(105, 107).bind).result.head.map(
r => assert(List(101, 102, 103, 105, 107) === r)
),
ArrayTests.filter(_.id === 33L.bind).map(List(105, 107).bind ++ _.intArr).result.head.map(
r => assert(List(105, 107, 101, 102, 103) === r)
),
ArrayTests.filter(_.id === 33L.bind).map(_.intArr + 105.bind).result.head.map(
r => assert(List(101, 102, 103, 105) === r)
),
ArrayTests.filter(_.id === 33L.bind).map(105.bind +: _.intArr).result.head.map(
r => assert(List(105, 101, 102, 103) === r)
)
)
).andFinally(
(ArrayTests.schema) drop
).transactionally
), Duration.Inf)
}
//------------------------------------------------------------------------
case class ArrayBean1(
id: Long,
uuidArr: List[UUID],
strArr: Seq[String],
longArr: Seq[Long],
intArr: List[Int],
shortArr: Vector[Short],
floatArr: List[Float],
doubleArr: List[Double],
boolArr: Seq[Boolean],
dateArr: List[Date],
timeArr: List[Time],
tsArr: Seq[Timestamp]
)
test("Array Plain SQL support") {
import MyPostgresDriver.plainAPI._
implicit val getArrayBean1Result = GetResult(r =>
ArrayBean1(r.nextLong(),
r.nextArray[UUID]().toList,
r.nextArray[String](),
r.nextArray[Long](),
r.nextArray[Int]().toList,
r.nextArray[Short]().to[Vector],
r.nextArray[Float]().toList,
r.nextArray[Double]().toList,
r.nextArray[Boolean](),
r.nextArray[Date]().toList,
r.nextArray[Time]().toList,
r.nextArray[Timestamp]()
)
)
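// The GetResult above reads columns positionally, so its field order must match the
// column order of the ArrayTest1 DDL created below.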
val b = ArrayBean1(101L, List(UUID.randomUUID()), List("tewe", "ttt"), List(111L), List(1, 2), Vector(3, 5), List(1.2f, 43.32f), List(21.35d), List(true, true),
List(new Date(System.currentTimeMillis())), List(new Time(System.currentTimeMillis())), List(new Timestamp(System.currentTimeMillis())))
Await.result(db.run(
DBIO.seq(
sqlu"""create table ArrayTest1(
id int8 not null primary key,
uuid_arr uuid[] not null,
str_arr text[] not null,
long_arr int8[] not null,
int_arr int4[] not null,
short_arr int2[] not null,
float_arr float4[] not null,
double_arr float8[] not null,
bool_arr bool[] not null,
date_arr date[] not null,
time_arr time[] not null,
ts_arr timestamp[] not null)
""",
///
sqlu"insert into ArrayTest1 values(${b.id}, ${b.uuidArr}, ${b.strArr}, ${b.longArr}, ${b.intArr}, ${b.shortArr}, ${b.floatArr}, ${b.doubleArr}, ${b.boolArr}, ${b.dateArr}, ${b.timeArr}, ${b.tsArr})",
sql"select * from ArrayTest1 where id = ${b.id}".as[ArrayBean1].head.map(
f => {
b.uuidArr.zip(f.uuidArr).map(r => assert(r._1 === r._2))
b.strArr.zip(f.strArr).map(r => assert(r._1 === r._2))
b.longArr.zip(f.longArr).map(r => assert(r._1 === r._2))
b.intArr.zip(f.intArr).map(r => assert(r._1 === r._2))
b.shortArr.zip(f.shortArr).map(r => assert(r._1 === r._2))
b.floatArr.zip(f.floatArr).map(r => assert(Math.abs(r._1 - r._2) < 0.01f))
b.doubleArr.zip(f.doubleArr).map(r => assert(Math.abs(r._1 - r._2) < 0.01d))
b.boolArr.zip(f.boolArr).map(r => assert(r._1 === r._2))
b.dateArr.zip(f.dateArr).map(r => assert(r._1.toString === r._2.toString))
b.timeArr.zip(f.timeArr).map(r => assert(r._1.toString === r._2.toString))
b.tsArr.zip(f.tsArr).map(r => assert(r._1.toString === r._2.toString))
}
),
///
sqlu"drop table if exists ArrayTest1 cascade"
).transactionally
), Duration.Inf)
}
}
| bearrito/slick-pg | src/test/scala/com/github/tminglei/slickpg/PgArraySupportSuite.scala | Scala | bsd-2-clause | 9,451 |
/*
* Copyright (C) 2010-2014 GRNET S.A.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gr.grnet.pithosj.core.command
import com.twitter.finagle.httpx.Method.Head
import com.twitter.finagle.httpx.{Response, Status}
import gr.grnet.common.date.DateParsers
import gr.grnet.pithosj.core.ServiceInfo
import gr.grnet.pithosj.core.keymap.{PithosHeaderKeys, PithosResultKeys}
case class GetObjectInfoCommand(
serviceInfo: ServiceInfo,
container: String,
path: String
) extends PithosCommandSkeleton[GetObjectInfoResultData] {
/**
* The HTTP method by which the command is implemented.
*/
def httpMethod = Head
/**
* A set of all the HTTP status codes that are considered a success for this command.
*/
def successStatuses = Set(200).map(Status.fromCode)
/**
* Computes the URL path parts that will follow the Pithos+ server URL
* in the HTTP call.
*/
def serverRootPathElements = Seq(serviceInfo.rootPath, serviceInfo.uuid, container, path)
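// For illustration, with hypothetical values rootPath = "/v1" and uuid = "u-123" in
// serviceInfo, container = "pics" and path = "a.jpg", the request path is assembled
// from Seq("/v1", "u-123", "pics", "a.jpg").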
/**
* Type-safe keys for `HTTP` response headers that are specific to this command.
* These usually correspond to Pithos-specific headers, not general-purpose
* `HTTP` response headers, but there may be exceptions.
*
* Each command must document which keys it supports.
*/
override val responseHeaderKeys = Seq(
PithosHeaderKeys.Standard.ETag,
PithosHeaderKeys.Standard.Content_Type,
PithosHeaderKeys.Standard.Content_Length,
PithosHeaderKeys.Standard.Last_Modified,
PithosHeaderKeys.Pithos.X_Object_Hash,
PithosHeaderKeys.Pithos.X_Object_Modified_By,
PithosHeaderKeys.Pithos.X_Object_Version_Timestamp,
PithosHeaderKeys.Pithos.X_Object_UUID,
PithosHeaderKeys.Pithos.X_Object_Version
)
override val resultDataKeys = Seq(
PithosResultKeys.Commands.Container,
PithosResultKeys.Commands.Path
)
def buildResultData(response: Response, startMillis: Long, stopMillis: Long): GetObjectInfoResultData = {
val responseHeaders = response.headerMap
GetObjectInfoResultData(
container = container,
path = path,
ETag = responseHeaders.get(PithosHeaderKeys.Standard.ETag.name),
Content_Type = responseHeaders.get(PithosHeaderKeys.Standard.Content_Type.name),
Content_Length = responseHeaders.get(PithosHeaderKeys.Standard.Content_Length.name).map(_.toLong),
Last_Modified = responseHeaders.get(PithosHeaderKeys.Standard.Last_Modified.name).map(DateParsers.parse(_, DateParsers.Format2Parser)),
X_Object_Hash = responseHeaders.get(PithosHeaderKeys.Pithos.X_Object_Hash.name),
X_Object_Modified_By = responseHeaders.get(PithosHeaderKeys.Pithos.X_Object_Modified_By.name),
X_Object_Version_Timestamp = responseHeaders.get(PithosHeaderKeys.Pithos.X_Object_Version_Timestamp.name).map(DateParsers.parse(_, DateParsers.Format2Parser)),
X_Object_UUID = responseHeaders.get(PithosHeaderKeys.Pithos.X_Object_UUID.name),
X_Object_Version = responseHeaders.get(PithosHeaderKeys.Pithos.X_Object_Version.name)
)
}
}
| grnet/pithos-j | src/main/scala/gr/grnet/pithosj/core/command/GetObjectInfoCommand.scala | Scala | gpl-3.0 | 3,639 |
package gitbucket.core.controller
import gitbucket.core.model.WebHook
import gitbucket.core.pulls.html
import gitbucket.core.service.CommitStatusService
import gitbucket.core.service.MergeService
import gitbucket.core.service.IssuesService._
import gitbucket.core.service.PullRequestService._
import gitbucket.core.service.RepositoryService.RepositoryInfo
import gitbucket.core.service._
import gitbucket.core.util.ControlUtil._
import gitbucket.core.util.Directory._
import gitbucket.core.util.Implicits._
import gitbucket.core.util._
import io.github.gitbucket.scalatra.forms._
import org.eclipse.jgit.api.Git
import org.eclipse.jgit.lib.PersonIdent
import scala.collection.JavaConverters._
class PullRequestsController extends PullRequestsControllerBase
with RepositoryService with AccountService with IssuesService with PullRequestService with MilestonesService with LabelsService
with CommitsService with ActivityService with WebHookPullRequestService
with ReadableUsersAuthenticator with ReferrerAuthenticator with WritableUsersAuthenticator
with CommitStatusService with MergeService with ProtectedBranchService
trait PullRequestsControllerBase extends ControllerBase {
self: RepositoryService with AccountService with IssuesService with MilestonesService with LabelsService
with CommitsService with ActivityService with PullRequestService with WebHookPullRequestService
with ReadableUsersAuthenticator with ReferrerAuthenticator with WritableUsersAuthenticator
with CommitStatusService with MergeService with ProtectedBranchService =>
val pullRequestForm = mapping(
"title" -> trim(label("Title" , text(required, maxlength(100)))),
"content" -> trim(label("Content", optional(text()))),
"targetUserName" -> trim(text(required, maxlength(100))),
"targetBranch" -> trim(text(required, maxlength(100))),
"requestUserName" -> trim(text(required, maxlength(100))),
"requestRepositoryName" -> trim(text(required, maxlength(100))),
"requestBranch" -> trim(text(required, maxlength(100))),
"commitIdFrom" -> trim(text(required, maxlength(40))),
"commitIdTo" -> trim(text(required, maxlength(40))),
"assignedUserName" -> trim(optional(text())),
"milestoneId" -> trim(optional(number())),
"labelNames" -> trim(optional(text()))
)(PullRequestForm.apply)
val mergeForm = mapping(
"message" -> trim(label("Message", text(required)))
)(MergeForm.apply)
case class PullRequestForm(
title: String,
content: Option[String],
targetUserName: String,
targetBranch: String,
requestUserName: String,
requestRepositoryName: String,
requestBranch: String,
commitIdFrom: String,
commitIdTo: String,
assignedUserName: Option[String],
milestoneId: Option[Int],
labelNames: Option[String]
)
case class MergeForm(message: String)
get("/:owner/:repository/pulls")(referrersOnly { repository =>
val q = request.getParameter("q")
if(Option(q).exists(_.contains("is:issue"))){
redirect(s"/${repository.owner}/${repository.name}/issues?q=" + StringUtil.urlEncode(q))
} else {
searchPullRequests(None, repository)
}
})
get("/:owner/:repository/pull/:id")(referrersOnly { repository =>
params("id").toIntOpt.flatMap{ issueId =>
val owner = repository.owner
val name = repository.name
getPullRequest(owner, name, issueId) map { case(issue, pullreq) =>
using(Git.open(getRepositoryDir(owner, name))){ git =>
val (commits, diffs) =
getRequestCompareInfo(owner, name, pullreq.commitIdFrom, owner, name, pullreq.commitIdTo)
html.pullreq(
issue, pullreq,
(commits.flatten.map(commit => getCommitComments(owner, name, commit.id, true)).flatten.toList ::: getComments(owner, name, issueId))
.sortWith((a, b) => a.registeredDate before b.registeredDate),
getIssueLabels(owner, name, issueId),
getAssignableUserNames(owner, name),
getMilestonesWithIssueCount(owner, name),
getLabels(owner, name),
commits,
diffs,
isEditable(repository),
isManageable(repository),
repository,
flash.toMap.map(f => f._1 -> f._2.toString))
}
}
} getOrElse NotFound()
})
ajaxGet("/:owner/:repository/pull/:id/mergeguide")(referrersOnly { repository =>
params("id").toIntOpt.flatMap{ issueId =>
val owner = repository.owner
val name = repository.name
getPullRequest(owner, name, issueId) map { case(issue, pullreq) =>
val hasConflict = LockUtil.lock(s"${owner}/${name}"){
checkConflict(owner, name, pullreq.branch, issueId)
}
val hasMergePermission = hasDeveloperRole(owner, name, context.loginAccount)
val branchProtection = getProtectedBranchInfo(owner, name, pullreq.branch)
val mergeStatus = PullRequestService.MergeStatus(
hasConflict = hasConflict,
commitStatues = getCommitStatues(owner, name, pullreq.commitIdTo),
branchProtection = branchProtection,
branchIsOutOfDate = JGitUtil.getShaByRef(owner, name, pullreq.branch) != Some(pullreq.commitIdFrom),
needStatusCheck = context.loginAccount.map{ u =>
branchProtection.needStatusCheck(u.userName)
}.getOrElse(true),
hasUpdatePermission = hasDeveloperRole(pullreq.requestUserName, pullreq.requestRepositoryName, context.loginAccount) &&
context.loginAccount.map{ u =>
!getProtectedBranchInfo(pullreq.requestUserName, pullreq.requestRepositoryName, pullreq.requestBranch).needStatusCheck(u.userName)
}.getOrElse(false),
hasMergePermission = hasMergePermission,
commitIdTo = pullreq.commitIdTo)
html.mergeguide(
mergeStatus,
issue,
pullreq,
repository,
getRepository(pullreq.requestUserName, pullreq.requestRepositoryName).get)
}
} getOrElse NotFound()
})
get("/:owner/:repository/pull/:id/delete/*")(writableUsersOnly { repository =>
params("id").toIntOpt.map { issueId =>
val branchName = multiParams("splat").head
val userName = context.loginAccount.get.userName
if(repository.repository.defaultBranch != branchName){
using(Git.open(getRepositoryDir(repository.owner, repository.name))){ git =>
git.branchDelete().setForce(true).setBranchNames(branchName).call()
recordDeleteBranchActivity(repository.owner, repository.name, userName, branchName)
}
}
createComment(repository.owner, repository.name, userName, issueId, branchName, "delete_branch")
redirect(s"/${repository.owner}/${repository.name}/pull/${issueId}")
} getOrElse NotFound()
})
post("/:owner/:repository/pull/:id/update_branch")(writableUsersOnly { baseRepository =>
(for {
issueId <- params("id").toIntOpt
loginAccount <- context.loginAccount
(issue, pullreq) <- getPullRequest(baseRepository.owner, baseRepository.name, issueId)
owner = pullreq.requestUserName
name = pullreq.requestRepositoryName
if hasDeveloperRole(owner, name, context.loginAccount)
} yield {
val repository = getRepository(owner, name).get
val branchProtection = getProtectedBranchInfo(owner, name, pullreq.requestBranch)
if(branchProtection.needStatusCheck(loginAccount.userName)){
flash += "error" -> s"branch ${pullreq.requestBranch} is protected need status check."
} else {
LockUtil.lock(s"${owner}/${name}"){
val alias = if(pullreq.repositoryName == pullreq.requestRepositoryName && pullreq.userName == pullreq.requestUserName){
pullreq.branch
} else {
s"${pullreq.userName}:${pullreq.branch}"
}
val existIds = using(Git.open(Directory.getRepositoryDir(owner, name))) { git => JGitUtil.getAllCommitIds(git) }.toSet
pullRemote(owner, name, pullreq.requestBranch, pullreq.userName, pullreq.repositoryName, pullreq.branch, loginAccount,
s"Merge branch '${alias}' into ${pullreq.requestBranch}") match {
case None => // conflict
flash += "error" -> s"Can't automatic merging branch '${alias}' into ${pullreq.requestBranch}."
case Some(oldId) =>
// update pull request
updatePullRequests(owner, name, pullreq.requestBranch)
using(Git.open(Directory.getRepositoryDir(owner, name))) { git =>
// after update branch
val newCommitId = git.getRepository.resolve(s"refs/heads/${pullreq.requestBranch}")
val commits = git.log.addRange(oldId, newCommitId).call.iterator.asScala.map(c => new JGitUtil.CommitInfo(c)).toList
commits.foreach { commit =>
if(!existIds.contains(commit.id)){
createIssueComment(owner, name, commit)
}
}
// record activity
recordPushActivity(owner, name, loginAccount.userName, pullreq.branch, commits)
// close issue by commit message
if(pullreq.requestBranch == repository.repository.defaultBranch){
commits.map { commit =>
closeIssuesFromMessage(commit.fullMessage, loginAccount.userName, owner, name)
}
}
// call web hook
callPullRequestWebHookByRequestBranch("synchronize", repository, pullreq.requestBranch, baseUrl, loginAccount)
callWebHookOf(owner, name, WebHook.Push) {
for {
ownerAccount <- getAccountByUserName(owner)
} yield {
WebHookService.WebHookPushPayload(git, loginAccount, pullreq.requestBranch, repository, commits, ownerAccount, oldId = oldId, newId = newCommitId)
}
}
}
flash += "info" -> s"Merge branch '${alias}' into ${pullreq.requestBranch}"
}
}
}
redirect(s"/${repository.owner}/${repository.name}/pull/${issueId}")
}) getOrElse NotFound()
})
post("/:owner/:repository/pull/:id/merge", mergeForm)(writableUsersOnly { (form, repository) =>
params("id").toIntOpt.flatMap { issueId =>
val owner = repository.owner
val name = repository.name
LockUtil.lock(s"${owner}/${name}"){
getPullRequest(owner, name, issueId).map { case (issue, pullreq) =>
using(Git.open(getRepositoryDir(owner, name))) { git =>
// mark issue as merged and close.
val loginAccount = context.loginAccount.get
createComment(owner, name, loginAccount.userName, issueId, form.message, "merge")
createComment(owner, name, loginAccount.userName, issueId, "Close", "close")
updateClosed(owner, name, issueId, true)
// record activity
recordMergeActivity(owner, name, loginAccount.userName, issueId, form.message)
// merge git repository
mergePullRequest(git, pullreq.branch, issueId,
s"Merge pull request #${issueId} from ${pullreq.requestUserName}/${pullreq.requestBranch}\\n\\n" + form.message,
new PersonIdent(loginAccount.fullName, loginAccount.mailAddress))
val (commits, _) = getRequestCompareInfo(owner, name, pullreq.commitIdFrom,
pullreq.requestUserName, pullreq.requestRepositoryName, pullreq.commitIdTo)
// close issue by content of pull request
val defaultBranch = getRepository(owner, name).get.repository.defaultBranch
if(pullreq.branch == defaultBranch){
commits.flatten.foreach { commit =>
closeIssuesFromMessage(commit.fullMessage, loginAccount.userName, owner, name)
}
closeIssuesFromMessage(issue.title + " " + issue.content.getOrElse(""), loginAccount.userName, owner, name)
closeIssuesFromMessage(form.message, loginAccount.userName, owner, name)
}
updatePullRequests(owner, name, pullreq.branch)
// call web hook
callPullRequestWebHook("closed", repository, issueId, context.baseUrl, context.loginAccount.get)
// notifications
Notifier().toNotify(repository, issue, "merge"){
Notifier.msgStatus(s"${context.baseUrl}/${owner}/${name}/pull/${issueId}")
}
redirect(s"/${owner}/${name}/pull/${issueId}")
}
}
}
} getOrElse NotFound()
})
get("/:owner/:repository/compare")(referrersOnly { forkedRepository =>
val headBranch:Option[String] = params.get("head")
(forkedRepository.repository.originUserName, forkedRepository.repository.originRepositoryName) match {
case (Some(originUserName), Some(originRepositoryName)) => {
getRepository(originUserName, originRepositoryName).map { originRepository =>
using(
Git.open(getRepositoryDir(originUserName, originRepositoryName)),
Git.open(getRepositoryDir(forkedRepository.owner, forkedRepository.name))
){ (oldGit, newGit) =>
val newBranch = headBranch.getOrElse(JGitUtil.getDefaultBranch(newGit, forkedRepository).get._2)
val oldBranch = originRepository.branchList.find( _ == newBranch).getOrElse(JGitUtil.getDefaultBranch(oldGit, originRepository).get._2)
redirect(s"/${forkedRepository.owner}/${forkedRepository.name}/compare/${originUserName}:${oldBranch}...${newBranch}")
}
} getOrElse NotFound()
}
case _ => {
using(Git.open(getRepositoryDir(forkedRepository.owner, forkedRepository.name))){ git =>
JGitUtil.getDefaultBranch(git, forkedRepository).map { case (_, defaultBranch) =>
redirect(s"/${forkedRepository.owner}/${forkedRepository.name}/compare/${defaultBranch}...${headBranch.getOrElse(defaultBranch)}")
} getOrElse {
redirect(s"/${forkedRepository.owner}/${forkedRepository.name}")
}
}
}
}
})
get("/:owner/:repository/compare/*...*")(referrersOnly { forkedRepository =>
val Seq(origin, forked) = multiParams("splat")
val (originOwner, originId) = parseCompareIdentifier(origin, forkedRepository.owner)
val (forkedOwner, forkedId) = parseCompareIdentifier(forked, forkedRepository.owner)
(for(
originRepositoryName <- if(originOwner == forkedOwner) {
// Self repository
Some(forkedRepository.name)
} else if(forkedRepository.repository.originUserName.isEmpty){
// when ForkedRepository is the original repository
getForkedRepositories(forkedRepository.owner, forkedRepository.name).find(_._1 == originOwner).map(_._2)
} else if(Some(originOwner) == forkedRepository.repository.originUserName){
// Original repository
forkedRepository.repository.originRepositoryName
} else {
// Sibling repository
getUserRepositories(originOwner).find { x =>
x.repository.originUserName == forkedRepository.repository.originUserName &&
x.repository.originRepositoryName == forkedRepository.repository.originRepositoryName
}.map(_.repository.repositoryName)
};
originRepository <- getRepository(originOwner, originRepositoryName)
) yield {
using(
Git.open(getRepositoryDir(originRepository.owner, originRepository.name)),
Git.open(getRepositoryDir(forkedRepository.owner, forkedRepository.name))
){ case (oldGit, newGit) =>
val (oldId, newId) =
if(originRepository.branchList.contains(originId) && forkedRepository.branchList.contains(forkedId)){
// Branch name
val rootId = JGitUtil.getForkedCommitId(oldGit, newGit,
originRepository.owner, originRepository.name, originId,
forkedRepository.owner, forkedRepository.name, forkedId)
(Option(oldGit.getRepository.resolve(rootId)), Option(newGit.getRepository.resolve(forkedId)))
} else {
// Commit id
(Option(oldGit.getRepository.resolve(originId)), Option(newGit.getRepository.resolve(forkedId)))
}
(oldId, newId) match {
case (Some(oldId), Some(newId)) => {
val (commits, diffs) = getRequestCompareInfo(
originRepository.owner, originRepository.name, oldId.getName,
forkedRepository.owner, forkedRepository.name, newId.getName)
val title = if(commits.flatten.length == 1){
commits.flatten.head.shortMessage
} else {
val text = forkedId.replaceAll("[\\-_]", " ")
text.substring(0, 1).toUpperCase + text.substring(1)
}
html.compare(
title,
commits,
diffs,
(forkedRepository.repository.originUserName, forkedRepository.repository.originRepositoryName) match {
case (Some(userName), Some(repositoryName)) => (userName, repositoryName) :: getForkedRepositories(userName, repositoryName)
case _ => (forkedRepository.owner, forkedRepository.name) :: getForkedRepositories(forkedRepository.owner, forkedRepository.name)
},
commits.flatten.map(commit => getCommitComments(forkedRepository.owner, forkedRepository.name, commit.id, false)).flatten.toList,
originId,
forkedId,
oldId.getName,
newId.getName,
forkedRepository,
originRepository,
forkedRepository,
hasDeveloperRole(originRepository.owner, originRepository.name, context.loginAccount),
getAssignableUserNames(originRepository.owner, originRepository.name),
getMilestones(originRepository.owner, originRepository.name),
getLabels(originRepository.owner, originRepository.name)
)
}
case (oldId, newId) =>
redirect(s"/${forkedRepository.owner}/${forkedRepository.name}/compare/" +
s"${originOwner}:${oldId.map(_ => originId).getOrElse(originRepository.repository.defaultBranch)}..." +
s"${forkedOwner}:${newId.map(_ => forkedId).getOrElse(forkedRepository.repository.defaultBranch)}")
}
}
}) getOrElse NotFound()
})
ajaxGet("/:owner/:repository/compare/*...*/mergecheck")(readableUsersOnly { forkedRepository =>
val Seq(origin, forked) = multiParams("splat")
val (originOwner, tmpOriginBranch) = parseCompareIdentifier(origin, forkedRepository.owner)
val (forkedOwner, tmpForkedBranch) = parseCompareIdentifier(forked, forkedRepository.owner)
(for(
originRepositoryName <- if(originOwner == forkedOwner){
Some(forkedRepository.name)
} else {
forkedRepository.repository.originRepositoryName.orElse {
getForkedRepositories(forkedRepository.owner, forkedRepository.name).find(_._1 == originOwner).map(_._2)
}
};
originRepository <- getRepository(originOwner, originRepositoryName)
) yield {
using(
Git.open(getRepositoryDir(originRepository.owner, originRepository.name)),
Git.open(getRepositoryDir(forkedRepository.owner, forkedRepository.name))
){ case (oldGit, newGit) =>
val originBranch = JGitUtil.getDefaultBranch(oldGit, originRepository, tmpOriginBranch).get._2
val forkedBranch = JGitUtil.getDefaultBranch(newGit, forkedRepository, tmpForkedBranch).get._2
val conflict = LockUtil.lock(s"${originRepository.owner}/${originRepository.name}"){
checkConflict(originRepository.owner, originRepository.name, originBranch,
forkedRepository.owner, forkedRepository.name, forkedBranch)
}
html.mergecheck(conflict)
}
}) getOrElse NotFound()
})
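// Overview of the handler below: create the issue row and the pull request row,
// attach labels (managers only), fetch the requested branch, then record activity,
// fire the "opened" web hook and send notifications.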
post("/:owner/:repository/pulls/new", pullRequestForm)(readableUsersOnly { (form, repository) =>
defining(repository.owner, repository.name){ case (owner, name) =>
val manageable = isManageable(repository)
val editable = isEditable(repository)
if(editable) {
val loginUserName = context.loginAccount.get.userName
val issueId = createIssue(
owner = repository.owner,
repository = repository.name,
loginUser = loginUserName,
title = form.title,
content = form.content,
assignedUserName = if (manageable) form.assignedUserName else None,
milestoneId = if (manageable) form.milestoneId else None,
isPullRequest = true)
createPullRequest(
originUserName = repository.owner,
originRepositoryName = repository.name,
issueId = issueId,
originBranch = form.targetBranch,
requestUserName = form.requestUserName,
requestRepositoryName = form.requestRepositoryName,
requestBranch = form.requestBranch,
commitIdFrom = form.commitIdFrom,
commitIdTo = form.commitIdTo)
// insert labels
if (manageable) {
form.labelNames.map { value =>
val labels = getLabels(owner, name)
value.split(",").foreach { labelName =>
labels.find(_.labelName == labelName).map { label =>
registerIssueLabel(repository.owner, repository.name, issueId, label.labelId)
}
}
}
}
// fetch requested branch
fetchAsPullRequest(owner, name, form.requestUserName, form.requestRepositoryName, form.requestBranch, issueId)
// record activity
recordPullRequestActivity(owner, name, loginUserName, issueId, form.title)
// call web hook
callPullRequestWebHook("opened", repository, issueId, context.baseUrl, context.loginAccount.get)
getIssue(owner, name, issueId.toString) foreach { issue =>
// extract references and create refer comment
createReferComment(owner, name, issue, form.title + " " + form.content.getOrElse(""), context.loginAccount.get)
// notifications
Notifier().toNotify(repository, issue, form.content.getOrElse("")) {
Notifier.msgPullRequest(s"${context.baseUrl}/${owner}/${name}/pull/${issueId}")
}
}
redirect(s"/${owner}/${name}/pull/${issueId}")
} else Unauthorized()
}
})
/**
* Parses a branch identifier and extracts the owner and branch name as a tuple.
*
* - "owner:branch" to ("owner", "branch")
* - "branch" to ("defaultOwner", "branch")
*/
private def parseCompareIdentifier(value: String, defaultOwner: String): (String, String) =
if(value.contains(':')){
val array = value.split(":")
(array(0), array(1))
} else {
(defaultOwner, value)
}
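// For illustration: parseCompareIdentifier("alice:topic", "bob") == ("alice", "topic")
// and parseCompareIdentifier("topic", "bob") == ("bob", "topic").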
private def searchPullRequests(userName: Option[String], repository: RepositoryService.RepositoryInfo) =
defining(repository.owner, repository.name){ case (owner, repoName) =>
val page = IssueSearchCondition.page(request)
// retrieve search condition
val condition = IssueSearchCondition(request)
gitbucket.core.issues.html.list(
"pulls",
searchIssue(condition, true, (page - 1) * PullRequestLimit, PullRequestLimit, owner -> repoName),
page,
getAssignableUserNames(owner, repoName),
getMilestones(owner, repoName),
getLabels(owner, repoName),
countIssue(condition.copy(state = "open" ), true, owner -> repoName),
countIssue(condition.copy(state = "closed"), true, owner -> repoName),
condition,
repository,
isEditable(repository),
isManageable(repository))
}
/**
* Tests whether a logged-in user can manage pull requests.
*/
private def isManageable(repository: RepositoryInfo)(implicit context: Context): Boolean = {
hasDeveloperRole(repository.owner, repository.name, context.loginAccount)
}
/**
* Tests whether a logged-in user can post pull requests.
*/
private def isEditable(repository: RepositoryInfo)(implicit context: Context): Boolean = {
repository.repository.options.issuesOption match {
case "ALL" => !repository.repository.isPrivate && context.loginAccount.isDefined
case "PUBLIC" => hasGuestRole(repository.owner, repository.name, context.loginAccount)
case "PRIVATE" => hasDeveloperRole(repository.owner, repository.name, context.loginAccount)
case "DISABLE" => false
}
}
}
| zhoffice/gitbucket | src/main/scala/gitbucket/core/controller/PullRequestsController.scala | Scala | apache-2.0 | 25,021 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import java.io.{File, BufferedWriter, FileWriter}
import java.util.Properties
import kafka.server.KafkaConfig
import org.apache.kafka.common.utils.Java
object JaasTestUtils {
case class Krb5LoginModule(useKeyTab: Boolean,
storeKey: Boolean,
keyTab: String,
principal: String,
debug: Boolean,
serviceName: Option[String]) extends JaasModule {
def name =
if (Java.isIbmJdk)
"com.ibm.security.auth.module.Krb5LoginModule"
else
"com.sun.security.auth.module.Krb5LoginModule"
def entries: Map[String, String] =
if (Java.isIbmJdk)
Map(
"principal" -> principal,
"credsType" -> "both"
) ++ (if (useKeyTab) Map("useKeytab" -> s"file:$keyTab") else Map.empty)
else
Map(
"useKeyTab" -> useKeyTab.toString,
"storeKey" -> storeKey.toString,
"keyTab" -> keyTab,
"principal" -> principal
) ++ serviceName.map(s => Map("serviceName" -> s)).getOrElse(Map.empty)
}
case class PlainLoginModule(username: String,
password: String,
debug: Boolean = false,
validUsers: Map[String, String] = Map.empty) extends JaasModule {
def name = "org.apache.kafka.common.security.plain.PlainLoginModule"
def entries: Map[String, String] = Map(
"username" -> username,
"password" -> password
) ++ validUsers.map { case (user, pass) => s"user_$user" -> pass }
}
case class ZkDigestModule(debug: Boolean = false,
entries: Map[String, String] = Map.empty) extends JaasModule {
def name = "org.apache.zookeeper.server.auth.DigestLoginModule"
}
case class ScramLoginModule(username: String,
password: String,
debug: Boolean = false,
tokenProps: Map[String, String] = Map.empty) extends JaasModule {
def name = "org.apache.kafka.common.security.scram.ScramLoginModule"
def entries: Map[String, String] = Map(
"username" -> username,
"password" -> password
) ++ tokenProps.map { case (name, value) => name -> value }
}
case class OAuthBearerLoginModule(username: String,
debug: Boolean = false) extends JaasModule {
def name = "org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule"
def entries: Map[String, String] = Map(
"unsecuredLoginStringClaim_sub" -> username
)
}
sealed trait JaasModule {
def name: String
def debug: Boolean
def entries: Map[String, String]
override def toString: String = {
s"""$name required
| debug=$debug
| ${entries.map { case (k, v) => s"""$k="$v"""" }.mkString("", "\n| ", ";")}
|""".stripMargin
}
}
case class JaasSection(contextName: String, modules: Seq[JaasModule]) {
override def toString: String = {
s"""|$contextName {
| ${modules.mkString("\n ")}
|};
|""".stripMargin
}
}
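// For illustration (a sketch, not used elsewhere in this file), rendering
//   JaasSection("KafkaClient", Seq(PlainLoginModule("alice", "alice-secret")))
// via the toString methods above yields, modulo indentation:
//
//   KafkaClient {
//     org.apache.kafka.common.security.plain.PlainLoginModule required
//     debug=false
//     username="alice"
//     password="alice-secret";
//   };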
private val ZkServerContextName = "Server"
private val ZkClientContextName = "Client"
private val ZkUserSuperPasswd = "adminpasswd"
private val ZkUser = "fpj"
private val ZkUserPassword = "fpjsecret"
val KafkaServerContextName = "KafkaServer"
val KafkaServerPrincipalUnqualifiedName = "kafka"
private val KafkaServerPrincipal = KafkaServerPrincipalUnqualifiedName + "/[email protected]"
val KafkaClientContextName = "KafkaClient"
val KafkaClientPrincipalUnqualifiedName = "client"
private val KafkaClientPrincipal = KafkaClientPrincipalUnqualifiedName + "@EXAMPLE.COM"
val KafkaClientPrincipalUnqualifiedName2 = "client2"
private val KafkaClientPrincipal2 = KafkaClientPrincipalUnqualifiedName2 + "@EXAMPLE.COM"
val KafkaPlainUser = "plain-user"
private val KafkaPlainPassword = "plain-user-secret"
val KafkaPlainUser2 = "plain-user2"
val KafkaPlainPassword2 = "plain-user2-secret"
val KafkaPlainAdmin = "plain-admin"
private val KafkaPlainAdminPassword = "plain-admin-secret"
val KafkaScramUser = "scram-user"
val KafkaScramPassword = "scram-user-secret"
val KafkaScramUser2 = "scram-user2"
val KafkaScramPassword2 = "scram-user2-secret"
val KafkaScramAdmin = "scram-admin"
val KafkaScramAdminPassword = "scram-admin-secret"
val KafkaOAuthBearerUser = "oauthbearer-user"
val KafkaOAuthBearerUser2 = "oauthbearer-user2"
val KafkaOAuthBearerAdmin = "oauthbearer-admin"
val serviceName = "kafka"
def saslConfigs(saslProperties: Option[Properties]): Properties = {
val result = saslProperties match {
case Some(properties) => properties
case None => new Properties
}
// IBM Kerberos module doesn't support the serviceName JAAS property, hence it needs to be
// passed as a Kafka property
if (Java.isIbmJdk && !result.containsKey(KafkaConfig.SaslKerberosServiceNameProp))
result.put(KafkaConfig.SaslKerberosServiceNameProp, serviceName)
result
}
def writeJaasContextsToFile(jaasSections: Seq[JaasSection]): File = {
val jaasFile = TestUtils.tempFile()
writeToFile(jaasFile, jaasSections)
jaasFile
}
// Returns the dynamic configuration, using credentials for user #1
def clientLoginModule(mechanism: String, keytabLocation: Option[File], serviceName: String = serviceName): String =
kafkaClientModule(mechanism, keytabLocation, KafkaClientPrincipal, KafkaPlainUser, KafkaPlainPassword, KafkaScramUser, KafkaScramPassword, KafkaOAuthBearerUser, serviceName).toString
def tokenClientLoginModule(tokenId: String, password: String): String = {
ScramLoginModule(
tokenId,
password,
debug = false,
Map(
"tokenauth" -> "true"
)).toString
}
def zkSections: Seq[JaasSection] = Seq(
JaasSection(ZkServerContextName, Seq(ZkDigestModule(debug = false,
Map("user_super" -> ZkUserSuperPasswd, s"user_$ZkUser" -> ZkUserPassword)))),
JaasSection(ZkClientContextName, Seq(ZkDigestModule(debug = false,
Map("username" -> ZkUser, "password" -> ZkUserPassword))))
)
def kafkaServerSection(contextName: String, mechanisms: Seq[String], keytabLocation: Option[File]): JaasSection = {
val modules = mechanisms.map {
case "GSSAPI" =>
Krb5LoginModule(
useKeyTab = true,
storeKey = true,
keyTab = keytabLocation.getOrElse(throw new IllegalArgumentException("Keytab location not specified for GSSAPI")).getAbsolutePath,
principal = KafkaServerPrincipal,
debug = true,
serviceName = Some(serviceName))
case "PLAIN" =>
PlainLoginModule(
KafkaPlainAdmin,
KafkaPlainAdminPassword,
debug = false,
Map(
KafkaPlainAdmin -> KafkaPlainAdminPassword,
KafkaPlainUser -> KafkaPlainPassword,
KafkaPlainUser2 -> KafkaPlainPassword2
))
case "SCRAM-SHA-256" | "SCRAM-SHA-512" =>
ScramLoginModule(
KafkaScramAdmin,
KafkaScramAdminPassword,
debug = false)
case "OAUTHBEARER" =>
OAuthBearerLoginModule(KafkaOAuthBearerAdmin)
case mechanism => throw new IllegalArgumentException("Unsupported server mechanism " + mechanism)
}
JaasSection(contextName, modules)
}
// consider refactoring if more mechanisms are added
private def kafkaClientModule(mechanism: String,
keytabLocation: Option[File], clientPrincipal: String,
plainUser: String, plainPassword: String,
scramUser: String, scramPassword: String,
oauthBearerUser: String, serviceName: String = serviceName): JaasModule = {
mechanism match {
case "GSSAPI" =>
Krb5LoginModule(
useKeyTab = true,
storeKey = true,
keyTab = keytabLocation.getOrElse(throw new IllegalArgumentException("Keytab location not specified for GSSAPI")).getAbsolutePath,
principal = clientPrincipal,
debug = true,
serviceName = Some(serviceName)
)
case "PLAIN" =>
PlainLoginModule(
plainUser,
plainPassword
)
case "SCRAM-SHA-256" | "SCRAM-SHA-512" =>
ScramLoginModule(
scramUser,
scramPassword
)
case "OAUTHBEARER" =>
OAuthBearerLoginModule(
oauthBearerUser
)
case mechanism => throw new IllegalArgumentException("Unsupported client mechanism " + mechanism)
}
}
/*
* Used for the static JAAS configuration; uses the credentials for client #2
*/
def kafkaClientSection(mechanism: Option[String], keytabLocation: Option[File]): JaasSection = {
JaasSection(KafkaClientContextName, mechanism.map(m =>
kafkaClientModule(m, keytabLocation, KafkaClientPrincipal2, KafkaPlainUser2, KafkaPlainPassword2, KafkaScramUser2, KafkaScramPassword2, KafkaOAuthBearerUser2)).toSeq)
}
private def jaasSectionsToString(jaasSections: Seq[JaasSection]): String =
jaasSections.mkString
private def writeToFile(file: File, jaasSections: Seq[JaasSection]) {
val writer = new BufferedWriter(new FileWriter(file))
try writer.write(jaasSectionsToString(jaasSections))
finally writer.close()
}
}
| mihbor/kafka | core/src/test/scala/unit/kafka/utils/JaasTestUtils.scala | Scala | apache-2.0 | 10,259 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs105.boxes
import uk.gov.hmrc.ct.accounts.frs105.retriever.Frs105AccountsBoxRetriever
import uk.gov.hmrc.ct.box._
case class AC455(value: Option[Int]) extends CtBoxIdentifier(name = "Current assets (current PoA)")
with CtOptionalInteger
with Input
with ValidatableBox[Frs105AccountsBoxRetriever] {
override def validate(boxRetriever: Frs105AccountsBoxRetriever): Set[CtValidation] = {
collectErrors(
validateMoney(value, min = 0)
)
}
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs105/boxes/AC455.scala | Scala | apache-2.0 | 1,102 |
package it.unipd.dei.diversity.mllib
import it.unimi.dsi.fastutil.objects.Object2LongOpenHashMap
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.ml.linalg.{SQLDataTypes, Vectors}
import org.apache.spark.ml.param._
import org.apache.spark.ml.util.{Identifiable, MLWritable, MLWriter}
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types.{ArrayType, StringType, StructType}
import org.apache.spark.sql.{DataFrame, Dataset}
import scala.math.Ordering
// TODO Add binary parameter
trait TfIdfParams extends Params with InputCol with OutputCol {
/**
* Max size of the vocabulary.
* CountVectorizer will build a vocabulary that only considers the top
* vocabSize terms ordered by term frequency across the corpus.
*
* Default: 2^18^
*
* @group param
*/
val vocabSize: IntParam =
new IntParam(this, "vocabSize", "max size of the vocabulary", ParamValidators.gt(0))
/** @group getParam */
def getVocabSize: Int = $(vocabSize)
/**
* Specifies the minimum number of different documents a term must appear in to be included
* in the vocabulary.
* If this is an integer greater than or equal to 1, this specifies the number of documents
* the term must appear in; if this is a double in [0,1), then this specifies the fraction of
* documents.
*
* Default: 1.0
*
* @group param
*/
val minDF: DoubleParam = new DoubleParam(this, "minDF", "Specifies the minimum number of" +
" different documents a term must appear in to be included in the vocabulary." +
" If this is an integer >= 1, this specifies the number of documents the term must" +
" appear in; if this is a double in [0,1), then this specifies the fraction of documents.",
ParamValidators.gtEq(0.0))
/** @group getParam */
def getMinDF: Double = $(minDF)
/**
* Filter to ignore rare words in a document. For each document, terms with
* frequency/count less than the given threshold are ignored.
* If this is an integer greater than or equal to 1, then this specifies a count (of times the
* term must appear in the document);
* if this is a double in [0,1), then this specifies a fraction (out of the document's token
* count).
   *
* Default: 1.0
*
* @group param
*/
val minTF: DoubleParam = new DoubleParam(this, "minTF", "Filter to ignore rare words in" +
" a document. For each document, terms with frequency/count less than the given threshold are" +
" ignored. If this is an integer >= 1, then this specifies a count (of times the term must" +
" appear in the document); if this is a double in [0,1), then this specifies a fraction (out" +
" of the document's token count). Note that the parameter is only used in transform of" +
" CountVectorizerModel and does not affect fitting.", ParamValidators.gtEq(0.0))
/** @group getParam */
def getMinTF: Double = $(minTF)
setDefault(vocabSize -> (1 << 18), minDF -> 1.0, minTF -> 1.0)
/** Validates and transforms the input schema. */
protected def validateAndTransformSchema(schema: StructType): StructType = {
val typeCandidates = List(new ArrayType(StringType, true), new ArrayType(StringType, false))
SchemaUtils.checkColumnTypes(schema, $(inputCol), typeCandidates)
SchemaUtils.appendColumn(schema, $(outputCol), SQLDataTypes.VectorType)
}
}
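// Illustration of the minDF/minTF conventions documented above (values are examples,
// not defaults): an integer >= 1 is an absolute count, a double in [0, 1) a fraction.
//   setMinDF(2.0)  // keep terms that appear in at least 2 documents
//   setMinDF(0.25) // keep terms that appear in at least 25% of the documents
//   setMinTF(3.0)  // within a document, ignore terms occurring fewer than 3 times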
/**
* More efficient (especially for garbage collection pressure) implementation of Tf-Idf
*/
class TfIdf(override val uid: String)
extends Estimator[TfIdfModel] with TfIdfParams {
def this() = this(Identifiable.randomUID("tf-idf"))
def setVocabSize(value: Int): this.type = set(vocabSize, value)
def setMinDF(value: Double): this.type = set(minDF, value)
def setMinTF(value: Double): this.type = set(minTF, value)
override def fit(dataset: Dataset[_]): TfIdfModel = {
transformSchema(dataset.schema, logging = true)
val vocSize = $(vocabSize)
val input = dataset.select($(inputCol)).rdd.map(_.getAs[Seq[String]](0))
val minDf = if ($(minDF) >= 1.0) {
$(minDF)
} else {
$(minDF) * input.cache().count()
}
val totalDocs = dataset.sparkSession.sparkContext.longAccumulator("TF-IDF: Document count")
    val wordAndDocCounts: RDD[(String, (Long, Long))] = input.mapPartitions { docs =>
      // One pair of counts maps per partition keeps garbage-collection pressure low;
      // the `seen` set is cleared per document so df is a true document frequency
      // (the previous per-partition 1L undercounted documents).
      val wc = new Object2LongOpenHashMap[String]()
      val df = new Object2LongOpenHashMap[String]()
      val seen = new ObjectOpenHashSet[String]()
      for (tokens <- docs) {
        totalDocs.add(1L)
        seen.clear()
        for (w <- tokens) {
          wc.addTo(w, 1L)
          // count each document at most once per word
          if (seen.add(w)) df.addTo(w, 1L)
        }
      }
      new Iterator[(String, (Long, Long))]() {
        val it = wc.object2LongEntrySet().fastIterator()
        override def hasNext: Boolean = it.hasNext
        override def next(): (String, (Long, Long)) = {
          val entry = it.next()
          (entry.getKey, (entry.getLongValue, df.getLong(entry.getKey)))
        }
      }
}.reduceByKey { case ((wc1, df1), (wc2, df2)) =>
(wc1 + wc2, df1 + df2)
}.filter { case (word, (wc, df)) =>
df >= minDf
}.cache()
val fullVocabSize = wordAndDocCounts.count()
val topWords = wordAndDocCounts
.top(math.min(fullVocabSize, vocSize).toInt)(Ordering.by({case (_, (count, _)) => count}))
require(topWords.length > 0, "The vocabulary size should be > 0. Lower minDF as necessary.")
val numDocs: Long = totalDocs.value
require(numDocs > 0, "Counted only 0 documents!")
println(s"Total number of documents $numDocs")
val vocab = Array.ofDim[String](topWords.length)
val idfArr = Array.ofDim[Double](topWords.length)
    for (i <- topWords.indices) {
      topWords(i) match {
        case (word, (_, documentCount)) =>
          vocab(i) = word
          // smoothed inverse document frequency: log((#docs + 1) / (#docs containing word + 1))
          idfArr(i) = math.log((numDocs + 1.0) / (documentCount + 1.0))
          require(idfArr(i) >= 0, s"Negative idf for word $word: ${idfArr(i)}")
      }
    }
copyValues(new TfIdfModel(uid, vocab, idfArr).setParent(this))
}
override def copy(extra: ParamMap): Estimator[TfIdfModel] = defaultCopy(extra)
@DeveloperApi
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
}
object TfIdf {
/**
* A growable buffer of strings whose content can be sorted in place
*/
private class Buffer(private var _backingArray: Array[String]) {
var _size: Int = 0
def size: Int = _size
def apply(i: Int): String = {
if (i >= _size) throw new IndexOutOfBoundsException(s"$i >= ${_size}")
_backingArray(i)
}
def sortInPlace(): Unit = {
java.util.Arrays.sort(_backingArray, 0, _size, Ordering[String])
}
def clear(): Unit = _size = 0
def ensureCapacity(c: Int) = {
if (_backingArray.length < c) {
val largerArr = Array.ofDim[String](c)
System.arraycopy(_backingArray, 0, largerArr, 0, _size)
_backingArray = largerArr
}
}
def appendAll(seq: Seq[String]): Unit = {
ensureCapacity(_size + seq.size)
for (e <- seq) {
_backingArray(_size) = e
_size += 1
}
}
}
private object Buffer {
def apply(): Buffer = new Buffer(Array[String]())
}
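  // Hypothetical usage sketch for the private Buffer above:
  //   val b = Buffer()
  //   b.appendAll(Seq("b", "c", "a"))
  //   b.sortInPlace() // b(0) == "a", b.size == 3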
}
class TfIdfModel(override val uid: String,
val vocabulary: Array[String],
val invDocFrequency: Array[Double])
extends Model[TfIdfModel] with MLWritable with TfIdfParams {
def this(vocabulary: Array[String], invDocFrequency: Array[Double]) =
this(Identifiable.randomUID("tf-idf"), vocabulary, invDocFrequency)
override def copy(extra: ParamMap): TfIdfModel = {
val copied = new TfIdfModel(uid, vocabulary, invDocFrequency).setParent(parent)
copyValues(copied, extra)
}
override def write: MLWriter = ???
// maps each word to its index in the vocabulary
private var broadcastVocabDict: Option[Broadcast[Map[String, Int]]] = None
private var broadcastInvDocFreq: Option[Broadcast[Array[Double]]] = None
override def transform(dataset: Dataset[_]): DataFrame = {
transformSchema(dataset.schema, logging = true)
if (broadcastVocabDict.isEmpty) {
broadcastVocabDict = Some(
dataset.sparkSession.sparkContext.broadcast(vocabulary.zipWithIndex.toMap))
}
if (broadcastInvDocFreq.isEmpty) {
broadcastInvDocFreq = Some(dataset.sparkSession.sparkContext.broadcast(invDocFrequency))
}
val dictBr = broadcastVocabDict.get
val invFreqBr = broadcastInvDocFreq.get
val minTf: Double = getMinTF
val vectorizer = udf { (document: Seq[String]) =>
val termCounts = Array.ofDim[Double](dictBr.value.size)
var tokenCount = 0L
document.foreach { term =>
dictBr.value.get(term) match {
case Some(index) => termCounts(index) += 1.0
case None => // ignore terms not in the vocabulary
}
tokenCount += 1
}
val effectiveMinTF = if (minTf >= 1.0) minTf else tokenCount * minTf
      val effectiveCounts = termCounts.view // a view avoids materialising an intermediate collection
        .zipWithIndex
        .filter { case (tf, _) => tf >= effectiveMinTF } // filter on the count, not the index
        .map { case (tf, idx) => (idx, tf * invFreqBr.value(idx)) }
Vectors.sparse(dictBr.value.size, effectiveCounts)
}
dataset.withColumn($(outputCol), vectorizer(col($(inputCol))))
}
@DeveloperApi
override def transformSchema(schema: StructType): StructType = {
validateAndTransformSchema(schema)
}
}
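// Hypothetical usage sketch (not part of the original file): wires the estimator and
// model together on a toy corpus. The local master, app name, and column names are
// assumptions, and params are passed via a ParamMap so the sketch relies only on the
// inputCol/outputCol params that the (unshown) InputCol/OutputCol traits must define.
object TfIdfExample {
  def main(args: Array[String]): Unit = {
    val spark = org.apache.spark.sql.SparkSession.builder()
      .master("local[*]")
      .appName("tf-idf-example")
      .getOrCreate()
    import spark.implicits._
    val docs = Seq(
      Seq("spark", "scala", "tfidf"),
      Seq("spark", "spark", "mllib")
    ).toDF("words")
    val tfidf = new TfIdf().setVocabSize(1 << 10).setMinDF(1.0)
    val model = tfidf.fit(docs, ParamMap(tfidf.inputCol -> "words", tfidf.outputCol -> "features"))
    model.transform(docs).show(truncate = false)
    spark.stop()
  }
}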
| Cecca/diversity-maximization | mllib/src/main/scala/it/unipd/dei/diversity/mllib/TfIdf.scala | Scala | gpl-3.0 | 9,726 |
package com.expedia.gps.geo.reactive101.client.scala
import java.util.concurrent.TimeUnit
import com.codahale.metrics.{ConsoleReporter, MetricRegistry, Timer}
import org.json4s.{DefaultFormats, Formats}
import org.slf4j.LoggerFactory
/**
 * Base trait for client benchmark tests: silences the root logger and exposes a
 * shared metrics registry with console reporting.
 */
trait AbstractTest {
val rootLogger = LoggerFactory.getLogger(org.slf4j.Logger.ROOT_LOGGER_NAME).asInstanceOf[ch.qos.logback.classic.Logger]
rootLogger.setLevel(ch.qos.logback.classic.Level.OFF)
protected implicit val jsonFormats: Formats = DefaultFormats
private val NB_CALLS: Int = 1000
val metrics = new MetricRegistry()
val reporter = ConsoleReporter.forRegistry(metrics)
.convertRatesTo(TimeUnit.SECONDS)
.convertDurationsTo(TimeUnit.MILLISECONDS)
.build()
  val mainTimer: Timer = metrics.timer("Multiple call")
  val subTimer: Timer = metrics.timer("Multiple call.sub")
}
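// Hypothetical usage sketch (not part of the original file): a concrete benchmark
// mixes in AbstractTest, times its workload with the shared registry, and reports.
// The sleep is a stand-in for a real client call.
object AbstractTestExample extends AbstractTest {
  def main(args: Array[String]): Unit = {
    val ctx = mainTimer.time()
    try Thread.sleep(10)
    finally ctx.stop()
    reporter.report() // dump timer statistics to the console
  }
}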
| olmartinATexpedia/reactive101 | src/test/scala/com/expedia/gps/geo/reactive101/client/scala/AbstractTest.scala | Scala | apache-2.0 | 867 |
package org.orbroker
import org.orbroker._
import callback._
import adapt._
import exception._
import java.sql._
import java.util.LinkedHashMap
import scala.collection.JavaConversions._
import scala.collection.mutable.Buffer
/** Package for enriching O/R Broker. */
package object enrich {
implicit final class RichQuery(val qry: Queryable) extends AnyVal {
/**
* Execute unordered query and merge values with identical keys.
     * @param token Query token
* @param parms Optional parameters
* @param kf Key function. Get key value from object
* @param merger Merging function
     * @return Collection of extracted objects, in the order first encountered in the result set
*/
def selectUnordered[T](token: Token[T], parms: (String, Any)*)(kf: T => Any)(merger: (T, T) => T): Iterable[T] = {
val map = new LinkedHashMap[Any, T]
qry.select(token, parms: _*) { rows =>
rows.foreach { t =>
val key = kf(t)
val ot = map.get(key)
if (ot != null) { // Replace with merged object
map.put(key, merger(ot, t))
} else { // Store new object
map.put(key, t)
}
}
}
map.values
}
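    /*
     * Hypothetical usage sketch (identifiers are illustrative, not from this library):
     *   case class Author(id: Int, books: Set[String])
     *   val authors = session.selectUnordered(Tokens.authorsJoinBooks)(_.id) { (a, b) =>
     *     a.copy(books = a.books ++ b.books) // merge rows that share an author id
     *   }
     */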
/**
* Execute query and append result to buffer.
     * @param token The query token
* @param buffer The buffer to append to
* @param parms Optional parameters
     * @return Number of elements appended to the buffer
*/
def selectToBuffer[T](token: Token[T], buffer: Buffer[T], parms: (String, Any)*): Int = {
val preSize = buffer.size
qry.select(token, parms: _*) { rows =>
rows.foreach { t =>
buffer += t
}
}
buffer.size - preSize
}
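    /*
     * Hypothetical usage sketch (token and element type are illustrative):
     *   val buf = scala.collection.mutable.ArrayBuffer.empty[Author]
     *   val appended = session.selectToBuffer(Tokens.allAuthors, buf)
     */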
}
/**
* Pimped version of [[org.orbroker.OutParms]].
* @author Nils Kilden-Pedersen
*
*/
implicit class RichOutParms(val out: OutParms) extends AnyVal {
/**
* Extract objects from ResultSet parameter.
* @param parmName
* @param extractor
* @param receiver
*/
def extract[T, R](parmName: String)(extractor: QueryExtractor[T])(receiver: Iterator[T] => R): Option[R] = {
      for (rs <- out(parmName).opt[ResultSet]) yield extractor match {
case je: JoinExtractor[_] => out.mapResult(new SafeJoinExtractor(je), rs, receiver)
case _ => out.mapResult(extractor, rs, receiver)
}
}
/**
* Extract at most one object from ResultSet parameter.
* @param parmName
* @param extractor
* @return Some object or None if no rows were returned
*/
@throws(classOf[MoreThanOneException])
def extractOne[T](parmName: String)(extractor: QueryExtractor[T]): Option[T] = {
var maybe: Option[T] = None
extract(parmName)(extractor) { rows =>
rows.foreach { t =>
if (maybe.isEmpty) {
maybe = Some(t)
} else {
              throw new MoreThanOneException("Statement '%s' with ResultSet parameter \"%s\" returned more than one result".format(out.id.name, parmName))
}
}
}
maybe
}
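    /*
     * Hypothetical usage sketch (parameter name and extractor are illustrative):
     *   val user = out.extractOne("userCursor")(UserExtractor)
     *   // None if the cursor is empty; MoreThanOneException on a second row
     */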
/**
* Extract all objects from ResultSet parameter.
* @param parmName
* @param extractor
     * @return All extracted objects, in result set order
*/
def extractAll[T](parmName: String)(extractor: QueryExtractor[T]): IndexedSeq[T] = {
extract(parmName)(extractor) { rows =>
rows.foldLeft(new scala.collection.mutable.ArrayBuffer[T](64)) {
case (buffer, t) =>
buffer += t
buffer
}
}.getOrElse(IndexedSeq.empty)
}
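    /*
     * Hypothetical usage sketch (parameter name and extractor are illustrative):
     *   val rows = out.extractAll("rowsCursor")(RowExtractor) // empty IndexedSeq if the parameter is NULL
     */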
}
}
| nilskp/orbroker | src/main/scala/org/orbroker/enrich/package.scala | Scala | mit | 3,675 |