| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 5 to 1M | stringlengths 5 to 109 | stringlengths 6 to 208 | stringclasses 1 value | stringclasses 15 values | int64 5 to 1M |
package akka.persistence.pg.perf
import java.util.concurrent.atomic.AtomicInteger
import akka.actor.{ActorLogging, Props}
import akka.persistence.PersistentActor
import akka.persistence.pg.perf.Messages.{Alter, Altered}
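// Performance-test actor: persists a batch of ten Altered events for every Alter command and replies to the sender with the original text.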
class PersistAllActor(id: Int) extends PersistentActor with ActorLogging {
override val persistenceId: String = s"PersistAllActor_$id"
override def receiveRecover: Receive = { case _ => }
override def receiveCommand: Receive = {
case Alter(txt) =>
val created = System.currentTimeMillis()
val events = 1 to 10 map { i =>
Altered(s"${txt}_$i", created)
}
persistAll(events) { _ =>
sender ! txt
}
}
}
object PersistAllActor {
private val id = new AtomicInteger(0)
def reset() = id.set(0)
def props = Props(new PersistAllActor(id.incrementAndGet()))
}
| WegenenVerkeer/akka-persistence-postgresql | modules/akka-persistence-pg/src/test/scala/akka/persistence/pg/perf/PersistAllActor.scala | Scala | mit | 845 |
package sttp.client3.ziojson
trait SttpZioJsonApiExtensions {}
| softwaremill/sttp | json/zio1-json/src/main/scalajs/sttp/client3/ziojson/SttpZioJsonApiExtensions.scala | Scala | apache-2.0 | 64 |
package object Macros {
def foo: Unit = macro Impls.foo
}
object Test extends dotty.runtime.LegacyApp {
import Macros._
foo
} | yusuke2255/dotty | tests/disabled/macro/run/macro-term-declared-in-package-object/Macros_Test_2.scala | Scala | bsd-3-clause | 132 |
// scalac: -Xfatal-warnings
//
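// Each pattern below is annotated with the expected unchecked-warning behaviour (/* warn */ vs /* nowarn */); -Xfatal-warnings turns those warnings into errors.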
sealed trait A2[T1]
final class B2[T1, T2] extends A2[T1]
sealed trait A[T]
final class B[T] extends A[T]
sealed trait A1[T]
trait B1[T] extends A1[T]
trait C1[T] extends A1[T]
trait D1[T] extends A1[Int]
trait E1[T] extends B1[Int]
trait F1[T] extends B1[T]
object MiscUnchecked {
/* nowarn */ def knownType1(x: A[Int]) = x match { case _: B[Int] if true => 1 }
/* nowarn */ def knownType2(x: B[Int]) = x match { case _: A[Int] if true => 1 }
/* nowarn */ def tparamLeakage1(x: Any) = x match { case Array() => 1 }
/* nowarn */ def tparamLeakage2(x: Any) = x match { case List() => 1 }
// E1[Double] implies B1[Int], but B1[Int] does not imply E1[Double], even if .isInstanceOf[E1[_]]
// F1[Int] implies B1[Int], and B1[Int] implies F1[Int]
/* nowarn */ def peerTypes1(x: B1[Int]) = x match { case _: C1[Int] => true }
/* warn */ def peerTypes2(x: B1[Int]) = x match { case _: E1[Double] => true }
/* warn */ def peerTypes3(x: B1[_]) = x match { case _: F1[Double] => true }
/* nowarn */ def peerTypes4(x: B1[Int]) = x match { case _: F1[Int] => true }
/* warn */ def twotypes1[T](x: B2[T, Int]) = x match { case _: A2[Int] => true }
/* nowarn */ def twotypes2[T](x: B2[Int, T]) = x match { case _: A2[Int] => true }
/* nowarn */ def twotypes3(x: A2[Int]) = x match { case _: B2[Int, _] => true }
/* nowarn */ def twotypes4[T](x: A2[T]) = x match { case _: B2[T, _] => true }
/* warn */ def twotypes5[T](x: A2[T]) = x match { case _: B2[_, Int] => true }
}
object Arrays {
def f1(x: Any) = x match {
/* nowarn */ case _: Array[Int] => ()
/* nowarn */ case _: Array[Boolean] => ()
/* nowarn */ case _: Array[String] => ()
/* warn */ case _: Array[List[String]] => ()
/* nowarn */ case _: Array[Array[String]] => ()
/* nowarn */ case _: Array[Array[Array[String]]] => ()
/* warn */ case _: Array[Array[List[String]]] => ()
}
def f2(x: Array[_]) = x match {
/* nowarn */ case _: Array[Int] => ()
/* nowarn */ case _: Array[Boolean] => ()
/* nowarn */ case _: Array[String] => ()
/* warn */ case _: Array[List[String]] => ()
/* nowarn */ case _: Array[Array[String]] => ()
/* nowarn */ case _: Array[Array[Array[String]]] => ()
/* warn */ case _: Array[Array[List[String]]] => ()
}
def f3[T](x: Array[T]) = x match {
/* nowarn */ case _: Array[Int] => ()
/* nowarn */ case _: Array[Boolean] => ()
/* nowarn */ case _: Array[String] => ()
/* warn */ case _: Array[List[String]] => ()
/* nowarn */ case _: Array[Array[String]] => ()
/* warn */ case _: Array[List[Array[String]]] => ()
/* warn */ case _: Array[Array[List[String]]] => ()
}
}
object Matching {
class Q {
type A
type B <: A
def f(xs: Iterable[B]) = xs match {
/* nowarn */ case xs: List[A] => xs.head
/* nowarn */ case xs: Seq[B] => xs.head
/* warn */ case xs: Set[A] => xs.head
}
def f2[T <: B](xs: Iterable[T]) = xs match {
/* nowarn */ case xs: List[B with T] => xs.head
/* nowarn */ case xs: Seq[A] => xs.head
/* nowarn */ case xs: Set[T] => xs.head
}
}
}
| scala/scala | test/files/neg/unchecked3.scala | Scala | apache-2.0 | 3,369 |
package com.varunvats.practice.string
import com.varunvats.practice.sorting.UnitSpec
class LongestSubstringUniqueCharactersSpec extends UnitSpec {
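// afterWord lets the nested clauses read as full sentences, e.g. "... when given a string that is empty must return an empty string".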
val givenAString = afterWord("given a string")
"The longest sub-string (containing unique characters) finder" when givenAString {
"that is empty" must {
"return an empty string" in {
LongestSubstringUniqueCharacters.find("") shouldBe ""
}
}
"containing only one character" must {
"return a string containing only that character" in {
LongestSubstringUniqueCharacters.find("z") shouldBe "z"
}
}
"containing two different characters" must {
"return a string containing the same two characters" in {
LongestSubstringUniqueCharacters.find("xz") shouldBe "xz"
}
}
"containing three same characters" must {
"return a string containing only one character" in {
LongestSubstringUniqueCharacters.find("ppp") shouldBe "p"
}
}
"containing the sub-string of interest at the beginning of the string" must {
"return the sub-string of interest" in {
val hayStack = "xapqmnox"
val needle = "xapqmno"
LongestSubstringUniqueCharacters.find(hayStack) shouldBe needle
}
}
"containing the sub-string of interest at the end of the string" must {
"return the sub-string of interest" in {
val hayStack = "xapqmnoxab"
val needle = "pqmnoxab"
LongestSubstringUniqueCharacters.find(hayStack) shouldBe needle
}
}
"containing the sub-string of interest in the middle of the string" must {
"return the sub-string of interest" in {
val hayStack = "xaoiabpolmnbo"
val needle = "iabpolmn"
LongestSubstringUniqueCharacters.find(hayStack) shouldBe needle
}
}
"containing two sub-strings of interest" must {
"return the sub-string of interest occurring first in the string" in {
val hayStack = "pqlamobxyzp"
val needle = "pqlamobxyz"
LongestSubstringUniqueCharacters.find(hayStack) shouldBe needle
}
}
}
}
| varunvats/practice | jvm/src/test/scala/com/varunvats/practice/string/LongestSubstringUniqueCharactersSpec.scala | Scala | mit | 2,123 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming
import java.io.File
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.language.{implicitConversions, postfixOps}
import scala.util.Random
import org.apache.hadoop.conf.Configuration
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._
import org.apache.spark.{SparkConf, SparkException, SparkFunSuite}
import org.apache.spark.internal.Logging
import org.apache.spark.storage.StreamBlockId
import org.apache.spark.streaming.receiver.BlockManagerBasedStoreResult
import org.apache.spark.streaming.scheduler._
import org.apache.spark.streaming.util._
import org.apache.spark.streaming.util.WriteAheadLogSuite._
import org.apache.spark.util.{Clock, ManualClock, SystemClock, Utils}
class ReceivedBlockTrackerSuite
extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
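// Covers block addition, block-to-batch allocation, WAL-based recovery and cleanup of old batches.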
val hadoopConf = new Configuration()
val streamId = 1
var allReceivedBlockTrackers = new ArrayBuffer[ReceivedBlockTracker]()
var checkpointDirectory: File = null
var conf: SparkConf = null
before {
conf = new SparkConf().setMaster("local[2]").setAppName("ReceivedBlockTrackerSuite")
checkpointDirectory = Utils.createTempDir()
}
after {
allReceivedBlockTrackers.foreach { _.stop() }
Utils.deleteRecursively(checkpointDirectory)
}
test("block addition, and block to batch allocation") {
val receivedBlockTracker = createTracker(setCheckpointDir = false)
receivedBlockTracker.isWriteAheadLogEnabled should be (false) // should be disabled by default
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
val blockInfos = generateBlockInfos()
blockInfos.map(receivedBlockTracker.addBlock)
// Verify added blocks are unallocated blocks
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual blockInfos
receivedBlockTracker.hasUnallocatedReceivedBlocks should be (true)
// Allocate the blocks to a batch and verify that all of them have been allocated
receivedBlockTracker.allocateBlocksToBatch(1)
receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos
receivedBlockTracker.getBlocksOfBatch(1) shouldEqual Map(streamId -> blockInfos)
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldBe empty
receivedBlockTracker.hasUnallocatedReceivedBlocks should be (false)
// Allocate no blocks to another batch
receivedBlockTracker.allocateBlocksToBatch(2)
receivedBlockTracker.getBlocksOfBatchAndStream(2, streamId) shouldBe empty
receivedBlockTracker.getBlocksOfBatch(2) shouldEqual Map(streamId -> Seq.empty)
// Verify that older batches have no operation on batch allocation,
// will return the same blocks as previously allocated.
receivedBlockTracker.allocateBlocksToBatch(1)
receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos
blockInfos.map(receivedBlockTracker.addBlock)
receivedBlockTracker.allocateBlocksToBatch(2)
receivedBlockTracker.getBlocksOfBatchAndStream(2, streamId) shouldBe empty
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual blockInfos
}
test("recovery and cleanup with write ahead logs") {
val manualClock = new ManualClock
// Set the time increment level to twice the rotation interval so that every increment creates
// a new log file
def incrementTime() {
val timeIncrementMillis = 2000L
manualClock.advance(timeIncrementMillis)
}
// Generate and add blocks to the given tracker
def addBlockInfos(tracker: ReceivedBlockTracker): Seq[ReceivedBlockInfo] = {
val blockInfos = generateBlockInfos()
blockInfos.map(tracker.addBlock)
blockInfos
}
// Print the data present in the log ahead files in the log directory
def printLogFiles(message: String) {
val fileContents = getWriteAheadLogFiles().map { file =>
(s"\\n>>>>> $file: <<<<<\\n${getWrittenLogData(file).mkString("\\n")}")
}.mkString("\\n")
logInfo(s"\\n\\n=====================\\n$message\\n$fileContents\\n=====================\\n")
}
// Set WAL configuration
conf.set("spark.streaming.driver.writeAheadLog.rollingIntervalSecs", "1")
require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = true) === 1)
// Start tracker and add blocks
val tracker1 = createTracker(clock = manualClock)
tracker1.isWriteAheadLogEnabled should be (true)
val blockInfos1 = addBlockInfos(tracker1)
tracker1.getUnallocatedBlocks(streamId).toList shouldEqual blockInfos1
// Verify whether write ahead log has correct contents
val expectedWrittenData1 = blockInfos1.map(BlockAdditionEvent)
getWrittenLogData() shouldEqual expectedWrittenData1
getWriteAheadLogFiles() should have size 1
incrementTime()
// Restart the tracker without recovering from the WAL and verify the list of unallocated blocks is empty
val tracker1_ = createTracker(clock = manualClock, recoverFromWriteAheadLog = false)
tracker1_.getUnallocatedBlocks(streamId) shouldBe empty
tracker1_.hasUnallocatedReceivedBlocks should be (false)
// Restart tracker and verify recovered list of unallocated blocks
val tracker2 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
val unallocatedBlocks = tracker2.getUnallocatedBlocks(streamId).toList
unallocatedBlocks shouldEqual blockInfos1
unallocatedBlocks.foreach { block =>
block.isBlockIdValid() should be (false)
}
// Allocate blocks to batch and verify whether the unallocated blocks got allocated
val batchTime1 = manualClock.getTimeMillis()
tracker2.allocateBlocksToBatch(batchTime1)
tracker2.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1
tracker2.getBlocksOfBatch(batchTime1) shouldEqual Map(streamId -> blockInfos1)
// Add more blocks and allocate to another batch
incrementTime()
val batchTime2 = manualClock.getTimeMillis()
val blockInfos2 = addBlockInfos(tracker2)
tracker2.allocateBlocksToBatch(batchTime2)
tracker2.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
// Verify whether log has correct contents
val expectedWrittenData2 = expectedWrittenData1 ++
Seq(createBatchAllocation(batchTime1, blockInfos1)) ++
blockInfos2.map(BlockAdditionEvent) ++
Seq(createBatchAllocation(batchTime2, blockInfos2))
getWrittenLogData() shouldEqual expectedWrittenData2
// Restart tracker and verify recovered state
incrementTime()
val tracker3 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1
tracker3.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
tracker3.getUnallocatedBlocks(streamId) shouldBe empty
// Cleanup first batch but not second batch
val oldestLogFile = getWriteAheadLogFiles().head
incrementTime()
tracker3.cleanupOldBatches(batchTime2, waitForCompletion = true)
// Verify that the batch allocations have been cleaned, and that the cleanup has been written to the log
tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual Seq.empty
getWrittenLogData(getWriteAheadLogFiles().last) should contain(createBatchCleanup(batchTime1))
// Verify that at least one log file gets deleted
eventually(timeout(10 seconds), interval(10 millisecond)) {
getWriteAheadLogFiles() should not contain oldestLogFile
}
printLogFiles("After clean")
// Restart tracker and verify recovered state, specifically whether info about the first
// batch has been removed, but not the second batch
incrementTime()
val tracker4 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
tracker4.getUnallocatedBlocks(streamId) shouldBe empty
tracker4.getBlocksOfBatchAndStream(batchTime1, streamId) shouldBe empty // should be cleaned
tracker4.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
}
test("disable write ahead log when checkpoint directory is not set") {
// When checkpoint is disabled, then the write ahead log is disabled
val tracker1 = createTracker(setCheckpointDir = false)
tracker1.isWriteAheadLogEnabled should be (false)
}
test("parallel file deletion in FileBasedWriteAheadLog is robust to deletion error") {
conf.set("spark.streaming.driver.writeAheadLog.rollingIntervalSecs", "1")
require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = true) === 1)
val addBlocks = generateBlockInfos()
val batch1 = addBlocks.slice(0, 1)
val batch2 = addBlocks.slice(1, 3)
val batch3 = addBlocks.slice(3, addBlocks.length)
assert(getWriteAheadLogFiles().length === 0)
// list of timestamps for files
val t = Seq.tabulate(5)(i => i * 1000)
writeEventsManually(getLogFileName(t(0)), Seq(createBatchCleanup(t(0))))
assert(getWriteAheadLogFiles().length === 1)
// The goal is to create several log files which should have been cleaned up.
// If we face any issue during recovery, because these old files exist, then we need to make
// deletion more robust rather than a parallelized operation where we fire and forget
val batch1Allocation = createBatchAllocation(t(1), batch1)
writeEventsManually(getLogFileName(t(1)), batch1.map(BlockAdditionEvent) :+ batch1Allocation)
writeEventsManually(getLogFileName(t(2)), Seq(createBatchCleanup(t(1))))
val batch2Allocation = createBatchAllocation(t(3), batch2)
writeEventsManually(getLogFileName(t(3)), batch2.map(BlockAdditionEvent) :+ batch2Allocation)
writeEventsManually(getLogFileName(t(4)), batch3.map(BlockAdditionEvent))
// We should have 5 different log files as we called `writeEventsManually` with 5 different
// timestamps
assert(getWriteAheadLogFiles().length === 5)
// Create the tracker to recover from the log files. We're going to ask the tracker to clean
// things up, and then we're going to rewrite that data, and recover using a different tracker.
// They should have identical data no matter what
val tracker = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
def compareTrackers(base: ReceivedBlockTracker, subject: ReceivedBlockTracker): Unit = {
subject.getBlocksOfBatchAndStream(t(3), streamId) should be(
base.getBlocksOfBatchAndStream(t(3), streamId))
subject.getBlocksOfBatchAndStream(t(1), streamId) should be(
base.getBlocksOfBatchAndStream(t(1), streamId))
subject.getBlocksOfBatchAndStream(t(0), streamId) should be(Nil)
}
// ask the tracker to clean up some old files
tracker.cleanupOldBatches(t(3), waitForCompletion = true)
assert(getWriteAheadLogFiles().length === 3)
val tracker2 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker2)
// rewrite first file
writeEventsManually(getLogFileName(t(0)), Seq(createBatchCleanup(t(0))))
assert(getWriteAheadLogFiles().length === 4)
// make sure trackers are consistent
val tracker3 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker3)
// rewrite second file
writeEventsManually(getLogFileName(t(1)), batch1.map(BlockAdditionEvent) :+ batch1Allocation)
assert(getWriteAheadLogFiles().length === 5)
// make sure trackers are consistent
val tracker4 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker4)
}
/**
* Create tracker object with the optional provided clock. Use fake clock if you
* want to control time by manually incrementing it to test log cleanup.
*/
def createTracker(
setCheckpointDir: Boolean = true,
recoverFromWriteAheadLog: Boolean = false,
clock: Clock = new SystemClock): ReceivedBlockTracker = {
val cpDirOption = if (setCheckpointDir) Some(checkpointDirectory.toString) else None
val tracker = new ReceivedBlockTracker(
conf, hadoopConf, Seq(streamId), clock, recoverFromWriteAheadLog, cpDirOption)
allReceivedBlockTrackers += tracker
tracker
}
/** Generate block infos using random ids */
def generateBlockInfos(): Seq[ReceivedBlockInfo] = {
List.fill(5)(ReceivedBlockInfo(streamId, Some(0L), None,
BlockManagerBasedStoreResult(StreamBlockId(streamId, math.abs(Random.nextInt)), Some(0L))))
}
/**
* Write received block tracker events to a file manually.
*/
def writeEventsManually(filePath: String, events: Seq[ReceivedBlockTrackerLogEvent]): Unit = {
val writer = HdfsUtils.getOutputStream(filePath, hadoopConf)
events.foreach { event =>
val bytes = Utils.serialize(event)
writer.writeInt(bytes.size)
writer.write(bytes)
}
writer.close()
}
/** Get all the data written in the given write ahead log file. */
def getWrittenLogData(logFile: String): Seq[ReceivedBlockTrackerLogEvent] = {
getWrittenLogData(Seq(logFile))
}
/** Get the log file name for the given log start time. */
def getLogFileName(time: Long, rollingIntervalSecs: Int = 1): String = {
checkpointDirectory.toString + File.separator + "receivedBlockMetadata" +
File.separator + s"log-$time-${time + rollingIntervalSecs * 1000}"
}
/**
* Get all the data written in the given write ahead log files. By default, it will read all
* files in the test log directory.
*/
def getWrittenLogData(logFiles: Seq[String] = getWriteAheadLogFiles)
: Seq[ReceivedBlockTrackerLogEvent] = {
logFiles.flatMap {
file => new FileBasedWriteAheadLogReader(file, hadoopConf).toSeq
}.flatMap { byteBuffer =>
val validBuffer = if (WriteAheadLogUtils.isBatchingEnabled(conf, isDriver = true)) {
Utils.deserialize[Array[Array[Byte]]](byteBuffer.array()).map(ByteBuffer.wrap)
} else {
Array(byteBuffer)
}
validBuffer.map(b => Utils.deserialize[ReceivedBlockTrackerLogEvent](b.array()))
}.toList
}
/** Get all the write ahead log files in the test directory */
def getWriteAheadLogFiles(): Seq[String] = {
import ReceivedBlockTracker._
val logDir = checkpointDirToLogDir(checkpointDirectory.toString)
getLogFilesInDirectory(logDir).map { _.toString }
}
/** Create batch allocation object from the given info */
def createBatchAllocation(time: Long, blockInfos: Seq[ReceivedBlockInfo])
: BatchAllocationEvent = {
BatchAllocationEvent(time, AllocatedBlocks(Map((streamId -> blockInfos))))
}
/** Create batch clean object from the given info */
def createBatchCleanup(time: Long, moreTimes: Long*): BatchCleanupEvent = {
BatchCleanupEvent((Seq(time) ++ moreTimes).map(Time.apply))
}
implicit def millisToTime(milliseconds: Long): Time = Time(milliseconds)
implicit def timeToMillis(time: Time): Long = time.milliseconds
}
| Panos-Bletsos/spark-cost-model-optimizer | streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala | Scala | apache-2.0 | 15,903 |
package org.purview.core.report
import java.io.Serializable
import org.purview.core.data.Color
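// Severity levels attached to analyser reports; levels that also mix in LevelColor carry a colour used for display.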
sealed trait ReportLevel extends NotNull with Serializable {
val name: String
}
trait LevelColor extends ReportLevel {
def color: Color
}
object Debug extends ReportLevel
with LevelColor {
val name = "Debug"
val color = Color(1, 0, 0.5f, 0)
}
object Information extends ReportLevel
with LevelColor {
val name = "Information"
val color = Color.Green
}
object Warning extends ReportLevel
with LevelColor {
val name = "Warning"
val color = Color.Yellow
}
object Error extends ReportLevel
with LevelColor {
val name = "Error"
val color = Color.Red
}
object Critical extends ReportLevel
with LevelColor {
val name = "Critical"
val color = Color(1, 0, 0, 0)
}
| mroth23/purview | purview-core/src/main/scala/org/purview/core/report/ReportLevel.scala | Scala | apache-2.0 | 872 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s
import java.util.Locale
import io.fabric8.kubernetes.api.model.{LocalObjectReference, LocalObjectReferenceBuilder, Pod}
import org.apache.spark.SparkConf
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.submit._
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.ConfigEntry
import org.apache.spark.resource.ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID
import org.apache.spark.util.Utils
/**
* Structure containing metadata for Kubernetes logic to build Spark pods.
*/
private[spark] abstract class KubernetesConf(val sparkConf: SparkConf) {
val resourceNamePrefix: String
def labels: Map[String, String]
def environment: Map[String, String]
def annotations: Map[String, String]
def secretEnvNamesToKeyRefs: Map[String, String]
def secretNamesToMountPaths: Map[String, String]
def volumes: Seq[KubernetesVolumeSpec]
def appName: String = get("spark.app.name", "spark")
def namespace: String = get(KUBERNETES_NAMESPACE)
def imagePullPolicy: String = get(CONTAINER_IMAGE_PULL_POLICY)
def imagePullSecrets: Seq[LocalObjectReference] = {
sparkConf
.get(IMAGE_PULL_SECRETS)
.map { secret =>
new LocalObjectReferenceBuilder().withName(secret).build()
}
}
def workerDecommissioning: Boolean =
sparkConf.get(org.apache.spark.internal.config.DECOMMISSION_ENABLED)
def nodeSelector: Map[String, String] =
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf, KUBERNETES_NODE_SELECTOR_PREFIX)
def contains(config: ConfigEntry[_]): Boolean = sparkConf.contains(config)
def get[T](config: ConfigEntry[T]): T = sparkConf.get(config)
def get(conf: String): String = sparkConf.get(conf)
def get(conf: String, defaultValue: String): String = sparkConf.get(conf, defaultValue)
def getOption(key: String): Option[String] = sparkConf.getOption(key)
}
private[spark] class KubernetesDriverConf(
sparkConf: SparkConf,
val appId: String,
val mainAppResource: MainAppResource,
val mainClass: String,
val appArgs: Array[String],
val proxyUser: Option[String])
extends KubernetesConf(sparkConf) {
override val resourceNamePrefix: String = {
val custom = if (Utils.isTesting) get(KUBERNETES_DRIVER_POD_NAME_PREFIX) else None
custom.getOrElse(KubernetesConf.getResourceNamePrefix(appName))
}
override def labels: Map[String, String] = {
val presetLabels = Map(
SPARK_APP_ID_LABEL -> appId,
SPARK_ROLE_LABEL -> SPARK_POD_DRIVER_ROLE)
val driverCustomLabels = KubernetesUtils.parsePrefixedKeyValuePairs(
sparkConf, KUBERNETES_DRIVER_LABEL_PREFIX)
presetLabels.keys.foreach { key =>
require(
!driverCustomLabels.contains(key),
s"Label with key $key is not allowed as it is reserved for Spark bookkeeping operations.")
}
driverCustomLabels ++ presetLabels
}
override def environment: Map[String, String] = {
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf, KUBERNETES_DRIVER_ENV_PREFIX)
}
override def annotations: Map[String, String] = {
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf, KUBERNETES_DRIVER_ANNOTATION_PREFIX)
}
def serviceAnnotations: Map[String, String] = {
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf,
KUBERNETES_DRIVER_SERVICE_ANNOTATION_PREFIX)
}
override def secretNamesToMountPaths: Map[String, String] = {
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf, KUBERNETES_DRIVER_SECRETS_PREFIX)
}
override def secretEnvNamesToKeyRefs: Map[String, String] = {
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf, KUBERNETES_DRIVER_SECRET_KEY_REF_PREFIX)
}
override def volumes: Seq[KubernetesVolumeSpec] = {
KubernetesVolumeUtils.parseVolumesWithPrefix(sparkConf, KUBERNETES_DRIVER_VOLUMES_PREFIX)
}
}
private[spark] class KubernetesExecutorConf(
sparkConf: SparkConf,
val appId: String,
val executorId: String,
val driverPod: Option[Pod],
val resourceProfileId: Int = DEFAULT_RESOURCE_PROFILE_ID)
extends KubernetesConf(sparkConf) with Logging {
override val resourceNamePrefix: String = {
get(KUBERNETES_EXECUTOR_POD_NAME_PREFIX).getOrElse(
KubernetesConf.getResourceNamePrefix(appName))
}
override def labels: Map[String, String] = {
val presetLabels = Map(
SPARK_EXECUTOR_ID_LABEL -> executorId,
SPARK_APP_ID_LABEL -> appId,
SPARK_ROLE_LABEL -> SPARK_POD_EXECUTOR_ROLE,
SPARK_RESOURCE_PROFILE_ID_LABEL -> resourceProfileId.toString)
val executorCustomLabels = KubernetesUtils.parsePrefixedKeyValuePairs(
sparkConf, KUBERNETES_EXECUTOR_LABEL_PREFIX)
presetLabels.keys.foreach { key =>
require(
!executorCustomLabels.contains(key),
s"Custom executor labels cannot contain $key as it is reserved for Spark.")
}
executorCustomLabels ++ presetLabels
}
override def environment: Map[String, String] = sparkConf.getExecutorEnv.filter(
p => checkExecutorEnvKey(p._1)).toMap
override def annotations: Map[String, String] = {
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf, KUBERNETES_EXECUTOR_ANNOTATION_PREFIX)
}
override def secretNamesToMountPaths: Map[String, String] = {
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf, KUBERNETES_EXECUTOR_SECRETS_PREFIX)
}
override def secretEnvNamesToKeyRefs: Map[String, String] = {
KubernetesUtils.parsePrefixedKeyValuePairs(sparkConf, KUBERNETES_EXECUTOR_SECRET_KEY_REF_PREFIX)
}
override def volumes: Seq[KubernetesVolumeSpec] = {
KubernetesVolumeUtils.parseVolumesWithPrefix(sparkConf, KUBERNETES_EXECUTOR_VOLUMES_PREFIX)
}
private def checkExecutorEnvKey(key: String): Boolean = {
// Pattern for matching an executorEnv key, which meets certain naming rules.
val executorEnvRegex = "[-._a-zA-Z][-._a-zA-Z0-9]*".r
if (executorEnvRegex.pattern.matcher(key).matches()) {
true
} else {
logWarning(s"Invalid key: $key: " +
"a valid environment variable name must consist of alphabetic characters, " +
"digits, '_', '-', or '.', and must not start with a digit." +
s"Regex used for validation is '$executorEnvRegex')")
false
}
}
}
private[spark] object KubernetesConf {
def createDriverConf(
sparkConf: SparkConf,
appId: String,
mainAppResource: MainAppResource,
mainClass: String,
appArgs: Array[String],
proxyUser: Option[String]): KubernetesDriverConf = {
// Parse executor volumes in order to verify configuration before the driver pod is created.
KubernetesVolumeUtils.parseVolumesWithPrefix(sparkConf, KUBERNETES_EXECUTOR_VOLUMES_PREFIX)
new KubernetesDriverConf(
sparkConf.clone(),
appId,
mainAppResource,
mainClass,
appArgs,
proxyUser)
}
def createExecutorConf(
sparkConf: SparkConf,
executorId: String,
appId: String,
driverPod: Option[Pod],
resourceProfileId: Int = DEFAULT_RESOURCE_PROFILE_ID): KubernetesExecutorConf = {
new KubernetesExecutorConf(sparkConf.clone(), appId, executorId, driverPod, resourceProfileId)
}
def getResourceNamePrefix(appName: String): String = {
val id = KubernetesUtils.uniqueID()
s"$appName-$id"
.trim
.toLowerCase(Locale.ROOT)
.replaceAll("\\\\s+", "-")
.replaceAll("\\\\.", "-")
.replaceAll("[^a-z0-9\\\\-]", "")
.replaceAll("-+", "-")
}
/**
* Build a resources name based on the vendor device plugin naming
* convention of: vendor-domain/resource. For example, an NVIDIA GPU is
* advertised as nvidia.com/gpu.
*/
def buildKubernetesResourceName(vendorDomain: String, resourceName: String): String = {
s"${vendorDomain}/${resourceName}"
}
}
| wangmiao1981/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala | Scala | apache-2.0 | 8,668 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.streaming.appmaster
import akka.actor._
import akka.pattern.ask
import com.typesafe.config.Config
import org.apache.gearpump.Time.MilliSeconds
import org.apache.gearpump.cluster.AppJar
import org.apache.gearpump.cluster.scheduler.{Resource, ResourceRequest}
import org.apache.gearpump.cluster.worker.WorkerId
import org.apache.gearpump.streaming.partitioner.PartitionerDescription
import org.apache.gearpump.streaming.appmaster.JarScheduler._
import org.apache.gearpump.streaming.task.TaskId
import org.apache.gearpump.streaming.{DAG, ProcessorDescription}
import org.apache.gearpump.util.{Constants, Graph, LogUtil}
import scala.concurrent.Future
/**
* Different processors of the stream application can use different jars. JarScheduler is the
* scheduler for different jars.
*
* For a DAG of multiple processors, each processor can have its own jar. Tasks of the same jar
* are scheduled by a TaskScheduler, and the TaskSchedulers themselves are managed by the JarScheduler.
*
* In runtime, the implementation is delegated to actor JarSchedulerImpl
*/
class JarScheduler(appId: Int, appName: String, config: Config, factory: ActorRefFactory) {
private val actor: ActorRef = factory.actorOf(Props(new JarSchedulerImpl(appId, appName, config)))
private implicit val dispatcher = factory.dispatcher
private implicit val timeout = Constants.FUTURE_TIMEOUT
/** Set the current DAG version active */
def setDag(dag: DAG, startClock: Future[MilliSeconds]): Unit = {
actor ! TransitToNewDag
startClock.map { start =>
actor ! NewDag(dag, start)
}
}
/** The AppMaster asks the JarScheduler how many resources it wants */
def getResourceRequestDetails(): Future[Array[ResourceRequestDetail]] = {
(actor ? GetResourceRequestDetails).asInstanceOf[Future[Array[ResourceRequestDetail]]]
}
/**
* AppMaster has resource allocated, and ask the jar scheduler to schedule tasks
* for this executor.
*/
def scheduleTask(appJar: AppJar, workerId: WorkerId, executorId: Int, resource: Resource)
: Future[List[TaskId]] = {
(actor ? ScheduleTask(appJar, workerId, executorId, resource))
.asInstanceOf[Future[List[TaskId]]]
}
/**
* Some executor JVM process is dead. AppMaster asks jar scheduler to re-schedule the impacted
* tasks.
*/
def executorFailed(executorId: Int): Future[Option[ResourceRequestDetail]] = {
(actor ? ExecutorFailed(executorId)).asInstanceOf[Future[Option[ResourceRequestDetail]]]
}
}
object JarScheduler {
case class ResourceRequestDetail(jar: AppJar, requests: Array[ResourceRequest])
case class NewDag(dag: DAG, startTime: MilliSeconds)
case object TransitToNewDag
case object GetResourceRequestDetails
/**
* Schedule tasks for one appJar.
*
* @param appJar Application jar.
* @param workerId Worker machine Id.
* @param executorId Executor Id.
* @param resource Slots that are available.
*/
case class ScheduleTask(appJar: AppJar, workerId: WorkerId, executorId: Int, resource: Resource)
/** Some executor JVM is dead, try to recover tasks that are located on failed executor */
case class ExecutorFailed(executorId: Int)
class JarSchedulerImpl(appId: Int, appName: String, config: Config) extends Actor with Stash {
// Each TaskScheduler maps to a jar.
private var taskSchedulers = Map.empty[AppJar, TaskScheduler]
private val LOG = LogUtil.getLogger(getClass)
def receive: Receive = waitForNewDag
def waitForNewDag: Receive = {
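// Waits for a NewDag: TransitToNewDag is ignored in this state and any other message is stashed until the new DAG is installed.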
case TransitToNewDag => // Continue current state
case NewDag(dag, startTime) =>
LOG.info(s"Init JarScheduler, dag version: ${dag.version}, startTime: $startTime")
val processors = dag.processors.values.groupBy(_.jar)
taskSchedulers = processors.map { jarAndProcessors =>
val (jar, processors) = jarAndProcessors
// Construct the sub DAG, each sub DAG maps to a separate jar.
val subGraph = Graph.empty[ProcessorDescription, PartitionerDescription]
processors.foreach { processor =>
if (startTime < processor.life.death) {
subGraph.addVertex(processor)
}
}
val subDagForSingleJar = DAG(subGraph)
val taskScheduler = taskSchedulers
.getOrElse(jar, new TaskSchedulerImpl(appId, appName, config))
LOG.info(s"Set DAG for TaskScheduler, count: " + subDagForSingleJar.processors.size)
taskScheduler.setDAG(subDagForSingleJar)
jar -> taskScheduler
}
unstashAll()
context.become(ready)
case other =>
stash()
}
def ready: Receive = {
// Notifies there is a new DAG coming.
case TransitToNewDag =>
context.become(waitForNewDag)
case GetResourceRequestDetails =>
// Asks each TaskScheduler (Each for one jar) the resource requests.
val result: Array[ResourceRequestDetail] = taskSchedulers.map { jarAndScheduler =>
val (jar, scheduler) = jarAndScheduler
ResourceRequestDetail(jar, scheduler.getResourceRequests())
}.toArray
LOG.info(s"GetRequestDetails " + result.mkString(";"))
sender ! result
case ScheduleTask(appJar, workerId, executorId, resource) =>
val result: List[TaskId] = taskSchedulers.get(appJar).map { scheduler =>
scheduler.schedule(workerId, executorId, resource)
}.getOrElse(List.empty)
LOG.info(s"ScheduleTask " + result.mkString(";"))
sender ! result
case ExecutorFailed(executorId) =>
val result: Option[ResourceRequestDetail] = taskSchedulers.
find(_._2.scheduledTasks(executorId).nonEmpty).map { jarAndScheduler =>
ResourceRequestDetail(jarAndScheduler._1, jarAndScheduler._2.executorFailed(executorId))
}
LOG.info(s"ExecutorFailed " + result.mkString(";"))
sender ! result
}
}
} | manuzhang/incubator-gearpump | streaming/src/main/scala/org/apache/gearpump/streaming/appmaster/JarScheduler.scala | Scala | apache-2.0 | 6,741 |
object DifferentWays extends App {
val MODULO: Int = 100000007;
case class Query(n: Int, k: Int)
import scala.collection.mutable.{HashMap ⇒ MMap}
val mem = MMap[Query, Int]() // let's cache prev results
def memCount(n: Int, k: Int) = mem getOrElseUpdate (Query(n, k), count(n, k))
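// Pascal's rule with memoisation: C(n, 0) = C(n, n) = 1 and C(n, k) = C(n-1, k-1) + C(n-1, k), all taken modulo MODULO.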
def count(n: Int, k: Int): Int = k match {
case 0 => 1
case `n` => 1
case _ => (memCount(n - 1, k - 1) + memCount(n - 1, k)) % MODULO
}
val t = io.StdIn.readInt // number of test cases
for (_ <- 1 to t) {
val Array(n, k) = io.StdIn.readLine.split(" ").map(_.toInt) // total number, number to be selected
println(count(n, k))
}
} | flopezlasanta/hackerrank | src/functional_programming/memoization_dp/DifferentWays.scala | Scala | mit | 642 |
package test.ch.bsisa.hyperbird.orders
import test.ch.bsisa.hyperbird.util.BaseSerialisationSpec
import ch.bsisa.hyperbird.orders.OrderUtil
import ch.bsisa.hyperbird.model.format.ElfinFormat
import org.specs2.mutable._
import play.api.test._
import play.api.test.Helpers._
/**
* Tests ch.bsisa.hyperbird.io.OrderUtilSpec.computeOrderFigures(carP : CARACTERISTIQUE) function
*
*
* Tip: from sbt play console run:
* {{{
* test-only test.ch.bsisa.hyperbird.orders.OrderUtilRoundingSpec
* }}}
* to have only the current test run.
*
* @author Patrick Refondini
*/
class OrderUtilRoundingSpec extends BaseSerialisationSpec with PlaySpecification {
/**
* Test Use Case provided by end-user.
*
* TOTAL_GROSS = 8726
*
* TOTAL_NET = TOTAL_GROSS + ( TOTAL_GROSS * -0.10 ) + ( 7853.40 * -0.02 ) = 8726.00 - 872.60 - 157.05 = 7696.35
* TAX_RATE computed amount = ( TOTAL_NET - 0.35 rounding ) * TAX_RATE = 7696.00 * 0.08 = 615.68, rounded to 615.70
* TOTAL_NET_INCL_TAX = TOTAL_NET - 0.35 + 615.70 = 8311.70
*/
val carInXml =
<CARACTERISTIQUE>
<CAR1 NOM="Surface au sol" UNITE="Ligne" VALEUR="100"/>
<CAR2 NOM="Surface au sol" UNITE="Ligne" VALEUR="100"/>
<CAR3 NOM="Surface au sol" UNITE="Taux TVA" VALEUR="100"/>
<CARSET>
<CAR NOM="" UNITE="" VALEUR="" POS="1"/>
<CAR NOM="" UNITE="" VALEUR="" POS="2"/>
</CARSET>
<FRACTION>
<L POS="1">
<C POS="1">TOTAL_GROSS</C>
<C POS="2">Total brut</C>
<C POS="3"/>
<C POS="4"/>
<C POS="5">8726</C>
<C POS="6">false</C>
</L>
<L POS="2">
<C POS="1">APPLIED_RATE</C>
<C POS="2">Rabais</C>
<C POS="3">-10.00</C>
<C POS="4">%</C>
<C POS="5">0.00</C>
<C POS="6">true</C>
</L>
<L POS="3">
<C POS="1">APPLIED_RATE</C>
<C POS="2">Escompte</C>
<C POS="3">-2.0</C>
<C POS="4">%</C>
<C POS="5">0.00</C>
<C POS="6">true</C>
</L>
<L POS="4">
<C POS="1">TOTAL_NET</C>
<C POS="2">Total net</C>
<C POS="3"/>
<C POS="4"/>
<C POS="5">9999.00</C>
<C POS="6">false</C>
</L>
<L POS="5">
<C POS="1">APPLIED_AMOUNT</C>
<C POS="2">Arrondi</C>
<C POS="3"/>
<C POS="4"/>
<C POS="5">-0.35</C>
<C POS="6">true</C>
</L>
<L POS="6">
<C POS="1">TAX_RATE</C>
<C POS="2">TVA</C>
<C POS="3">8.0</C>
<C POS="4">%</C>
<C POS="5">0.00</C>
<C POS="6">true</C>
</L>
<L POS="7">
<C POS="1">TOTAL_NET_INCL_TAX</C>
<C POS="2">Total net TTC</C>
<C POS="3"/>
<C POS="4"/>
<C POS="5">9999.00</C>
<C POS="6">false</C>
</L>
</FRACTION>
</CARACTERISTIQUE>
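// Parse the XML fixture into a CARACTERISTIQUE and run the order-figure computation under test.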
val carIn = ElfinFormat caracteristiqueFromXml carInXml
val carOut = OrderUtil computeOrderFigures carIn
println(">>>>> \\n" + ElfinFormat.caracteristiqueToJson(carOut) + "\\n")
// ==========================================================================
// Test input
// ==========================================================================
s"The number of carIn.FRACTION.L " should {
s"equal 7" in {
val nbLinesOpt = carIn.FRACTION.map { fractionMat =>
fractionMat.L.foldLeft(0)((acc, l) => acc + 1)
}
nbLinesOpt.get mustEqual 7
}
}
s"Amounts of carIn.FRACTION.L " should {
"equal 8726 for L POS='1' manual record (TOTAL_GROSS) " in {
val lPos1 = carIn.FRACTION.map(f => f.L(0)).get
OrderUtil getLineAmount lPos1 mustEqual Some(8726)
}
"equal -10% for L POS='2' submitted reduction rate " in {
val lPos2 = carIn.FRACTION.map(f => f.L(1)).get
OrderUtil getLineRate lPos2 mustEqual Some(-10d/100d)
}
"equal 0.0 for L POS='2' not yet computed reduction amount " in {
val lPos2 = carIn.FRACTION.map(f => f.L(1)).get
OrderUtil getLineAmount lPos2 mustEqual Some(0.0)
}
"equal -2% for L POS='3' submitted discount rate " in {
val lPos3 = carIn.FRACTION.map(f => f.L(2)).get
OrderUtil getLineRate lPos3 mustEqual Some(-2d/100d)
}
"equal 0.0 for L POS='3' not yet computed discount amount " in {
val lPos3 = carIn.FRACTION.map(f => f.L(2)).get
OrderUtil getLineAmount lPos3 mustEqual Some(0.0)
}
"equal 9999.0 for L POS='4' not yet computed net total amount " in {
val lPos4 = carIn.FRACTION.map(f => f.L(3)).get
OrderUtil getLineAmount lPos4 mustEqual Some(9999.0)
}
"equal -0.35 for L POS='5' rounding amount " in {
val lPos5 = carIn.FRACTION.map(f => f.L(4)).get
OrderUtil getLineAmount lPos5 mustEqual Some(-0.35)
}
"equal 8% for L POS='6' submitted VAT rate " in {
val lPos6 = carIn.FRACTION.map(f => f.L(5)).get
OrderUtil getLineRate lPos6 mustEqual Some(8.0d/100d)
}
"equal 0.0 for L POS='6' not yet computed VAT amount " in {
val lPos6 = carIn.FRACTION.map(f => f.L(5)).get
OrderUtil getLineAmount lPos6 mustEqual Some(0.0)
}
"equal 9999.0 for L POS='9' not yet computed Tax incl. net total " in {
val lPos7 = carIn.FRACTION.map(f => f.L(6)).get
OrderUtil getLineAmount lPos7 mustEqual Some(9999.00)
}
}
// ==========================================================================
// Test output
// ==========================================================================
s"The number of carOut.FRACTION.L " should {
s"equal 7" in {
val nbLinesOpt = carOut.FRACTION.map { fractionMat =>
fractionMat.L.foldLeft(0)((acc, l) => acc + 1)
}
nbLinesOpt.get mustEqual 7
}
}
s"Amounts of carOut.FRACTION.L " should {
"equal 8726 for L POS='1' manual record (TOTAL_GROSS) " in {
val lPos1 = carOut.FRACTION.map(f => f.L(0)).get
OrderUtil getLineAmount lPos1 mustEqual Some(8726)
}
"equal -10% for L POS='2' submitted reduction rate " in {
val lPos2 = carOut.FRACTION.map(f => f.L(1)).get
OrderUtil getLineRate lPos2 mustEqual Some(-10d/100d)
}
"equal -872.60 for L POS='2' computed reduction amount " in {
val lPos2 = carOut.FRACTION.map(f => f.L(1)).get
OrderUtil getLineAmount lPos2 mustEqual Some(-872.60)
}
"equal -2% for L POS='3' submitted discount rate " in {
val lPos3 = carOut.FRACTION.map(f => f.L(2)).get
OrderUtil getLineRate lPos3 mustEqual Some(-2d/100d)
}
"equal (7853.4 * -0.02) −157.068 => −157.05 for L POS='3' computed discount amount " in {
val lPos3 = carOut.FRACTION.map(f => f.L(2)).get
OrderUtil getLineAmount lPos3 mustEqual Some(-157.05)
}
"equal 7696.35 for L POS='4' computed net total amount " in {
val lPos4 = carOut.FRACTION.map(f => f.L(3)).get
OrderUtil getLineAmount lPos4 mustEqual Some(7696.35)
}
"equal -0.35 for L POS='5' rounding amount " in {
val lPos5 = carOut.FRACTION.map(f => f.L(4)).get
OrderUtil getLineAmount lPos5 mustEqual Some(-0.35)
}
"equal 8% for L POS='6' submitted VAT rate " in {
val lPos6 = carOut.FRACTION.map(f => f.L(5)).get
OrderUtil getLineRate lPos6 mustEqual Some(8.0d/100d)
}
"equal (7696 * 0.08 = 615.68) => 615.70 for L POS='6' computed VAT amount " in {
val lPos6 = carOut.FRACTION.map(f => f.L(5)).get
OrderUtil getLineAmount lPos6 mustEqual Some(615.70)
}
"equal 8311.70 = 7696.35 - 0.35 + 615.70 for L POS='9' computed Tax incl. net total " in {
val lPos7 = carOut.FRACTION.map(f => f.L(6)).get
OrderUtil getLineAmount lPos7 mustEqual Some(8311.70)
}
}
} | bsisa/hb-api | test/test/ch/bsisa/hyperbird/orders/OrderUtilRoundingSpec.scala | Scala | gpl-2.0 | 7,850 |
class Foo[T]
trait Prepend {
type Out
}
object Test {
def foo()(implicit ev: Prepend): Foo[ev.Out] = ???
def test: Unit = {
foo(): Foo[Any] // error: found: Prepend => Foo[_] required: Foo[Any]
}
implicit val p: Prepend = ???
}
| som-snytt/dotty | tests/neg/i2672.scala | Scala | apache-2.0 | 247 |
package fommil.stalagmite.benchmarks
import org.openjdk.jmh.annotations.Benchmark
import testing.meta._
import testing.{ caseclass, memoised, optimiseheap, weakmemoised }
class ApplyBenchmark {
import BenchmarkData._
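// Each benchmark constructs instances from the pre-generated input tuples via apply(), comparing the plain case class with the alternative encodings.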
// case class
@Benchmark
def caseClass(cs: CaseClassData): IndexedSeq[FooCaseClass[String]] =
cs.data.map { case (a, b, c, d) => FooCaseClass(a, b, c, d) }
@Benchmark
def caseClassSpec(cs: CaseClassData): IndexedSeq[caseclass.Foo[String]] =
cs.data.map { case (a, b, c, d) => caseclass.Foo(a, b, c, d) }
@Benchmark
def caseClassMeta(cs: CaseClassData): IndexedSeq[FooMeta[String]] =
cs.data.map { case (a, b, c, d) => FooMeta(a, b, c, d) }
// optimize heap
@Benchmark
def optimizeHeapCaseClass(
oh: OptimizeHeapData
): IndexedSeq[FooOptimizeHeapCaseClass] =
oh.data.map { case (a, b, c) => FooOptimizeHeapCaseClass(a, b, c) }
@Benchmark
def optimizeHeapSpec(oh: OptimizeHeapData): IndexedSeq[optimiseheap.Foo] =
oh.data.map { case (a, b, c) => optimiseheap.Foo(a, b, c) }
@Benchmark
def optimizeHeapMeta(oh: OptimizeHeapData): IndexedSeq[FooMetaOptimiseHeap] =
oh.data.map { case (a, b, c) => FooMetaOptimiseHeap(a, b, c) }
// memoised
@Benchmark
def memoisedCaseClass(m: MemoisedData): IndexedSeq[FooMemoisedCaseClass] =
m.data.map { case (a, b) => FooMemoisedCaseClass(a, b) }
@Benchmark
def memoisedSpec(m: MemoisedData): IndexedSeq[memoised.Foo] =
m.data.map { case (a, b) => memoised.Foo(a, b) }
@Benchmark
def memoisedMeta(m: MemoisedData): IndexedSeq[FooMetaMemoised] =
m.data.map { case (a, b) => FooMetaMemoised(a, b) }
@Benchmark
def memoisedWeakSpec(m: MemoisedData): IndexedSeq[weakmemoised.Foo] =
m.data.map { case (a, b) => weakmemoised.Foo(a, b) }
}
| vovapolu/scala-data-classes | src/jmh/scala/fommil/stalagmite/benchmarks/ApplyBenchmark.scala | Scala | lgpl-3.0 | 1,792 |
/*
* Copyright © 2013 by Jörg D. Weisbarth
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License 3 as published by
* the Free Software Foundation;
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY.
*
* See the License.txt file for more details.
*/
package sim.app.antDefenseAIs.model
import sim.engine.SimState
private[antDefenseAIs] object AntQueen {
val startRessources = 15 /** Amount of resources a tribe starts with. Should be >= `productionCost` */
val productionTime: Int = 10 /** time to produce an ant*/
val productionCost: Int = 15 /** costs to produce an ant */
val maximumAge: Int = Integer.MAX_VALUE /** Maximum age of a queen (in steps) */
/**
* Creates an NormalAntWorker
*
* @param tribeID Tribe the ant belongs to
* @param world World the ant lives on
* @return NormalAntWorker
*/
private def apply(tribeID: Int, world: World, conf: BehaviourConf, antGen: AntGenerator) =
new AntQueen(tribeID, world, conf, antGen)
}
import AntQueen._
/**
* Queen of a colony
*
* @param antGen Constructor of the ant type the queen should use for new ants
*/
private[antDefenseAIs] final class AntQueen(
override val tribeID: Int,
override val world: World,
val behaviourConf: BehaviourConf,
private val antGen: AntGenerator) extends Ant with StandardPheroSystem {
override def maximumAge: Int = AntQueen.maximumAge
override val alpha = behaviourConf.alpha
override val explorationRate = behaviourConf.explorationRate
override val gamma = behaviourConf.gamma
_inBackpack = startRessources /** Resources the queen owns */
/*
0 means: no ant being produced
every other value between 1 and productionTime is the number of
remaining time units until the production is completed
*/
private var productionState: Int = 0
/**
* Used to give the queen resources
*
* @param amount amount of resources the queen receives
*/
def receiveRes(amount: Int) {
assert(amount >= 0)
_inBackpack += amount
}
/**
* Queen places all owned resources at her current position
*/
def dropDeposit() {
val res = world.resOn(currentPos) + _inBackpack
world.setResOn(currentPos, res)
_inBackpack = 0
}
/**
* Queen tries to create new ant.
*
* Success iff enough resources available and maximum population not reached
*/
override def step(state: SimState) {
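// Production state machine: start building an ant when idle and resources suffice, place the new ant when production completes, otherwise advance the in-progress production.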
assert(0 <= productionState && productionState <= productionTime)
val tmp = _inBackpack - productionCost
if (tmp >= 0 && productionState == 0) { // enough resources and no other construction in progress?
_inBackpack = tmp
productionState += 1
}
else if (productionState >= productionTime - 1) { // production completed?
val ant: Ant = antGen(this)
try {
world.placeNewAnt(ant)
productionState = 0
} catch {
case e: IllegalStateException => println(e.getMessage)
}
}
else if (productionState > 0 && productionState < productionTime - 1) // production started and not ready?
productionState += 1 // advance in construction
}
} | joergdw/antconflictbeh | src/sim/app/antDefenseAIs/model/AntQueen.scala | Scala | lgpl-3.0 | 3,231 |
package io.jfc
import algebra.Eq
import cats.Show
sealed abstract class CursorOpElement extends Product with Serializable
object CursorOpElement {
case object CursorOpLeft extends CursorOpElement
case object CursorOpRight extends CursorOpElement
case object CursorOpFirst extends CursorOpElement
case object CursorOpLast extends CursorOpElement
case object CursorOpUp extends CursorOpElement
case class CursorOpLeftN(n: Int) extends CursorOpElement
case class CursorOpRightN(n: Int) extends CursorOpElement
case class CursorOpLeftAt(p: Json => Boolean) extends CursorOpElement
case class CursorOpRightAt(p: Json => Boolean) extends CursorOpElement
case class CursorOpFind(p: Json => Boolean) extends CursorOpElement
case class CursorOpField(f: String) extends CursorOpElement
case class CursorOpDownField(f: String) extends CursorOpElement
case object CursorOpDownArray extends CursorOpElement
case class CursorOpDownAt(p: Json => Boolean) extends CursorOpElement
case class CursorOpDownN(n: Int) extends CursorOpElement
case object CursorOpDeleteGoParent extends CursorOpElement
case object CursorOpDeleteGoLeft extends CursorOpElement
case object CursorOpDeleteGoRight extends CursorOpElement
case object CursorOpDeleteGoFirst extends CursorOpElement
case object CursorOpDeleteGoLast extends CursorOpElement
case class CursorOpDeleteGoField(f: String) extends CursorOpElement
case object CursorOpDeleteLefts extends CursorOpElement
case object CursorOpDeleteRights extends CursorOpElement
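// Compact symbolic rendering of each cursor operation, used when printing cursor histories.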
implicit val showCursorOpElement: Show[CursorOpElement] = Show.show {
case CursorOpLeft => "<-"
case CursorOpRight => "->"
case CursorOpFirst => "|<-"
case CursorOpLast => "->|"
case CursorOpUp => "_/"
case CursorOpLeftN(n) => "-<-:(" + n + ")"
case CursorOpRightN(n) => ":->-(" + n + ")"
case CursorOpLeftAt(_) => "?<-:"
case CursorOpRightAt(_) => ":->?"
case CursorOpFind(_) => "find"
case CursorOpField(f) => "--(" + f + ")"
case CursorOpDownField(f) => "--\\\\(" + f + ")"
case CursorOpDownArray => "\\\\\\\\"
case CursorOpDownAt(_) => "-\\\\"
case CursorOpDownN(n) => "=\\\\(" + n + ")"
case CursorOpDeleteGoParent => "!_/"
case CursorOpDeleteGoLeft => "<-!"
case CursorOpDeleteGoRight => "!->"
case CursorOpDeleteGoFirst => "|<-!"
case CursorOpDeleteGoLast => "!->|"
case CursorOpDeleteGoField(f) => "!--(" + f + ")"
case CursorOpDeleteLefts => "!<"
case CursorOpDeleteRights => ">!"
}
implicit val eqCursorOpElement: Eq[CursorOpElement] = Eq.fromUniversalEquals
}
| non/circe | core/src/main/scala/io/jfc/CursorOpElement.scala | Scala | apache-2.0 | 2,601 |
/*
* Copyright (c) 2013 Daniel Krzywicki <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package pl.edu.agh.scalamas.emas
import pl.edu.agh.scalamas.emas.EmasTypes.{Death, _}
import pl.edu.agh.scalamas.emas.fight.FightStrategy
import pl.edu.agh.scalamas.emas.reproduction.ReproductionStrategy
import pl.edu.agh.scalamas.genetic.GeneticProblem
import pl.edu.agh.scalamas.mas.LogicTypes.Migration
import pl.edu.agh.scalamas.mas.logic.MeetingsStrategy
import pl.edu.agh.scalamas.random.RandomGeneratorComponent
import pl.edu.agh.scalamas.util.Util._
/**
* Default EMAS meetings component.
*
* Death meetings yield no agent.
* Reproduction and fight meetings group agents according to the capacity of the meeting and delegate to strategy functions.
* Migration is a no-op by default and is left to the agent environment to override if possible.
*
* After reproduction, the stats are updated with the number of fitness evaluations that happened and the best fitness among the new agents.
*/
trait EmasMeetings extends MeetingsStrategy {
this: GeneticProblem
with EmasStats
with FightStrategy
with ReproductionStrategy
with RandomGeneratorComponent =>
def meetingsStrategy = DefaultEmasMeeting
object DefaultEmasMeeting extends MeetingsProvider {
implicit val ordering = genetic.ordering
implicit val rand = randomData
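// Maps a meeting type and its participating agents to the resulting list of agents.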
def meetingsFunction = {
case (Death(_), _) => List.empty[Agent[Genetic]]
case (Fight(cap), agents) =>
checked[Genetic](agents).shuffled.grouped(cap).flatMap(fightStrategy.apply).toList
case (Reproduction(cap), agents) =>
val newAgents = checked[Genetic](agents).shuffled.grouped(cap).flatMap(reproductionStrategy.apply).toList
stats.update((newAgents.maxBy(_.fitness).fitness, agents.size.toLong))
newAgents
case (Migration(_), agents) => agents
}
}
} | eleaar/scala-mas | emas/src/main/scala/pl/edu/agh/scalamas/emas/EmasMeetings.scala | Scala | mit | 2,931 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.pipes.aggregation
import org.junit.Test
import org.junit.Assert._
import org.neo4j.cypher.internal.commands.expressions.Expression
class CountTest extends AggregateTest {
def createAggregator(inner: Expression) = new CountFunction(inner)
@Test def testCounts() {
val result = aggregateOn(1, null, "foo")
assertEquals(2L, result)
assertTrue(result.isInstanceOf[Long])
}
} | dksaputra/community | cypher/src/test/scala/org/neo4j/cypher/internal/pipes/aggregation/CountTest.scala | Scala | gpl-3.0 | 1,223 |
package com.github.btmorr.harmonia.models
case object SimpleLookup extends Model[String, String] {
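// Naive keyword lookup: matches on the lower-cased input and returns a canned reply.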
def apply(in: String) = in.toLowerCase match {
case i if in contains "weather" => "I'm working on learning how to look up the weather"
case i if in contains "how are you" => "I'm doing well. How's about you?"
case i if in contains "ring" => "If you like it, you should've put a ring on it"
case _ => "I'm not sure what you're trying to say"
}
}
| btmorr/ideal-spork | mastermind/src/main/scala/com/github/btmorr/harmonia/models/SimpleLookup.scala | Scala | gpl-3.0 | 465 |
/*
Copyright (c) 2017, Qvantel
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Qvantel nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL Qvantel BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.qvantel.jsonapi.spray
import org.specs2.mutable.Specification
import _root_.spray.http.StatusCodes._
import _root_.spray.http.{ContentType, MediaTypes}
import _root_.spray.json.DefaultJsonProtocol._
import _root_.spray.json.lenses.JsonLenses._
import _root_.spray.json.{JsArray, JsonParser}
import _root_.spray.routing.Directives
import _root_.spray.testkit.Specs2RouteTest
class SprayExceptionHandlerSpec extends Specification with Directives with Specs2RouteTest {
class TestSprayExceptionHandler extends SprayExceptionHandler
val testSprayExceptionHandler = new TestSprayExceptionHandler
private[this] val wrap = handleExceptions(testSprayExceptionHandler.defaultSprayExceptionHandler)
val JSON = ContentType(MediaTypes.`application/vnd.api+json`, None)
"The spray ExceptionHandler" should {
"Respond with InternalServerError and specified error message" in {
Get() ~> wrap {
failWith(new Exception("Specified error message"))
} ~> check {
status must_== InternalServerError
contentType must_== JSON
val json = JsonParser(body.asString)
val error = json.extract[JsArray]('errors).elements.headOption
error.map(_.extract[String]('detail)) must beSome("Specified error message")
}
}
"Respond with InternalServerError and default error message" in {
Get() ~> wrap {
failWith(new Exception)
} ~> check {
status must_== InternalServerError
contentType must_== JSON
val json = JsonParser(body.asString)
val error = json.extract[JsArray]('errors).elements.headOption
error.map(_.extract[String]('detail)) must beSome(InternalServerError.defaultMessage)
}
}
"None should return 404 with proper jsonapi.org error object" in {
Get() ~> wrap {
val x: Option[String] = None
SprayExceptionHandler.noneHandler {
complete(x)
}
} ~> check {
status must_== NotFound
contentType must_== JSON
val json = JsonParser(body.asString)
val error = json.extract[JsArray]('errors).elements.headOption
error.map(_.extract[String]('detail)) must beSome(NotFound.defaultMessage)
error.map(_.extract[String]('title)) must beSome(NotFound.reason)
}
}
}
}
| Doikor/jsonapi-scala | spray/src/test/scala/com/qvantel/jsonapi/spray/SprayExceptionHandlerSpec.scala | Scala | bsd-3-clause | 3,821 |
package im.actor.server.enrich
import scala.concurrent.ExecutionContext
import akka.util.Timeout
import slick.dbio._
import im.actor.api.rpc.Implicits._
import im.actor.api.rpc.messaging.{ ApiMessage, UpdateMessageContentChanged }
import im.actor.server.models.{ Peer, PeerType }
import im.actor.server.persist
import im.actor.server.sequence.SeqUpdatesExtension
import im.actor.server.sequence.SeqState
import im.actor.server.user.{ UserOffice, UserViewRegion }
object UpdateHandler {
def getHandler(fromPeer: Peer, toPeer: Peer, randomId: Long)(
implicit
ec: ExecutionContext,
timeout: Timeout,
userViewRegion: UserViewRegion,
seqUpdExt: SeqUpdatesExtension
): UpdateHandler =
toPeer.typ match {
case PeerType.Group ⇒ new GroupHandler(toPeer, randomId)
case PeerType.Private ⇒ new PrivateHandler(fromPeer, toPeer, randomId)
}
}
abstract class UpdateHandler(val randomId: Long) {
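  /** Persists the new message content for all affected history entries. */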
def handleDbUpdate(message: ApiMessage): DBIO[Int]
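  /** Broadcasts UpdateMessageContentChanged to the affected users and returns the resulting sequence states. */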
def handleUpdate(message: ApiMessage): DBIO[Seq[SeqState]]
}
class PrivateHandler(fromPeer: Peer, toPeer: Peer, randomId: Long)(
implicit
ec: ExecutionContext,
timeout: Timeout,
userViewRegion: UserViewRegion,
seqUpdExt: SeqUpdatesExtension
) extends UpdateHandler(randomId) {
require(fromPeer.typ == PeerType.Private
&& toPeer.typ == PeerType.Private, "Peers must be private")
def handleUpdate(message: ApiMessage): DBIO[Seq[SeqState]] =
DBIO.from(for {
fromUpdate ← UserOffice.broadcastUserUpdate(
fromPeer.id,
UpdateMessageContentChanged(toPeer.asStruct, randomId, message), None, false, deliveryId = Some(s"msgcontent_${randomId}")
)
toUpdate ← UserOffice.broadcastUserUpdate(
toPeer.id,
UpdateMessageContentChanged(fromPeer.asStruct, randomId, message), None, false, deliveryId = Some(s"msgcontent_${randomId}")
)
} yield Seq(fromUpdate, toUpdate).flatten)
def handleDbUpdate(message: ApiMessage): DBIO[Int] = persist.HistoryMessage.updateContentAll(
userIds = Set(fromPeer.id, toPeer.id),
randomId = randomId,
peerType = PeerType.Private,
peerIds = Set(fromPeer.id, toPeer.id),
messageContentHeader = message.header,
messageContentData = message.toByteArray
)
}
class GroupHandler(groupPeer: Peer, randomId: Long)(
implicit
ec: ExecutionContext,
timeout: Timeout,
userViewRegion: UserViewRegion,
seqUpdExt: SeqUpdatesExtension
) extends UpdateHandler(randomId) {
require(groupPeer.typ == PeerType.Group, "Peer must be a group")
def handleUpdate(message: ApiMessage): DBIO[Seq[SeqState]] = {
val update = UpdateMessageContentChanged(groupPeer.asStruct, randomId, message)
for {
usersIds ← persist.GroupUser.findUserIds(groupPeer.id)
seqstate ← DBIO.from(UserOffice.broadcastUsersUpdate(usersIds.toSet, update, None, false, deliveryId = Some(s"msgcontent_${randomId}")))
} yield seqstate
}
def handleDbUpdate(message: ApiMessage): DBIO[Int] =
for {
usersIds ← persist.GroupUser.findUserIds(groupPeer.id)
result ← persist.HistoryMessage.updateContentAll(
userIds = usersIds.toSet,
randomId = randomId,
peerType = PeerType.Group,
peerIds = Set(groupPeer.id),
messageContentHeader = message.header,
messageContentData = message.toByteArray
)
} yield result
} | damoguyan8844/actor-platform | actor-server/actor-enrich/src/main/scala/im/actor/server/enrich/UpdateHandler.scala | Scala | mit | 3,468 |
package com.komanov.uuid
import java.util.UUID
import scala.annotation.tailrec
object UuidScala3Utils {
def fromStringFast(s: String): UUID = {
val component1EndIndex = indexOfHyphen(s, 0)
val component2EndIndex = indexOfHyphen(s, component1EndIndex + 1)
val component3EndIndex = indexOfHyphen(s, component2EndIndex + 1)
val component4EndIndex = indexOfHyphen(s, component3EndIndex + 1)
if (s.indexOf('-', component4EndIndex + 1) != -1) {
throw new IllegalArgumentException(s"Too much hyphens in a string: $s")
}
// This is a copy-paste from UUID.fromString implementation
var mostSigBits: Long = parseHex(s, 0, component1EndIndex)
mostSigBits <<= 16
mostSigBits |= parseHex(s, component1EndIndex + 1, component2EndIndex)
mostSigBits <<= 16
mostSigBits |= parseHex(s, component2EndIndex + 1, component3EndIndex)
var leastSigBits: Long = parseHex(s, component3EndIndex + 1, component4EndIndex)
leastSigBits <<= 48
leastSigBits |= parseHex(s, component4EndIndex + 1, s.length)
new UUID(mostSigBits, leastSigBits)
}
private def indexOfHyphen(s: String, from: Int): Int = {
val index = s.indexOf('-', from)
if (index == -1) {
throw new IllegalArgumentException(s"Expected 4 hyphens (-) in a string: $s")
}
index
}
private def parseHex(s: String, from: Int, to: Int): Long = {
if (to <= from) {
throw new NumberFormatException(s"An empty component found in $s")
}
if (to - from > 16) {
throw new NumberFormatException(s"Too long component found in $s: ${s.substring(from, to)}")
}
parseHexRec(s, from, from, to, 0L)
}
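  // Tail-recursive accumulator over the characters in [from, to): multiplies the running
  // value by 16 for each hex digit and allows a single leading '+' sign.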
@tailrec
private def parseHexRec(s: String, i: Int, from: Int, to: Int, prevResult: Long): Long = {
if (i < to) {
val ch = s.charAt(i)
val digit = Character.digit(ch, 16)
if (digit >= 0) {
parseHexRec(s, i + 1, from, to, prevResult * 16 + digit)
} else if (i == from && ch == '+') {
parseHexRec(s, i + 1, from, to, prevResult)
} else {
throw new NumberFormatException(s"Unknown character $ch in $s")
}
} else {
prevResult
}
}
}
| dkomanov/stuff | src/com/komanov/uuid/UuidScala3Utils.scala | Scala | mit | 2,172 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.scaledaction.core.kafka
import com.scaledaction.core.config.{ AppConfig, HasAppConfig }
import com.typesafe.config.Config
import java.util.Properties
import org.apache.kafka.clients.producer.ProducerConfig
import scala.util.Try
import com.scaledaction.core.config.CoreConfig
class KafkaConfig(
val brokers: String, // brokers is a comma-separated list
val topic: String,
val keySerializer: String,
val valueSerializer: String,
rootConfig: Config) extends AppConfig(rootConfig: Config) {
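  /** Builds the minimal Properties (bootstrap servers plus key/value serializers) needed to construct a Kafka producer. */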
def toProducerProperties: Properties = {
val props = new Properties()
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, brokers)
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySerializer)
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer)
props
}
val kafkaParams = Map[String, String]("metadata.broker.list" -> brokers)
val topics = topic.split(",").toSet
override def toString(): String = s"brokers: ${brokers}, topic: ${topic}, keySerializer: ${keySerializer}, valueSerializer: ${valueSerializer}"
}
trait HasKafkaConfig extends HasAppConfig {
private val CONFIG_NAME = "kafka"
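  // Example configuration block, assuming the usual Typesafe-config/HOCON layout read by getConfig.
  // Key names match the getRequiredValue calls below; the broker list and serializer classes are illustrative:
  //
  // kafka {
  //   brokers = "localhost:9092"
  //   topic = "events"
  //   key_serializer = "org.apache.kafka.common.serialization.StringSerializer"
  //   value_serializer = "org.apache.kafka.common.serialization.StringSerializer"
  // }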
def getKafkaConfig: KafkaConfig = getKafkaConfig(getConfig(CONFIG_NAME))
def getKafkaConfig(rootName: String): KafkaConfig = getKafkaConfig(getConfig(rootName))
private def getKafkaConfig(kafka: CoreConfig): KafkaConfig = {
val brokers = getRequiredValue(kafka, "brokers")
val topic = getRequiredValue(kafka, "topic")
val keySerializer = getRequiredValue(kafka, "key_serializer")
val valueSerializer = getRequiredValue(kafka, "value_serializer")
new KafkaConfig(brokers, topic, keySerializer, valueSerializer, kafka.config)
}
def listKafkaConfig = listConfig(getConfig(CONFIG_NAME))
}
| benburford/core | core/src/main/scala/com/scaledaction/core/kafka/KafkaConfig.scala | Scala | apache-2.0 | 2,584 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.rdd.read
import java.io.StringWriter
import htsjdk.samtools.{ SAMFileHeader, SAMTextHeaderCodec, SAMTextWriter, ValidationStringency }
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{ FileSystem, Path }
import org.apache.hadoop.io.LongWritable
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.rdd.MetricsContext._
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.bdgenomics.adam.algorithms.consensus.{ ConsensusGenerator, ConsensusGeneratorFromReads }
import org.bdgenomics.adam.converters.AlignmentRecordConverter
import org.bdgenomics.adam.instrumentation.Timers._
import org.bdgenomics.adam.models._
import org.bdgenomics.adam.rdd.ADAMContext._
import org.bdgenomics.adam.rdd.read.realignment.RealignIndels
import org.bdgenomics.adam.rdd.read.recalibration.BaseQualityRecalibration
import org.bdgenomics.adam.rdd.{ ADAMSaveAnyArgs, ADAMSequenceDictionaryRDDAggregator }
import org.bdgenomics.adam.rich.RichAlignmentRecord
import org.bdgenomics.adam.util.MapTools
import org.bdgenomics.formats.avro._
import org.seqdoop.hadoop_bam.SAMRecordWritable
class AlignmentRecordRDDFunctions(rdd: RDD[AlignmentRecord])
extends ADAMSequenceDictionaryRDDAggregator[AlignmentRecord](rdd) {
/**
* Calculates the subset of the RDD whose AlignmentRecords overlap the corresponding
* query ReferenceRegion. Equality of the reference sequence (to which these are aligned)
* is tested by string equality of the names. AlignmentRecords whose 'getReadMapped' method
* return 'false' are ignored.
*
* The end of the record against the reference sequence is calculated from the cigar string
* using the ADAMContext.referenceLengthFromCigar method.
*
* @param query The query region, only records which overlap this region are returned.
* @return The subset of AlignmentRecords (corresponding to either primary or secondary alignments) that
* overlap the query region.
*/
def filterByOverlappingRegion(query: ReferenceRegion): RDD[AlignmentRecord] = {
def overlapsQuery(rec: AlignmentRecord): Boolean =
rec.getReadMapped &&
rec.getContig.getContigName.toString == query.referenceName &&
rec.getStart < query.end &&
rec.getEnd > query.start
rdd.filter(overlapsQuery)
}
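  /**
   * Saves this RDD in SAM or BAM format if the output path ends in ".sam" or ".bam".
   *
   * @return True if the data was saved, false if the extension matched neither format.
   */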
def maybeSaveBam(args: ADAMSaveAnyArgs,
isSorted: Boolean = false): Boolean = {
if (args.outputPath.endsWith(".sam")) {
log.info("Saving data in SAM format")
rdd.adamSAMSave(args.outputPath, asSingleFile = args.asSingleFile)
true
} else if (args.outputPath.endsWith(".bam")) {
log.info("Saving data in BAM format")
rdd.adamSAMSave(args.outputPath, asSam = false, asSingleFile = args.asSingleFile)
true
} else
false
}
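  /**
   * Saves this RDD in FASTQ format if the output path ends in ".fq", ".fastq" or ".ifq".
   *
   * @return True if the data was saved, false otherwise.
   */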
def maybeSaveFastq(args: ADAMSaveAnyArgs): Boolean = {
if (args.outputPath.endsWith(".fq") || args.outputPath.endsWith(".fastq") ||
args.outputPath.endsWith(".ifq")) {
rdd.adamSaveAsFastq(args.outputPath, sort = args.sortFastqOutput)
true
} else
false
}
def adamAlignedRecordSave(args: ADAMSaveAnyArgs) = {
maybeSaveBam(args) || { rdd.adamParquetSave(args); true }
}
def adamSave(args: ADAMSaveAnyArgs,
isSorted: Boolean = false) = {
maybeSaveBam(args, isSorted) || maybeSaveFastq(args) || { rdd.adamParquetSave(args); true }
}
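  /**
   * Renders the reads as a single SAM-formatted string, header included.
   * Collects all reads to the driver, so this is only suitable for small datasets.
   */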
def adamSAMString: String = {
// convert the records
val (convertRecords: RDD[SAMRecordWritable], header: SAMFileHeader) = rdd.adamConvertToSAM()
val records = convertRecords.coalesce(1, shuffle = true).collect()
val samHeaderCodec = new SAMTextHeaderCodec
samHeaderCodec.setValidationStringency(ValidationStringency.SILENT)
val samStringWriter = new StringWriter()
samHeaderCodec.encode(samStringWriter, header);
val samWriter: SAMTextWriter = new SAMTextWriter(samStringWriter)
//samWriter.writeHeader(stringHeaderWriter.toString)
records.foreach(record => samWriter.writeAlignment(record.get))
samStringWriter.toString
}
/**
* Saves an RDD of ADAM read data into the SAM/BAM format.
*
* @param filePath Path to save files to.
* @param asSam Selects whether to save as SAM or BAM. The default value is true (save in SAM format).
* @param isSorted If the output is sorted, this will modify the header.
*/
def adamSAMSave(filePath: String,
asSam: Boolean = true,
asSingleFile: Boolean = false,
isSorted: Boolean = false) = SAMSave.time {
// convert the records
val (convertRecords: RDD[SAMRecordWritable], header: SAMFileHeader) = rdd.adamConvertToSAM(isSorted)
// add keys to our records
val withKey =
if (asSingleFile) convertRecords.keyBy(v => new LongWritable(v.get.getAlignmentStart)).coalesce(1)
else convertRecords.keyBy(v => new LongWritable(v.get.getAlignmentStart))
val bcastHeader = rdd.context.broadcast(header)
val mp = rdd.mapPartitionsWithIndex((idx, iter) => {
log.info(s"Setting ${if (asSam) "SAM" else "BAM"} header for partition $idx")
val header = bcastHeader.value
synchronized {
// perform map partition call to ensure that the SAM/BAM header is set on all
// nodes in the cluster; see:
// https://github.com/bigdatagenomics/adam/issues/353,
// https://github.com/bigdatagenomics/adam/issues/676
asSam match {
case true =>
ADAMSAMOutputFormat.clearHeader()
ADAMSAMOutputFormat.addHeader(header)
log.info(s"Set SAM header for partition $idx")
case false =>
ADAMBAMOutputFormat.clearHeader()
ADAMBAMOutputFormat.addHeader(header)
log.info(s"Set BAM header for partition $idx")
}
}
Iterator[Int]()
}).count()
// force value check, ensure that computation happens
if (mp != 0) {
log.error("Had more than 0 elements after map partitions call to set VCF header across cluster.")
}
// attach header to output format
asSam match {
case true =>
ADAMSAMOutputFormat.clearHeader()
ADAMSAMOutputFormat.addHeader(header)
log.info(s"Set SAM header on driver")
case false =>
ADAMBAMOutputFormat.clearHeader()
ADAMBAMOutputFormat.addHeader(header)
log.info(s"Set BAM header on driver")
}
// write file to disk
val conf = rdd.context.hadoopConfiguration
asSam match {
case true =>
withKey.saveAsNewAPIHadoopFile(
filePath,
classOf[LongWritable],
classOf[SAMRecordWritable],
classOf[InstrumentedADAMSAMOutputFormat[LongWritable]],
conf
)
case false =>
withKey.saveAsNewAPIHadoopFile(
filePath,
classOf[LongWritable],
classOf[SAMRecordWritable],
classOf[InstrumentedADAMBAMOutputFormat[LongWritable]],
conf
)
}
if (asSingleFile) {
log.info(s"Writing single ${if (asSam) "SAM" else "BAM"} file (not Hadoop-style directory)")
val conf = new Configuration()
val fs = FileSystem.get(conf)
      val outputParentDir = filePath.substring(0, filePath.lastIndexOf("/") + 1)
      val tmpPath = outputParentDir + "tmp" + System.currentTimeMillis().toString
fs.rename(new Path(filePath + "/part-r-00000"), new Path(tmpPath))
fs.delete(new Path(filePath), true)
fs.rename(new Path(tmpPath), new Path(filePath))
}
}
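  /** Extracts the sequence (contig) records referenced by a single read, for building the sequence dictionary. */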
def getSequenceRecordsFromElement(elem: AlignmentRecord): scala.collection.Set[SequenceRecord] = {
SequenceRecord.fromADAMRecord(elem)
}
/**
* Collects a dictionary summarizing the read groups in an RDD of ADAMRecords.
*
* @return A dictionary describing the read groups in this RDD.
*/
def adamGetReadGroupDictionary(): RecordGroupDictionary = {
val rgNames = rdd.flatMap(RecordGroup(_))
.distinct()
.collect()
.toSeq
new RecordGroupDictionary(rgNames)
}
/**
* Converts an RDD of ADAM read records into SAM records.
*
* @return Returns a SAM/BAM formatted RDD of reads, as well as the file header.
*/
def adamConvertToSAM(isSorted: Boolean = false): (RDD[SAMRecordWritable], SAMFileHeader) = ConvertToSAM.time {
// collect global summary data
val sd = rdd.adamGetSequenceDictionary()
val rgd = rdd.adamGetReadGroupDictionary()
// create conversion object
val adamRecordConverter = new AlignmentRecordConverter
// create header
val header = adamRecordConverter.createSAMHeader(sd, rgd)
if (isSorted) {
header.setSortOrder(SAMFileHeader.SortOrder.coordinate)
}
// broadcast for efficiency
val hdrBcast = rdd.context.broadcast(SAMFileHeaderWritable(header))
// map across RDD to perform conversion
val convertedRDD: RDD[SAMRecordWritable] = rdd.map(r => {
// must wrap record for serializability
val srw = new SAMRecordWritable()
srw.set(adamRecordConverter.convert(r, hdrBcast.value))
srw
})
(convertedRDD, header)
}
/**
* Cuts reads into _k_-mers, and then counts the number of occurrences of each _k_-mer.
*
* @param kmerLength The value of _k_ to use for cutting _k_-mers.
* @return Returns an RDD containing k-mer/count pairs.
*
* @see adamCountQmers
*/
def adamCountKmers(kmerLength: Int): RDD[(String, Long)] = {
rdd.flatMap(r => {
// cut each read into k-mers, and attach a count of 1L
r.getSequence
.toString
.sliding(kmerLength)
.map(k => (k, 1L))
}).reduceByKey((k1: Long, k2: Long) => k1 + k2)
}
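  /** Sorts reads by reference position; unmapped reads are keyed by read name and sort after all mapped reads. */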
def adamSortReadsByReferencePosition(): RDD[AlignmentRecord] = SortReads.time {
log.info("Sorting reads by reference position")
// NOTE: In order to keep unmapped reads from swamping a single partition
// we sort the unmapped reads by read name. We prefix with tildes ("~";
// ASCII 126) to ensure that the read name is lexicographically "after" the
// contig names.
rdd.keyBy(r => {
if (r.getReadMapped) {
ReferencePosition(r)
} else {
ReferencePosition(s"~~~${r.getReadName}", 0)
}
}).sortByKey().map(_._2)
}
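  /** Marks duplicate reads, delegating to the MarkDuplicates implementation. */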
def adamMarkDuplicates(): RDD[AlignmentRecord] = MarkDuplicatesInDriver.time {
MarkDuplicates(rdd)
}
/**
* Runs base quality score recalibration on a set of reads. Uses a table of
* known SNPs to mask true variation during the recalibration process.
*
* @param knownSnps A table of known SNPs to mask valid variants.
* @param observationDumpFile An optional local path to dump recalibration
* observations to.
* @return Returns an RDD of recalibrated reads.
*/
def adamBQSR(knownSnps: Broadcast[SnpTable],
observationDumpFile: Option[String] = None,
validationStringency: ValidationStringency = ValidationStringency.LENIENT): RDD[AlignmentRecord] = BQSRInDriver.time {
BaseQualityRecalibration(rdd, knownSnps, observationDumpFile, validationStringency)
}
/**
   * Realigns indels using a consensus-based heuristic.
*
* @see RealignIndels
*
* @param isSorted If the input data is sorted, setting this parameter to true avoids a second sort.
* @param maxIndelSize The size of the largest indel to use for realignment.
* @param maxConsensusNumber The maximum number of consensus sequences to realign against per
* target region.
   * @param lodThreshold Log-odds threshold to use when realigning; realignments are only finalized
* if the log-odds threshold is exceeded.
* @param maxTargetSize The maximum width of a single target region for realignment.
*
* @return Returns an RDD of mapped reads which have been realigned.
*/
def adamRealignIndels(consensusModel: ConsensusGenerator = new ConsensusGeneratorFromReads,
isSorted: Boolean = false,
maxIndelSize: Int = 500,
maxConsensusNumber: Int = 30,
lodThreshold: Double = 5.0,
maxTargetSize: Int = 3000): RDD[AlignmentRecord] = RealignIndelsInDriver.time {
RealignIndels(rdd, consensusModel, isSorted, maxIndelSize, maxConsensusNumber, lodThreshold)
}
// Returns a tuple of (failedQualityMetrics, passedQualityMetrics)
def adamFlagStat(): (FlagStatMetrics, FlagStatMetrics) = {
FlagStat(rdd)
}
/**
* Groups all reads by record group and read name
* @return SingleReadBuckets with primary, secondary and unmapped reads
*/
def adamSingleReadBuckets(): RDD[SingleReadBucket] = {
SingleReadBucket(rdd)
}
/**
* Converts a set of records into an RDD containing the pairs of all unique tagStrings
* within the records, along with the count (number of records) which have that particular
* attribute.
*
* @return An RDD of attribute name / count pairs.
*/
def adamCharacterizeTags(): RDD[(String, Long)] = {
rdd.flatMap(RichAlignmentRecord(_).tags.map(attr => (attr.tag, 1L))).reduceByKey(_ + _)
}
/**
* Calculates the set of unique attribute <i>values</i> that occur for the given
   * tag, and the number of times each value occurs.
*
* @param tag The name of the optional field whose values are to be counted.
   * @return A Map whose keys are the values of the tag, and whose values are the number of times each tag-value occurs.
*/
def adamCharacterizeTagValues(tag: String): Map[Any, Long] = {
adamFilterRecordsWithTag(tag).flatMap(RichAlignmentRecord(_).tags.find(_.tag == tag)).map(
attr => Map(attr.value -> 1L)).reduce {
(map1: Map[Any, Long], map2: Map[Any, Long]) =>
MapTools.add(map1, map2)
}
}
/**
* Returns the subset of the ADAMRecords which have an attribute with the given name.
* @param tagName The name of the attribute to filter on (should be length 2)
* @return An RDD[Read] containing the subset of records with a tag that matches the given name.
*/
def adamFilterRecordsWithTag(tagName: String): RDD[AlignmentRecord] = {
assert(tagName.length == 2,
"withAttribute takes a tagName argument of length 2; tagName=\\"%s\\"".format(tagName))
rdd.filter(RichAlignmentRecord(_).tags.exists(_.tag == tagName))
}
/**
* Saves these AlignmentRecords to two FASTQ files: one for the first mate in each pair, and the other for the second.
*
* @param fileName1 Path at which to save a FASTQ file containing the first mate of each pair.
* @param fileName2 Path at which to save a FASTQ file containing the second mate of each pair.
* @param validationStringency Iff strict, throw an exception if any read in this RDD is not accompanied by its mate.
*/
def adamSaveAsPairedFastq(fileName1: String,
fileName2: String,
outputOriginalBaseQualities: Boolean = false,
validationStringency: ValidationStringency = ValidationStringency.LENIENT,
persistLevel: Option[StorageLevel] = None): Unit = {
def maybePersist[T](r: RDD[T]): Unit = {
persistLevel.foreach(r.persist(_))
}
def maybeUnpersist[T](r: RDD[T]): Unit = {
persistLevel.foreach(_ => r.unpersist())
}
maybePersist(rdd)
val numRecords = rdd.count()
val readsByID: RDD[(String, Iterable[AlignmentRecord])] =
rdd.groupBy(record => {
if (!AlignmentRecordConverter.readNameHasPairedSuffix(record))
record.getReadName.toString
else
record.getReadName.toString.dropRight(2)
})
validationStringency match {
case ValidationStringency.STRICT | ValidationStringency.LENIENT =>
val readIDsWithCounts: RDD[(String, Int)] = readsByID.mapValues(_.size)
val unpairedReadIDsWithCounts: RDD[(String, Int)] = readIDsWithCounts.filter(_._2 != 2)
maybePersist(unpairedReadIDsWithCounts)
val numUnpairedReadIDsWithCounts: Long = unpairedReadIDsWithCounts.count()
if (numUnpairedReadIDsWithCounts != 0) {
val readNameOccurrencesMap: collection.Map[Int, Long] = unpairedReadIDsWithCounts.map(_._2).countByValue()
val msg =
List(
s"Found $numUnpairedReadIDsWithCounts read names that don't occur exactly twice:",
readNameOccurrencesMap.map({
                case (numOccurrences, numReadNames) => s"${numOccurrences}x:\t$numReadNames"
              }).take(100).mkString("\t", "\n\t", if (readNameOccurrencesMap.size > 100) "\n\t…" else ""),
"",
"Samples:",
unpairedReadIDsWithCounts
.take(100)
.map(_._1)
                .mkString("\t", "\n\t", if (numUnpairedReadIDsWithCounts > 100) "\n\t…" else "")
            ).mkString("\n")
if (validationStringency == ValidationStringency.STRICT)
throw new IllegalArgumentException(msg)
else if (validationStringency == ValidationStringency.LENIENT)
logError(msg)
}
case ValidationStringency.SILENT =>
}
val pairedRecords: RDD[AlignmentRecord] = readsByID.filter(_._2.size == 2).map(_._2).flatMap(x => x)
maybePersist(pairedRecords)
val numPairedRecords = pairedRecords.count()
maybeUnpersist(rdd.unpersist())
val firstInPairRecords: RDD[AlignmentRecord] = pairedRecords.filter(_.getFirstOfPair)
maybePersist(firstInPairRecords)
val numFirstInPairRecords = firstInPairRecords.count()
val secondInPairRecords: RDD[AlignmentRecord] = pairedRecords.filter(_.getSecondOfPair)
maybePersist(secondInPairRecords)
val numSecondInPairRecords = secondInPairRecords.count()
maybeUnpersist(pairedRecords)
log.info(
"%d/%d records are properly paired: %d firsts, %d seconds".format(
numPairedRecords,
numRecords,
numFirstInPairRecords,
numSecondInPairRecords
)
)
if (validationStringency == ValidationStringency.STRICT) {
firstInPairRecords.foreach(read =>
if (read.getSecondOfPair)
throw new Exception("Read %s found with first- and second-of-pair set".format(read.getReadName))
)
secondInPairRecords.foreach(read =>
if (read.getFirstOfPair)
throw new Exception("Read %s found with first- and second-of-pair set".format(read.getReadName))
)
}
assert(
numFirstInPairRecords == numSecondInPairRecords,
"Different numbers of first- and second-reads: %d vs. %d".format(numFirstInPairRecords, numSecondInPairRecords)
)
val arc = new AlignmentRecordConverter
firstInPairRecords
.sortBy(_.getReadName.toString)
.map(record => arc.convertToFastq(record, maybeAddSuffix = true, outputOriginalBaseQualities = outputOriginalBaseQualities))
.saveAsTextFile(fileName1)
secondInPairRecords
.sortBy(_.getReadName.toString)
.map(record => arc.convertToFastq(record, maybeAddSuffix = true, outputOriginalBaseQualities = outputOriginalBaseQualities))
.saveAsTextFile(fileName2)
maybeUnpersist(firstInPairRecords)
maybeUnpersist(secondInPairRecords)
}
/**
* Saves reads in FASTQ format.
*
* @param fileName Path to save files at.
* @param outputOriginalBaseQualities Output the original base qualities (OQ) if available as opposed to those from BQSR
* @param sort Whether to sort the FASTQ files by read name or not. Defaults
* to false. Sorting the output will recover pair order, if desired.
*/
def adamSaveAsFastq(fileName: String,
fileName2Opt: Option[String] = None,
outputOriginalBaseQualities: Boolean = false,
sort: Boolean = false,
validationStringency: ValidationStringency = ValidationStringency.LENIENT,
persistLevel: Option[StorageLevel] = None) {
log.info("Saving data in FASTQ format.")
fileName2Opt match {
case Some(fileName2) =>
adamSaveAsPairedFastq(
fileName,
fileName2,
outputOriginalBaseQualities = outputOriginalBaseQualities,
validationStringency = validationStringency,
persistLevel = persistLevel
)
case _ =>
val arc = new AlignmentRecordConverter
// sort the rdd if desired
val outputRdd = if (sort || fileName2Opt.isDefined) {
rdd.sortBy(_.getReadName.toString)
} else {
rdd
}
// convert the rdd and save as a text file
outputRdd
.map(record => arc.convertToFastq(record, outputOriginalBaseQualities = outputOriginalBaseQualities))
.saveAsTextFile(fileName)
}
}
/**
* Reassembles read pairs from two sets of unpaired reads. The assumption is that the two sets
* were _originally_ paired together.
*
* @note The RDD that this is called on should be the RDD with the first read from the pair.
*
* @param secondPairRdd The rdd containing the second read from the pairs.
* @param validationStringency How stringently to validate the reads.
* @return Returns an RDD with the pair information recomputed.
*/
def adamRePairReads(secondPairRdd: RDD[AlignmentRecord],
validationStringency: ValidationStringency = ValidationStringency.LENIENT): RDD[AlignmentRecord] = {
// cache rdds
val firstPairRdd = rdd.cache()
secondPairRdd.cache()
val firstRDDKeyedByReadName = firstPairRdd.keyBy(_.getReadName.toString.dropRight(2))
val secondRDDKeyedByReadName = secondPairRdd.keyBy(_.getReadName.toString.dropRight(2))
// all paired end reads should have the same name, except for the last two
// characters, which will be _1/_2
val joinedRDD: RDD[(String, (AlignmentRecord, AlignmentRecord))] =
if (validationStringency == ValidationStringency.STRICT) {
firstRDDKeyedByReadName.cogroup(secondRDDKeyedByReadName).map {
case (readName, (firstReads, secondReads)) =>
(firstReads.toList, secondReads.toList) match {
case (firstRead :: Nil, secondRead :: Nil) =>
(readName, (firstRead, secondRead))
case _ =>
throw new Exception(
"Expected %d first reads and %d second reads for name %s; expected exactly one of each:\\n%s\\n%s".format(
firstReads.size,
secondReads.size,
readName,
                    firstReads.map(_.getReadName.toString).mkString("\t", "\n\t", ""),
                    secondReads.map(_.getReadName.toString).mkString("\t", "\n\t", "")
)
)
}
}
} else {
firstRDDKeyedByReadName.join(secondRDDKeyedByReadName)
}
val finalRdd = joinedRDD
.flatMap(kv => Seq(
AlignmentRecord.newBuilder(kv._2._1)
.setReadPaired(true)
.setProperPair(true)
.setFirstOfPair(true)
.setSecondOfPair(false)
.build(),
AlignmentRecord.newBuilder(kv._2._2)
.setReadPaired(true)
.setProperPair(true)
.setFirstOfPair(false)
.setSecondOfPair(true)
.build()
))
// uncache temp rdds
firstPairRdd.unpersist()
secondPairRdd.unpersist()
// return
finalRdd
}
}
| kcompher/adam | adam-core/src/main/scala/org/bdgenomics/adam/rdd/read/AlignmentRecordRDDFunctions.scala | Scala | apache-2.0 | 24,331 |
package cromwell.database.migration.workflowoptions
import cromwell.core.WorkflowOptions
/**
* Clear the values from encrypted keys in METADATA_ENTRY.
*/
class ClearMetadataEntryWorkflowOptions extends WorkflowOptionsChange {
override val tableName = "METADATA_ENTRY"
override val primaryKeyColumn = "METADATA_JOURNAL_ID"
override val workflowOptionsColumn = "METADATA_VALUE"
override val additionalReadBatchFilters = "AND METADATA_KEY = 'submittedFiles:options'"
override def migrateWorkflowOptions(workflowOptions: WorkflowOptions) = workflowOptions.clearEncryptedValues
}
| ohsu-comp-bio/cromwell | database/migration/src/main/scala/cromwell/database/migration/workflowoptions/ClearMetadataEntryWorkflowOptions.scala | Scala | bsd-3-clause | 592 |
package spire.math.algebraic
import spire.algebra.Sign
import spire.math._
import org.scalatest.FunSuite
class MaybeDoubleTest extends FunSuite {
test("Invalid is invalid") {
assert(!MaybeDouble.Invalid.isValid)
}
test("Exact construction from Float, Double, and Int") {
assert(MaybeDouble(0.3f).isExact)
assert(MaybeDouble(33e100).isExact)
assert(MaybeDouble(1234).isExact)
assert(MaybeDouble(Int.MinValue).isExact)
}
test("In-bounds Long construction is exact") {
// Technically, a Double can hold 1L << 53, but only because the last digit
// is a 0. That is, a Double can also hold 1L << 62 exactly, because it only
// has 1 digit in the mantissa. However, (1L << 53) - 1 is the largest
// exact integer a Double can hold that whose bits are all 1's.
assert(MaybeDouble((1L << 53) - 1).isExact)
}
test("Out-of-bounds Long construction is not exact") {
assert(!MaybeDouble(1L << 53).isExact)
assert(!MaybeDouble(Long.MaxValue).isExact)
}
test("In-bounds BigInt construction is exact") {
assert(MaybeDouble((BigInt(1) << 53) - 1).isExact)
}
test("Out-of-bounds BigInt construction is not exact") {
assert(!MaybeDouble(BigInt(1) << 53).isExact)
assert(!MaybeDouble(-(BigInt(17389) * 17387 * 17383 * 17377 * 17359)).isExact)
}
test("Rational construction is approximate") {
val md = MaybeDouble(Rational(1, 3))
assert(!md.isExact)
assert(md.error > 0.0)
}
test("BigDecimal construction is approximate") {
// Truthfully, it would be nice if BigDecimal construction would be exact
// when it can. What would be better is a base-2 BigFloat class.
val md = MaybeDouble(BigDecimal(0.5))
assert(!md.isExact)
assert(md.error > 0.0)
}
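  // Asserts that x falls within a's error interval [approx - error, approx + error]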
def assertAlmostEqual(a: MaybeDouble, x: Double) {
assert(a.approx - a.error <= x && x <= a.approx + a.error)
}
test("Ring ops keep correct error bound") {
val a = BigDecimal("1.234568")
val b = BigDecimal("9281234.199991111111111111111002304291")
val c = BigDecimal("0.333333333333333333333333333333333333333333")
val a_ = MaybeDouble(a)
val b_ = MaybeDouble(b)
val c_ = MaybeDouble(c)
    // Chain several arithmetic operations to accumulate rounding error
val bdx = a * b * b * b + c * a - a
val mdx = a_ * b_ * b_ * b_ + c_ * a_ - a_
assertAlmostEqual(mdx, bdx.toDouble)
assertAlmostEqual(a_ * b_, (a * b).toDouble)
assertAlmostEqual(a_ + b_, (a + b).toDouble)
assertAlmostEqual(a_ - b_, (a - b).toDouble)
}
test("Division keeps the faith") {
val a = Rational(1)
val b = Rational(3)
val a_ = MaybeDouble(a)
val b_ = MaybeDouble(b)
assertAlmostEqual(a_ / b_, (a / b).toDouble)
val x = Iterator.fill(23)(b_).foldLeft(a_)(_ / _)
assertAlmostEqual(x, (a / b pow 23).toDouble)
}
test("Square-roots keep correct error bound") {
val a = Algebraic(2)
val a_ = MaybeDouble(2)
assertAlmostEqual(MaybeDouble(2).sqrt, Algebraic(2).sqrt.toDouble)
assertAlmostEqual(MaybeDouble(4).sqrt, Algebraic(4).sqrt.toDouble)
}
test("abs doesn't affect error") {
val a_ = MaybeDouble(Rational(-1, 3))
assert(a_.abs === -a_)
assert(MaybeDouble(1).abs === MaybeDouble(1))
assert(a_.abs.error === a_.error)
}
test("Unary-minus don't affect sign") {
val a = MaybeDouble(1.0)
assert(-a.error === a.error)
assert(-(-a) === a)
assert(-a === MaybeDouble(-1.0))
}
test("Sign returns None when not exact") {
val a = MaybeDouble.approx(1)
assert((a - a).sign === None)
// Note: x.sqrt - y.sqrt - z.sqrt == 0.
val x = MaybeDouble(18)
val y = MaybeDouble(8)
val z = MaybeDouble(2)
assert((x.sqrt - (y.sqrt + z.sqrt)).sign === None)
}
test("Sign returns Some(sign) when it can") {
val x = MaybeDouble(3.08)
val r = MaybeDouble(19).sqrt
assert((r - (x*x + x*x).sqrt).sign == Some(Sign.Positive))
}
test("Conversion functions can be exact") {
val x = Iterator.fill(23)(MaybeDouble(3.0)).foldLeft(MaybeDouble(1.0))(_ / _)
assert(x.toFloat.isDefined)
assert(x.toInt === Some(0))
assert(MaybeDouble(Int.MaxValue).toInt === Some(Int.MaxValue))
assert(MaybeDouble(1.0).toDouble == Some(1.0))
}
test("Conversion functions return None when not exact") {
assert(MaybeDouble(Float.MaxValue.toDouble * 2).toFloat == None)
// Turns out it is pretty hard to get an error so bad it isn't even a Float.
val a = Iterator.fill(10)(MaybeDouble.approx(10000.0)) reduce (_ * _)
val b = Iterator.fill(10)(MaybeDouble.approx(1 / 10000.0)) reduce (_ * _)
val x = Iterator.fill(10000)(a * b) reduce (_ + _)
assert((x - x).toFloat === None)
assert(MaybeDouble.approx(1.0).toLong === None)
assert(MaybeDouble.approx(-1.0).toInt === None)
assert(MaybeDouble.approx(4.0).toBigInt === None)
assert(MaybeDouble.approx(1.0).toDouble === None)
}
}
| lrytz/spire | tests/src/test/scala/spire/math/fpf/MaybeDoubleTest.scala | Scala | mit | 4,912 |
/**
* Copyright (C) 2010 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms
import analysis.model.Instance
import model.DataModel
import org.dom4j._
import org.orbeon.oxf.pipeline.api.TransformerXMLReceiver
import org.orbeon.oxf.pipeline.api.XMLReceiver
import org.orbeon.oxf.xforms.control.controls.XFormsRepeatControl
import org.orbeon.oxf.xforms.event._
import org.orbeon.oxf.xforms.event.events._
import org.orbeon.oxf.xml.TransformerUtils
import org.orbeon.oxf.xml.dom4j.Dom4jUtils
import org.orbeon.oxf.xml.dom4j.LocationData
import org.orbeon.saxon.dom4j.DocumentWrapper
import javax.xml.transform.stream.StreamResult
import collection.JavaConverters._
import org.orbeon.oxf.common.OXFException
import org.orbeon.oxf.xforms.XFormsServerSharedInstancesCache.Loader
import org.orbeon.saxon.om.{NodeInfo, VirtualNode, DocumentInfo}
import org.orbeon.oxf.util._
import org.orbeon.oxf.xforms.state.InstanceState
// Caching information associated with an instance loaded with xxf:cache="true"
case class InstanceCaching(
timeToLive: Long,
handleXInclude: Boolean,
sourceURI: String,
requestBodyHash: Option[String]) {
require(sourceURI ne null, """Only XForms instances externally loaded through the src attribute may have xxf:cache="true".""")
def debugPairs = Seq(
"timeToLive" → timeToLive.toString,
"handleXInclude" → handleXInclude.toString,
"sourceURI" → sourceURI,
"requestBodyHash" → requestBodyHash.orNull
)
def writeAttributes(att: (String, String) ⇒ Unit) {
att("cache", "true")
if (timeToLive >= 0) att("ttl", timeToLive.toString)
if (handleXInclude) att("xinclude", "true")
att("source-uri", sourceURI)
requestBodyHash foreach (att("request-body-hash", _))
}
}
object InstanceCaching {
// Not using "apply" as that causes issues for Java callers
def fromValues(timeToLive: Long, handleXInclude: Boolean, sourceURI: String, requestBodyHash: String): InstanceCaching =
InstanceCaching(timeToLive, handleXInclude, sourceURI, Option(requestBodyHash))
// Not using "apply" as that causes issues for Java callers
def fromInstance(instance: Instance, sourceURI: String, requestBodyHash: String): InstanceCaching =
InstanceCaching(instance.timeToLive, instance.handleXInclude, sourceURI, Option(requestBodyHash))
}
/**
* Represent an XForms instance. An instance is made of:
*
* - immutable information
* - a reference to its parent model
* - a reference to its static representation
* - mutable information
* - XML document with the instance content
* - information to reload the instance after deserialization if needed
* - whether the instance is readonly
* - whether the instance has been modified
*/
class XFormsInstance(
val parent: XFormsModel, // concrete parent model
val instance: Instance, // static instance
private var _instanceCaching: Option[InstanceCaching], // information to restore cached instance content
private var _documentInfo: DocumentInfo, // fully wrapped document
private var _readonly: Boolean, // whether the instance is readonly (can change upon submission)
private var _modified: Boolean, // whether the instance was modified
var valid: Boolean) // whether the instance was valid as of the last revalidation
extends ListenersTrait
with XFormsInstanceIndex
with XFormsEventObserver
with Logging {
require(! (_readonly && _documentInfo.isInstanceOf[VirtualNode]))
requireNewIndex()
def containingDocument = parent.containingDocument
// Getters
def instanceCaching = _instanceCaching
def documentInfo = _documentInfo
def readonly = _readonly
def modified = _modified
// Mark the instance as modified
// This is used so we can optimize serialization: if an instance is inline and not modified, we don't need to
// serialize its content
def markModified() = _modified = true
// Update the instance upon submission with instance replacement
def update(instanceCaching: Option[InstanceCaching], documentInfo: DocumentInfo, readonly: Boolean): Unit = {
_instanceCaching = instanceCaching
_documentInfo = documentInfo
_readonly = readonly
requireNewIndex()
markModified()
}
def exposeXPathTypes = instance.exposeXPathTypes
def isSchemaValidation = instance.isSchemaValidation && ! _readonly
// Don't serialize if the instance is inline and hasn't been modified
    // NOTE: If the instance is cacheable, its metadata gets serialized, but not its XML content
def mustSerialize = ! (instance.useInlineContent && ! _modified)
// Return the model that contains this instance
def model = parent
// The instance root node
def root = _documentInfo
// The instance root element as with the instance() function
def rootElement = DataModel.firstChildElement(_documentInfo)
def getId = instance.staticId
def getPrefixedId = XFormsUtils.getPrefixedId(getEffectiveId)
def getEffectiveId = XFormsUtils.getRelatedEffectiveId(parent.getEffectiveId, instance.staticId)
def scope = model.getStaticModel.scope
def container = model.container
def getLocationData =
if (_documentInfo.isInstanceOf[DocumentWrapper])
XFormsUtils.getNodeLocationData(underlyingDocumentOrNull.getRootElement)
else
new LocationData(_documentInfo.getSystemId, _documentInfo.getLineNumber, -1)
def parentEventObserver: XFormsEventObserver = model
def performDefaultAction(event: XFormsEvent) =
event match {
case ev: XXFormsInstanceInvalidate ⇒
implicit val indentedLogger = event.containingDocument.getIndentedLogger(XFormsModel.LOGGING_CATEGORY)
_instanceCaching match {
case Some(instanceCaching) ⇒
XFormsServerSharedInstancesCache.remove(indentedLogger, instanceCaching.sourceURI, null, instanceCaching.handleXInclude)
case None ⇒
warn("xxforms-instance-invalidate event dispatched to non-cached instance", Seq("instance id" → getEffectiveId))
}
case ev: XXFormsActionErrorEvent ⇒
XFormsError.handleNonFatalActionError(this, ev.throwable)
case _ ⇒
}
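    // Handle structural events targeted at this instance: adjust repeat controls and keep the id index
    // up to date after inserts, deletes, root/attribute replacements, and value changes.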
def performTargetAction(event: XFormsEvent) =
event match {
case insertEvent: XFormsInsertEvent ⇒
// New nodes were just inserted
// As per XForms 1.1, this is where repeat indexes must be adjusted, and where new repeat items must be
// inserted.
// Find affected repeats
val insertedNodes = insertEvent.insertedNodes
//didInsertNodes = insertedNodes.size() != 0
// Find affected repeats and update their node-sets and indexes
val controls = container.getContainingDocument.getControls
updateRepeatNodesets(controls, insertedNodes)
// Update index
// If this was a root element replacement, rely on XXFormsReplaceEvent instead
if (! insertEvent.isRootElementReplacement)
updateIndexForInsert(insertedNodes)
case deleteEvent: XFormsDeleteEvent ⇒
// New nodes were just deleted
if (deleteEvent.deletedNodes.nonEmpty) {
// Find affected repeats and update them
val controls = container.getContainingDocument.getControls
updateRepeatNodesets(controls, null)
updateIndexForDelete(deleteEvent.deletedNodes)
}
case replaceEvent: XXFormsReplaceEvent ⇒
// A node was replaced
// As of 2013-02-18, this happens for:
// - a root element replacement
// - an id attribute replacement
updateIndexForReplace(replaceEvent.formerNode, replaceEvent.currentNode)
case valueChangeEvent: XXFormsValueChangedEvent ⇒
updateIndexForValueChange(valueChangeEvent)
case _ ⇒
}
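    // Update the node-sets and indexes of all repeat controls that resolve within this instance's scope.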
private def updateRepeatNodesets(controls: XFormsControls, insertedNodes: Seq[NodeInfo]) {
val repeatControlsMap = controls.getCurrentControlTree.getRepeatControls.asScala
if (repeatControlsMap.nonEmpty) {
val instanceScope = container.getPartAnalysis.scopeForPrefixedId(getPrefixedId)
// NOTE: Copy into List as the list of repeat controls may change within updateNodesetForInsertDelete()
val repeatControls = repeatControlsMap.values.toList
for {
repeatControl ← repeatControls
// Get a new reference to the control, in case it is no longer present in the tree due to earlier updates
newRepeatControl ← Option(controls.getObjectByEffectiveId(repeatControl.getEffectiveId).asInstanceOf[XFormsRepeatControl])
if newRepeatControl.getResolutionScope == instanceScope
} yield
// Only update controls within same scope as modified instance
// NOTE: This can clearly break with e.g. xxf:instance()
// NOTE: the control may be non-relevant
newRepeatControl.updateSequenceForInsertDelete(insertedNodes)
}
}
// Return the instance document as a dom4j Document
// If the instance is readonly, this returns null. Callers should use instanceRoot() whenever possible.
def underlyingDocumentOrNull: Document =
_documentInfo match {
case virtualNode: VirtualNode ⇒ virtualNode.getUnderlyingNode.asInstanceOf[Document]
case _ ⇒ null
}
// LATER: Measure performance of Dom4jUtils.domToString(instance.getDocument)
def contentAsString =
Option(underlyingDocumentOrNull) map
(TransformerUtils.dom4jToString(_, false)) getOrElse
TransformerUtils.tinyTreeToString(_documentInfo)
// Don't allow any external events
def allowExternalEvent(eventName: String) = false
// Write the instance document to the specified ContentHandler
def write(xmlReceiver: XMLReceiver) =
TransformerUtils.sourceToSAX(_documentInfo, xmlReceiver)
// Log the instance
def logContent(indentedLogger: IndentedLogger, message: String): Unit = {
implicit val logger = indentedLogger
debug(message, Seq(
"model effective id" → parent.getEffectiveId,
"instance effective id" → getEffectiveId,
"instance" → TransformerUtils.tinyTreeToString(rootElement)
))
}
// Print the instance with extra annotation attributes to Console.out. For debug only.
def debugPrintOut() = {
val identityTransformerHandler: TransformerXMLReceiver = TransformerUtils.getIdentityTransformerHandler
identityTransformerHandler.setResult(new StreamResult(Console.out))
write(identityTransformerHandler)
}
// Log the current MIP values applied to this instance
def debugLogMIPs() = {
val result = Dom4jUtils.createDocument
underlyingDocumentOrNull.accept(new VisitorSupport {
final override def visit(element: Element) = {
currentElement = rootElement.addElement("element")
currentElement.addAttribute("qname", element.getQualifiedName)
currentElement.addAttribute("namespace-uri", element.getNamespaceURI)
addMIPInfo(currentElement, element)
}
final override def visit(attribute: Attribute) = {
val attributeElement = currentElement.addElement("attribute")
attributeElement.addAttribute("qname", attribute.getQualifiedName)
attributeElement.addAttribute("namespace-uri", attribute.getNamespaceURI)
addMIPInfo(attributeElement, attribute)
}
private def addMIPInfo(parentInfoElement: Element, node: Node) = {
parentInfoElement.addAttribute("readonly", InstanceData.getInheritedReadonly(node).toString)
parentInfoElement.addAttribute("relevant", InstanceData.getInheritedRelevant(node).toString)
parentInfoElement.addAttribute("required", InstanceData.getRequired(node).toString)
parentInfoElement.addAttribute("valid", InstanceData.getValid(node).toString)
val typeQName = InstanceData.getType(node)
parentInfoElement.addAttribute("type", Option(typeQName) map (_.getQualifiedName) getOrElse "")
}
private val rootElement = result.addElement("mips")
private var currentElement: Element = null
})
XFormsUtils.logDebugDocument("MIPs: ", result)
}
// Replace the instance with the given document
// This includes marking the structural change as well as dispatching events
def replace(newDocumentInfo: DocumentInfo, dispatch: Boolean = true, instanceCaching: Option[InstanceCaching] = instanceCaching, isReadonly: Boolean = readonly): Unit = {
val formerRoot = rootElement
// Update the instance (this also marks it as modified)
update(
instanceCaching,
newDocumentInfo,
isReadonly)
// Call this directly, since we are not using insert/delete here
model.markStructuralChange(this)
val currentRoot = rootElement
if (dispatch) {
// Dispatch xxforms-replace event
// NOTE: For now, still dispatch xforms-insert for backward compatibility.
Dispatch.dispatchEvent(
new XXFormsReplaceEvent(
this,
formerRoot,
currentRoot)
)
// Dispatch xforms-insert event for backward compatibility
// NOTE: use the root node as insert location as it seems to make more sense than pointing to the former
// root element.
Dispatch.dispatchEvent(
new XFormsInsertEvent(
this,
Seq[NodeInfo](currentRoot).asJava,
null, // CHECK
currentRoot.getDocumentRoot,
"into") // "into" makes more sense than "after" or "before"! We used to have "after", not sure why.
)
}
}
}
// For instances which declare xxf:index="id", keep up-to-date an index of ids to elements. The index is set on
// DocumentWrapper, so that the XPath id() function works out of the box.
//
// Implementation notes:
//
// - set an IdGetter on DocumentWrapper when a new Dom4j DocumentWrapper is set on the instance
// - index all elements with an attribute whose local name is "id"
// - initial index is created the first time an id is required
// - upon subsequent document updates (insert, delete, setvalue), the index is incrementally updated
// - keep reference to all elements which have a given id so that we support insert/delete in any order
// - sort the elements in case there is more than one possible result; this is not very efficient so it's better to
// make sure that every id is unique
//
// Possible improvements:
//
// - should just index "id" and "xml:id"
// - handle schema xs:ID type as well
trait XFormsInstanceIndex {
self: XFormsInstance ⇒
import org.orbeon.scaxon.XML._
import collection.{mutable ⇒ m}
import org.w3c.dom.Node.{ATTRIBUTE_NODE, ELEMENT_NODE}
private var idIndex: m.Map[String, List[Element]] = _
// Iterator over all ids
def idsIterator = {
createIndexIfNeeded()
if (idIndex ne null) idIndex.keysIterator else Iterator.empty
}
def requireNewIndex() = {
idIndex = null
if (instance.indexIds && self.documentInfo.isInstanceOf[DocumentWrapper]) {
val wrapper = self.documentInfo.asInstanceOf[DocumentWrapper]
wrapper.setIdGetter(new DocumentWrapper.IdGetter {
object ElementOrdering extends Ordering[Element] {
def compare(x: Element, y: Element) =
wrapper.wrap(x).compareOrder(wrapper.wrap(y))
}
def apply(id: String) = {
// Lazily create index the first time if needed
createIndexIfNeeded()
// Query index
idIndex.get(id) match {
case Some(list) if list.size > 1 ⇒ list.min(ElementOrdering) // get first in document order
case Some(list) ⇒ list.head // empty list not allowed in the map
case None ⇒ null
}
}
})
}
}
private def createIndexIfNeeded() =
if (idIndex eq null) {
idIndex = m.Map()
combineMappings(mappingsInSubtree(self.documentInfo))
}
def updateIndexForInsert(nodes: Seq[NodeInfo]) =
if (idIndex ne null)
for (node ← nodes)
combineMappings(mappingsInSubtree(node))
def updateIndexForDelete(nodes: Seq[NodeInfo]) =
if (idIndex ne null)
for (node ← nodes; (id, element) ← mappingsInSubtree(node))
removeId(id, element)
def updateIndexForReplace(formerNode: NodeInfo, currentNode: NodeInfo) =
if (idIndex ne null) {
if (currentNode.getNodeKind == ATTRIBUTE_NODE && currentNode.getLocalPart == "id")
// Don't use updateIndexForDelete, because formerNode.getParent will fail
removeId(formerNode.stringValue, unwrapElement(currentNode.getParent))
else if (currentNode.getNodeKind == ELEMENT_NODE)
updateIndexForDelete(Seq(formerNode))
updateIndexForInsert(Seq(currentNode))
}
def updateIndexForValueChange(valueChangeEvent: XXFormsValueChangedEvent) =
if ((idIndex ne null) && valueChangeEvent.node.getLocalPart == "id") {
val parentElement = unwrapElement(valueChangeEvent.node.getParent)
removeId(valueChangeEvent.oldValue, parentElement)
addId(valueChangeEvent.newValue, parentElement)
}
private def idsInSubtree(start: NodeInfo) =
if (start.getNodeKind == ATTRIBUTE_NODE)
start self "id"
else
start descendantOrSelf * att "id"
private def mappingsInSubtree(start: NodeInfo) = idsInSubtree(start) map (id ⇒ id.getStringValue → unwrapElement(id.getParent))
private def removeId(id: String, parentElement: Element) = {
idIndex.get(id) match {
case Some(list) if list.size > 1 ⇒
idIndex(id) = list filter (_ ne parentElement)
assert(idIndex(id).nonEmpty)
case Some(list) ⇒ idIndex -= id // don't leave an empty list in the map
case None ⇒ // NOP
}
}
private def addId(id: String, element: Element) =
idIndex(id) = element :: (
idIndex.get(id) match {
case Some(list) ⇒
// We should enable the assert below, but first we need to make sure we skip xforms-insert
// processing for an attribute replacement, because xxforms-replace has already handled the updated
// attribute. For now, filter so we don't get duplicates.
//assert(! (list exists (_ eq element)))
list filter (_ ne element)
case None ⇒ Nil
}
)
private def combineMappings(mappings: Seq[(String, Element)]) =
for ((id, element) ← mappings)
addId(id, element)
}
object XFormsInstance extends Logging {
import Instance._
// Create an initial instance without caching information
def apply(model: XFormsModel, instance: Instance, documentInfo: DocumentInfo) =
new XFormsInstance(
model,
instance,
None,
documentInfo,
instance.readonly,
false,
true)
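    // Wrap a dom4j Document as a DocumentInfo, or pass an existing DocumentInfo through unchanged.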
def createDocumentInfo(documentOrDocumentInfo: AnyRef, exposeXPathTypes: Boolean) = documentOrDocumentInfo match {
case dom4jDocument: Document ⇒ wrapDocument(dom4jDocument, exposeXPathTypes)
case documentInfo: DocumentInfo ⇒ documentInfo
case _ ⇒ throw new OXFException("Invalid type for instance document: " + documentOrDocumentInfo.getClass.getName)
}
def createDocumentInfo(xmlString: String, readonly: Boolean, exposeXPathTypes: Boolean) =
if (readonly)
TransformerUtils.stringToTinyTree(XPathCache.getGlobalConfiguration, xmlString, false, true)
else
wrapDocument(Dom4jUtils.readDom4j(xmlString), exposeXPathTypes)
// Take a non-wrapped DocumentInfo and wrap it if needed
def wrapDocumentInfo(documentInfo: DocumentInfo, readonly: Boolean, exposeXPathTypes: Boolean) = {
assert(! documentInfo.isInstanceOf[VirtualNode], "DocumentInfo must not be a VirtualNode, i.e. it must be a native readonly tree like TinyTree")
// NOTE: We don't honor exposeXPathTypes on readonly instances, anyway they don't support MIPs at this time
if (readonly)
documentInfo // the optimal case: no copy of the cached document is needed
else
wrapDocument(TransformerUtils.tinyTreeToDom4j(documentInfo), exposeXPathTypes)
}
// Restore an instance on the model, given InstanceState
def restoreInstanceFromState(model: XFormsModel, instanceState: InstanceState, loader: Loader): Unit = {
implicit val logger = model.indentedLogger
val instance = model.staticModel.instances(XFormsUtils.getStaticIdFromId(instanceState.effectiveId))
val (caching, documentInfo) =
instanceState.cachingOrContent match {
case Left(caching) ⇒
debug("restoring instance from instance cache", Seq("id" → instanceState.effectiveId))
// NOTE: No XInclude supported to read instances with @src for now
// TODO: must pass method and request body in case of POST/PUT
(Some(caching),
XFormsServerSharedInstancesCache.findContentOrLoad(logger, instance, caching, instanceState.readonly, loader))
case Right(content) ⇒
debug("using initialized instance from state", Seq("id" → instanceState.effectiveId))
(None,
createDocumentInfo(content, instanceState.readonly, instance.exposeXPathTypes))
}
model.indexInstance(
new XFormsInstance(
model,
instance,
caching,
documentInfo,
instanceState.readonly,
instanceState.modified,
instanceState.valid))
}
}
| evlist/orbeon-forms | src/main/scala/org/orbeon/oxf/xforms/XFormsInstance.scala | Scala | lgpl-2.1 | 24,197 |
package com.twitter.finagle.zipkin.thrift
import java.nio.ByteBuffer
import java.net.{InetAddress, InetSocketAddress, SocketAddress}
import java.util.logging.Logger
import com.twitter.finagle.thrift.thrift
/**
* Endpoints describe a TCP endpoint that terminates RPC
* communication.
*/
case class Endpoint(ipv4: Int, port: Short) {
/**
* @return If this endpoint's ip is 0.0.0.0 we get the local host and return that.
*/
def boundEndpoint: Endpoint = if (ipv4 == 0) Endpoint(Endpoint.getLocalHost, port) else this
def toThrift: Option[thrift.Endpoint] = {
val e = new thrift.Endpoint
e.setIpv4(ipv4)
e.setPort(port)
Some(e)
}
}
object Endpoint {
private[this] val log = Logger.getLogger(getClass.toString)
val Unknown = new Endpoint(0, 0) {
override def toThrift = None
}
def toIpv4(inetAddress: InetAddress): Int =
ByteBuffer.wrap(inetAddress.getAddress).getInt
/**
* Get the local host as an integer.
*/
val getLocalHost: Int = {
try {
Endpoint.toIpv4(InetAddress.getLocalHost)
} catch {
      case e: Throwable =>
log.warning("Failed to retrieve local host address: %s".format(e))
0
}
}
/**
* @return If possible, convert from a SocketAddress object to an Endpoint.
* If not, return Unknown Endpoint.
*/
def fromSocketAddress(socketAddress: SocketAddress): Endpoint = {
socketAddress match {
case inet: InetSocketAddress => {
Endpoint(toIpv4(inet.getAddress), inet.getPort.toShort)
}
case _ => Endpoint.Unknown
}
}
}
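// Usage sketch (added illustration, not part of the original file; the host and port
// below are example values only):
//
//   val ep = Endpoint.fromSocketAddress(new java.net.InetSocketAddress("10.0.0.1", 9410))
//   ep.toThrift                    // Some(thrift.Endpoint) carrying the packed ipv4 and port
//   Endpoint(0, 80).boundEndpoint  // 0.0.0.0 is replaced with this host's address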
| joshbedo/finagle | finagle-zipkin/src/main/scala/com/twitter/finagle/zipkin/thrift/Endpoint.scala | Scala | apache-2.0 | 1,562 |
// 2. What is the value of an empty block expression {} ? What is its type?
// The value of an empty block expression is ().
// The type of such an expression is Unit.
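// A minimal runnable check (added sketch; the object name is illustrative and not part of
// the original answer):
object EmptyBlockDemo extends App {
  val emptyBlock: Unit = {} // the empty block evaluates to the Unit value ()
  println(emptyBlock)       // prints "()"
}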
| P7h/ScalaPlayground | Scala for the Impatient/exercises/org/p7h/scala/impatient/chapter02/02.scala | Scala | apache-2.0 | 151 |
package demo.components
import scala.scalajs.js
import scala.scalajs.js.annotation.JSImport
object Images {
@js.native @JSImport("./googleMap.png", JSImport.Default)
object googleMapImage extends js.Any
@js.native @JSImport("./reactListView.png", JSImport.Default)
object reactListViewImage extends js.Any
@js.native @JSImport("./reactTreeView.png", JSImport.Default)
object reactTreeViewImage extends js.Any
@js.native @JSImport("./elementalui.png", JSImport.Default)
object elementaluiImage extends js.Any
@js.native @JSImport("./mui.png", JSImport.Default)
object materialuiImage extends js.Any
@js.native @JSImport("./semanticui.png", JSImport.Default)
object semanticuiImage extends js.Any
@js.native @JSImport("./reactTable.png", JSImport.Default)
object reactTableImage extends js.Any
@js.native @JSImport("./reactSplitPane.png", JSImport.Default)
object reactSplitPaneImage extends js.Any
@js.native @JSImport("./bottom-tear.svg", JSImport.Default)
object bottomTearImage extends js.Any
@js.native @JSImport("./reactTagsInput.png", JSImport.Default)
object reactTagsInputImage extends js.Any
@js.native @JSImport("./reactInfinite.png", JSImport.Default)
object reactInfiniteImage extends js.Any
@js.native @JSImport("./reactGeomIcon.png", JSImport.Default)
object reactGeomIconImage extends js.Any
@js.native @JSImport("./spinner.png", JSImport.Default)
object spinnerImage extends js.Any
@js.native @JSImport("./reactPopover.png", JSImport.Default)
object reactPopoverImage extends js.Any
@js.native @JSImport("./reactDraggable.png", JSImport.Default)
object reactDraggableImage extends js.Any
}
| aparo/scalajs-react-components | demo/src/main/scala/demo/components/Images.scala | Scala | apache-2.0 | 1,668 |
package com.gigaspaces.csvwriter
import java.io.File
import org.apache.commons.cli.{CommandLine, GnuParser, Options}
import scala.Predef.String
import scala.Predef.require
import org.openspaces.core.space.UrlSpaceConfigurer
import org.openspaces.core.{GigaSpace, GigaSpaceConfigurer}
import org.slf4j.LoggerFactory
class CommandLineProcessing(args: Array[String]) {
private val logger = LoggerFactory.getLogger(getClass)
private val inputFileOption = "in"
private val inputFileDescription = "csv file"
private val spaceUrlOption = "url"
private val spaceUrlDescription = "Space URL (see http://wiki.gigaspaces.com/wiki/display/XAP9/Space+URL for details)"
private val documentDataTypeName = "dt"
private val documentDataTypeDescription = "Type of data stored in document"
private val options: Options = new Options()
.addOption(inputFileOption, true, inputFileDescription)
.addOption(spaceUrlOption, true, spaceUrlDescription)
.addOption(documentDataTypeName, true, documentDataTypeDescription)
  /** @return the input CSV file named on the command line, validated as an existing, readable file
    */
def inputFile(): File = {
val p = inputFilePath
val f = new File(p)
require(!f.isDirectory, String.format("Input %s must not be a directory, but [%s] is one.", inputFileDescription, p))
require(f.isFile, String.format("Input %s must exist, but [%s] does not.", inputFileDescription, p))
val path = f.getAbsolutePath
require(f.canRead, String.format("Input file [ %s ] is not readable.", path))
logger.debug("Valid input file specified in params: [ {} ]", path)
f
}
private def inputFilePath: String = stringValFromOpt(inputFileOption)
private def commandLine(): CommandLine = {
require(args != null, options.toString)
require(args.length > 1, options.toString)
val parser = new GnuParser
parser.parse(options, args)
}
private def spaceUrlString: String = stringValFromOpt(spaceUrlOption)
private def stringValFromOpt(opt: String) = commandLine().getOptionValue(opt)
  /** @return a GigaSpace configured from the space URL given on the command line
    */
def gigaSpace(): GigaSpace = {
val url = spaceUrlString
require(url != null, options.toString)
require(url.length > 0, options.toString)
new GigaSpaceConfigurer(new UrlSpaceConfigurer(url)).gigaSpace()
}
private def documentDataTypeString = stringValFromOpt(documentDataTypeName)
def documentDataType(): String = {
val dataType = documentDataTypeString
require(dataType != null, options.toString)
require(dataType.length > 0, options.toString)
dataType
}
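  // Usage sketch (added illustration; the option values below are examples, not taken from
  // this project):
  //
  //   val clp = new CommandLineProcessing(Array(
  //     "-in", "/tmp/data.csv",
  //     "-url", "jini://*/*/mySpace",
  //     "-dt", "com.example.MyDocumentType"))
  //   val csv      = clp.inputFile()        // validated java.io.File
  //   val space    = clp.gigaSpace()        // GigaSpace built from the space URL
  //   val dataType = clp.documentDataType() // "com.example.MyDocumentType"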
}
| jasonnerothin/gs-csvwriter | src/main/scala/com/gigaspaces/csvwriter/CommandLineProcessing.scala | Scala | apache-2.0 | 2,614 |
package net.selenate.server
package actions
package workers
import extensions.SelenateFirefox
import net.selenate.common.comms.req.SeReqElementGetAttributes
import net.selenate.common.comms.res.SeResElementGetAttributes
import net.selenate.common.exceptions.SeActionException
import scala.util.{ Failure, Success, Try }
import java.util.ArrayList
import scala.collection.JavaConversions._
object ElementGetAttributesAction {
val getAttributesJS = """
|var report = [];
|var attrList = arguments[0].attributes;
|for (var n = 0; n < attrList.length; n++) {
| var entry = attrList[n];
| report.push(entry.name + ' -> ' + entry.value);
|};
|return report;""".stripMargin
def parseAttributeReport(reportRaw: Object): Map[String, String] = {
reportRaw match {
case report: ArrayList[_] =>
val attributeList: List[(String, String)] =
report.toList flatMap {
case entry: String =>
val elements = entry.split(" -> ").toList
elements match {
case attribute :: value :: Nil => Some(attribute -> value)
case _ => None
}
case _ => None
}
attributeList.toMap
case _ => Map.empty
}
}
}
class ElementGetAttributesAction(val sessionID: String, val context: SessionContext, val d: SelenateFirefox)
extends RetryableAction[SeReqElementGetAttributes, SeResElementGetAttributes]
with ActionCommons {
import ElementGetAttributesAction._
def retryableAct = { arg =>
val result: Option[Try[Map[String, String]]] = elementInAllWindows(arg.getSelector) { (address, e) =>
val attributeReport = d.executeScript(getAttributesJS, e)
parseAttributeReport(attributeReport)
}
result match {
      case Some(Success(attributeMap)) =>
        new SeResElementGetAttributes(mapToRealJava(attributeMap))
case Some(Failure(ex)) =>
logError(s"An error occurred while executing $name action ($arg)!", ex)
throw new SeActionException(name, arg, ex)
case None =>
val msg = "element not found in any frame"
logError(s"An error occurred while executing $name action ($arg): $msg!")
throw new SeActionException(name, arg, msg)
}
}
}
| tferega/selenate | code/Server/src/main/scala/net/selenate/server/actions/workers/ElementGetAttributesAction.scala | Scala | bsd-3-clause | 2,295 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import kafka.admin._
import kafka.log.LogConfig
import kafka.log.CleanerConfig
import kafka.log.LogManager
import kafka.utils._
import java.util.concurrent._
import atomic.{AtomicInteger, AtomicBoolean}
import java.io.File
import java.net.BindException
import org.I0Itec.zkclient.ZkClient
import kafka.controller.{ControllerStats, KafkaController}
import kafka.cluster.Broker
import kafka.api.{ControlledShutdownResponse, ControlledShutdownRequest}
import kafka.common.ErrorMapping
import kafka.network.{Receive, BlockingChannel, SocketServer}
/**
* Represents the lifecycle of a single Kafka broker. Handles all functionality required
* to start up and shutdown a single Kafka node.
*/
class KafkaServer(val config: KafkaConfig, time: Time = SystemTime) extends Logging {
this.logIdent = "[Kafka Server " + config.brokerId + "], "
private var isShuttingDown = new AtomicBoolean(false)
private var shutdownLatch = new CountDownLatch(1)
private var startupComplete = new AtomicBoolean(false);
val correlationId: AtomicInteger = new AtomicInteger(0)
var socketServer: SocketServer = null
var requestHandlerPool: KafkaRequestHandlerPool = null
var logManager: LogManager = null
var kafkaHealthcheck: KafkaHealthcheck = null
var topicConfigManager: TopicConfigManager = null
var replicaManager: ReplicaManager = null
var apis: KafkaApis = null
var kafkaController: KafkaController = null
val kafkaScheduler = new KafkaScheduler(config.backgroundThreads)
var zkClient: ZkClient = null
/**
* Start up API for bringing up a single instance of the Kafka server.
* Instantiates the LogManager, the SocketServer and the request handlers - KafkaRequestHandlers
*/
def startup() {
info("starting")
isShuttingDown = new AtomicBoolean(false)
shutdownLatch = new CountDownLatch(1)
/* start scheduler */
kafkaScheduler.startup()
/* setup zookeeper */
zkClient = initZk()
/* start log manager */
logManager = createLogManager(zkClient)
logManager.startup()
socketServer = new SocketServer(config.brokerId,
config.hostName,
config.port,
config.numNetworkThreads,
config.queuedMaxRequests,
config.socketSendBufferBytes,
config.socketReceiveBufferBytes,
config.socketRequestMaxBytes)
socketServer.startup()
replicaManager = new ReplicaManager(config, time, zkClient, kafkaScheduler, logManager, isShuttingDown)
kafkaController = new KafkaController(config, zkClient)
/* start processing requests */
apis = new KafkaApis(socketServer.requestChannel, replicaManager, zkClient, config.brokerId, config, kafkaController)
requestHandlerPool = new KafkaRequestHandlerPool(config.brokerId, socketServer.requestChannel, apis, config.numIoThreads)
Mx4jLoader.maybeLoad()
replicaManager.startup()
kafkaController.startup()
topicConfigManager = new TopicConfigManager(zkClient, logManager)
topicConfigManager.startup()
/* tell everyone we are alive */
kafkaHealthcheck = new KafkaHealthcheck(config.brokerId, config.advertisedHostName, config.advertisedPort, config.zkSessionTimeoutMs, zkClient)
kafkaHealthcheck.startup()
registerStats()
startupComplete.set(true);
info("started")
}
private def initZk(): ZkClient = {
info("Connecting to zookeeper on " + config.zkConnect)
val chroot = {
if (config.zkConnect.indexOf("/") > 0)
config.zkConnect.substring(config.zkConnect.indexOf("/"))
else
""
}
if (chroot.length > 1) {
val zkConnForChrootCreation = config.zkConnect.substring(0, config.zkConnect.indexOf("/"))
val zkClientForChrootCreation = new ZkClient(zkConnForChrootCreation, config.zkSessionTimeoutMs, config.zkConnectionTimeoutMs, ZKStringSerializer)
ZkUtils.makeSurePersistentPathExists(zkClientForChrootCreation, chroot)
info("Created zookeeper path " + chroot)
zkClientForChrootCreation.close()
}
val zkClient = new ZkClient(config.zkConnect, config.zkSessionTimeoutMs, config.zkConnectionTimeoutMs, ZKStringSerializer)
ZkUtils.setupCommonPaths(zkClient)
zkClient
}
/**
* Forces some dynamic jmx beans to be registered on server startup.
*/
private def registerStats() {
BrokerTopicStats.getBrokerAllTopicsStats()
ControllerStats.uncleanLeaderElectionRate
ControllerStats.leaderElectionTimer
}
/**
* Performs controlled shutdown
*/
private def controlledShutdown() {
if (startupComplete.get() && config.controlledShutdownEnable) {
// We request the controller to do a controlled shutdown. On failure, we backoff for a configured period
// of time and try again for a configured number of retries. If all the attempt fails, we simply force
// the shutdown.
var remainingRetries = config.controlledShutdownMaxRetries
info("Starting controlled shutdown")
      var channel: BlockingChannel = null
      var prevController: Broker = null
      var shutdownSucceeded: Boolean = false
try {
        while (!shutdownSucceeded && remainingRetries > 0) {
remainingRetries = remainingRetries - 1
// 1. Find the controller and establish a connection to it.
// Get the current controller info. This is to ensure we use the most recent info to issue the
// controlled shutdown request
val controllerId = ZkUtils.getController(zkClient)
ZkUtils.getBrokerInfo(zkClient, controllerId) match {
case Some(broker) =>
if (channel == null || prevController == null || !prevController.equals(broker)) {
// if this is the first attempt or if the controller has changed, create a channel to the most recent
// controller
if (channel != null) {
channel.disconnect()
}
channel = new BlockingChannel(broker.host, broker.port,
BlockingChannel.UseDefaultBufferSize,
BlockingChannel.UseDefaultBufferSize,
config.controllerSocketTimeoutMs)
channel.connect()
prevController = broker
}
            case None =>
//ignore and try again
}
// 2. issue a controlled shutdown to the controller
if (channel != null) {
var response: Receive = null
try {
// send the controlled shutdown request
val request = new ControlledShutdownRequest(correlationId.getAndIncrement, config.brokerId)
channel.send(request)
response = channel.receive()
val shutdownResponse = ControlledShutdownResponse.readFrom(response.buffer)
if (shutdownResponse.errorCode == ErrorMapping.NoError && shutdownResponse.partitionsRemaining != null &&
shutdownResponse.partitionsRemaining.size == 0) {
                shutdownSucceeded = true
info ("Controlled shutdown succeeded")
}
else {
info("Remaining partitions to move: %s".format(shutdownResponse.partitionsRemaining.mkString(",")))
info("Error code from controller: %d".format(shutdownResponse.errorCode))
}
}
catch {
case ioe: java.io.IOException =>
channel.disconnect()
channel = null
// ignore and try again
}
}
          if (!shutdownSucceeded) {
Thread.sleep(config.controlledShutdownRetryBackoffMs)
warn("Retrying controlled shutdown after the previous attempt failed...")
}
}
}
finally {
if (channel != null) {
channel.disconnect()
channel = null
}
}
      if (!shutdownSucceeded) {
warn("Proceeding to do an unclean shutdown as all the controlled shutdown attempts failed")
}
}
}
/**
* Shutdown API for shutting down a single instance of the Kafka server.
* Shuts down the LogManager, the SocketServer and the log cleaner scheduler thread
*/
def shutdown() {
info("shutting down")
val canShutdown = isShuttingDown.compareAndSet(false, true);
if (canShutdown) {
Utils.swallow(controlledShutdown())
if(socketServer != null)
Utils.swallow(socketServer.shutdown())
if(requestHandlerPool != null)
Utils.swallow(requestHandlerPool.shutdown())
Utils.swallow(kafkaScheduler.shutdown())
if(apis != null)
Utils.swallow(apis.close())
if(replicaManager != null)
Utils.swallow(replicaManager.shutdown())
if(logManager != null)
Utils.swallow(logManager.shutdown())
if(kafkaController != null)
Utils.swallow(kafkaController.shutdown())
if(zkClient != null)
Utils.swallow(zkClient.close())
shutdownLatch.countDown()
startupComplete.set(false);
info("shut down completed")
}
}
/**
* After calling shutdown(), use this API to wait until the shutdown is complete
*/
def awaitShutdown(): Unit = shutdownLatch.await()
def getLogManager(): LogManager = logManager
private def createLogManager(zkClient: ZkClient): LogManager = {
val defaultLogConfig = LogConfig(segmentSize = config.logSegmentBytes,
segmentMs = 60L * 60L * 1000L * config.logRollHours,
flushInterval = config.logFlushIntervalMessages,
flushMs = config.logFlushIntervalMs.toLong,
retentionSize = config.logRetentionBytes,
retentionMs = config.logRetentionTimeMillis,
maxMessageSize = config.messageMaxBytes,
maxIndexSize = config.logIndexSizeMaxBytes,
indexInterval = config.logIndexIntervalBytes,
deleteRetentionMs = config.logCleanerDeleteRetentionMs,
fileDeleteDelayMs = config.logDeleteDelayMs,
minCleanableRatio = config.logCleanerMinCleanRatio,
compact = config.logCleanupPolicy.trim.toLowerCase == "compact")
val defaultProps = defaultLogConfig.toProps
val configs = AdminUtils.fetchAllTopicConfigs(zkClient).mapValues(LogConfig.fromProps(defaultProps, _))
// read the log configurations from zookeeper
val cleanerConfig = CleanerConfig(numThreads = config.logCleanerThreads,
dedupeBufferSize = config.logCleanerDedupeBufferSize,
dedupeBufferLoadFactor = config.logCleanerDedupeBufferLoadFactor,
ioBufferSize = config.logCleanerIoBufferSize,
maxMessageSize = config.messageMaxBytes,
maxIoBytesPerSecond = config.logCleanerIoMaxBytesPerSecond,
backOffMs = config.logCleanerBackoffMs,
enableCleaner = config.logCleanerEnable)
new LogManager(logDirs = config.logDirs.map(new File(_)).toArray,
topicConfigs = configs,
defaultConfig = defaultLogConfig,
cleanerConfig = cleanerConfig,
flushCheckMs = config.logFlushSchedulerIntervalMs,
flushCheckpointMs = config.logFlushOffsetCheckpointIntervalMs,
retentionCheckMs = config.logCleanupIntervalMs,
scheduler = kafkaScheduler,
time = time)
}
}
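// Usage sketch (added illustration; the property values are assumptions, not taken from this
// repository):
//
//   val props = new java.util.Properties()
//   props.put("broker.id", "0")
//   props.put("zookeeper.connect", "localhost:2181")
//   props.put("log.dirs", "/tmp/kafka-logs")
//   val server = new KafkaServer(new KafkaConfig(props))
//   server.startup()
//   sys.addShutdownHook { server.shutdown(); server.awaitShutdown() }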
| mthak/test_kafka | core/src/main/scala/kafka/server/KafkaServer.scala | Scala | apache-2.0 | 12,902 |
package com.strad.evan.interpreters
import cats.~>
import com.strad.evan.algebra.EventStore.{EventStoreA, Write}
import fs2.Task
import org.mongodb.scala._
object MongoDbInterpreter extends (EventStoreA ~> Task) {
implicit val scheduler =
_root_.fs2.Scheduler.fromFixedDaemonPool(2, "generator-scheduler")
implicit val S =
_root_.fs2.Strategy.fromFixedDaemonPool(2, "generator-timer")
val mc = MongoClient()
val db = mc.getDatabase("mydb")
val c = db.getCollection("test")
def apply[A](fa: EventStoreA[A]): Task[A] =
fa match {
case Write(item) =>
val doc = Document(item.toString)
Task
.fromFuture(c.insertOne(doc).toFuture)(
S,
scala.concurrent.ExecutionContext.Implicits.global
)
.map(x => ())
}
}
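// Usage sketch (added illustration; assumes programs are written as a cats Free monad over
// EventStoreA and that a cats Monad[Task] instance is in scope; `someEvent` is hypothetical):
//
//   import cats.free.Free
//   val program: Free[EventStoreA, Unit] = Free.liftF(Write(someEvent))
//   val task: Task[Unit] = program.foldMap(MongoDbInterpreter)
//   task.unsafeRun()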
| rstradling/evan | client/src/main/scala/com/strad/evan/interpreters/MongoDbInterpreter.scala | Scala | mit | 809 |
/*
* Copyright 2012 IL <iron9light AT gmali DOT com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ideacolorschemes.commons.json
import com.ideacolorschemes.commons.Binary
import net.liftweb.json._
import org.apache.commons.codec.binary.Base64._
/**
* @author il
* @version 11/8/11 10:11 AM
*/
class BinarySerializer extends Serializer[Binary] {
private val clazz = classOf[Binary]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Binary] = {
case (TypeInfo(this.clazz, _), json) =>
json match {
case JString(s) => decodeBase64(s)
case value => throw new MappingException("Can't convert " + value + " to " + clazz)
}
}
def serialize(implicit format: Formats): PartialFunction[Any, JValue] = {
case Binary(bytes) =>
JString(encodeBase64String(bytes));
}
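  // Usage sketch (added illustration; `bytes` stands for any Array[Byte]):
  //
  //   implicit val formats: Formats = net.liftweb.json.DefaultFormats + new BinarySerializer
  //   val json = Extraction.decompose(Binary(bytes)) // JString holding base64-encoded data
  //   val back = json.extract[Binary]                // decoded back to Binary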
}
| iron9light/ideacolorschemes-commons | src/main/scala/com/ideacolorschemes/commons/json/BinarySerializer.scala | Scala | apache-2.0 | 1,372 |
package lila.forumSearch
import com.sksamuel.elastic4s.ElasticDsl._
import com.sksamuel.elastic4s.QueryDefinition
import org.elasticsearch.search.sort.SortOrder
import lila.search.ElasticSearch
private[forumSearch] final class Query private (
indexType: String,
terms: List[String],
staff: Boolean,
troll: Boolean) extends lila.search.Query {
def searchDef(from: Int = 0, size: Int = 10) =
search in indexType query makeQuery sort (
by field Fields.date order SortOrder.DESC
) start from size size
def countDef = count from indexType query makeQuery
private def queryTerms = terms filterNot (_ startsWith "user:")
private def userSearch = terms find (_ startsWith "user:") map { _ drop 5 }
private lazy val makeQuery = filteredQuery query {
queryTerms match {
case Nil => all
case terms => must {
terms.map { term =>
multiMatchQuery(term) fields (Query.searchableFields: _*)
}: _*
}
}
} filter {
List(
userSearch map { termFilter(Fields.author, _) },
!staff option termFilter(Fields.staff, false),
!troll option termFilter(Fields.troll, false)
).flatten match {
case Nil => matchAllFilter
case filters => must {
filters: _*
}
}
}
}
object Query {
private val searchableFields = List(Fields.body, Fields.topic, Fields.author)
def apply(indexType: String, text: String, staff: Boolean, troll: Boolean): Query = new Query(
indexType, ElasticSearch decomposeTextQuery text, staff, troll
)
}
| Happy0/lila | modules/forumSearch/src/main/Query.scala | Scala | mit | 1,569 |
package de.aaschmid.sbtplugin.testnotifier
import org.specs.Specification
import org.specs.util.DataTables
object SeverityTest extends Specification with DataTables {
import de.aaschmid.sbtplugin.testnotifier.{Error => SError}
def provide = addToSusVerb("provide")
"Sealed case class Severity" should provide {
"isWorseThan" in {
"s0" | "s1" | "result" |>
Passed.asInstanceOf[Severity] !
Passed.asInstanceOf[Severity] !
false |
Passed ! Skipped ! false |
Passed ! Failed ! false |
Passed ! SError ! false |
Skipped ! Passed ! true |
Skipped ! Failed ! false |
Failed ! Passed ! true |
Skipped ! SError ! false |
SError ! Failed ! true | { (s0, s1, result) =>
(s0 isWorseThan s1) must_== result
}
}
"toString which does not contain any of [$.]" in {
Passed.toString must beMatching ("^[^$.]+$")
}
}
"Companion object Severity" should provide {
"correct worst" in {
"list" | "result" |>
List[Severity](Passed) ! Passed.asInstanceOf[Severity] |
List(Passed, Passed) ! Passed |
List(Passed, Skipped, Passed) ! Skipped |
List(Failed, Passed, Skipped) ! Failed |
List(Passed, Skipped, Failed, SError) ! SError | { (list, result) =>
Severity.worst(list) must_== result
}
}
"exception on passed empty List" in {
Severity.worst(Nil) must throwA[UnsupportedOperationException]
}
}
}
| aaschmid/sbt-test-notifier | src/test/scala/de/aaschmid/sbtplugin/testnotifier/SeverityTest.scala | Scala | apache-2.0 | 1,690 |
/**
* Swaggy Jenkins
* Jenkins API clients generated from Swagger / Open API specification
*
* The version of the OpenAPI document: 1.1.2-pre.0
* Contact: [email protected]
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
*/
package org.openapitools.server.model
case class ExtensionClassContainerImpl1map(
ioJenkinsBlueoceanServiceEmbeddedRestPipelineImpl: Option[ExtensionClassImpl],
ioJenkinsBlueoceanServiceEmbeddedRestMultiBranchPipelineImpl: Option[ExtensionClassImpl],
`class`: Option[String]
)
| cliffano/swaggy-jenkins | clients/scalatra/generated/src/main/scala/org/openapitools/server/model/ExtensionClassContainerImpl1map.scala | Scala | mit | 601 |
package epfl.pacman
package maze
import collection.{mutable => mut, immutable => imm}
trait Models extends Thingies with Positions with Directions { this: MVC =>
abstract class State
case object Running extends State
case object Paused extends State
case class Loading(next: State) extends State
case class GameOver(var delay: Int) extends State
case class GameWon(var delay: Int) extends State
case class LifeLost(var delay: Int) extends State
case class CompileError(next: State) extends State
case class Model(pacman: PacMan = ModelDefaults.pacman,
monsters: Set[Monster] = ModelDefaults.monsters,
walls: Set[Wall] = ModelDefaults.mazeWalls,
points: Set[Thingy] = Set(),
deadMonsters: Set[Monster] = Set(),
counters: Counters = new Counters(),
simpleMode: Boolean = true,
state: State = Running) {
def resetFigures() = {
val oldScore = counters('score)
counters.clear()
counters('score) = oldScore;
copy(pacman = ModelDefaults.pacman.copy(lives = pacman.lives),
monsters = ModelDefaults.monsters, deadMonsters = Set())
}
private val wallCache = Set[BlockPosition]() ++ walls.map(_.pos)
def isWallAt(pos: Position) = wallCache.contains(pos)
// all viable blocks
private val allPos = (for (x <- 0 to (Settings.hBlocks-1); y <- 0 to (Settings.vBlocks-1)) yield BlockPosition(x, y)).toSet -- wallCache
private val g = new Graph
// set up graph: add all nodes
for (p <- allPos) {
g.addNode(p)
}
// set up graph: connect nodes
for (fromP <- allPos) {
for (toD <- List(Left, Right, Up, Down)) {
var toP = fromP.nextIn(toD)
if (!isWallAt(toP)) {
g.addEdge(fromP, toP)
}
}
}
    /**
     * Picks a random position among all blocks that are not occupied by a wall.
     */
def randomValidPos = {
import scala.util.Random.nextInt
allPos.toSeq.apply(nextInt(allPos.size))
}
def minDistBetween(init: Position, from: Position, to: Position, exclude: Set[Position]): Int =
minDistBetween(init, from, Set(to), exclude)
    /**
     * Shortest (BFS) distance from `from` to the closest position in `to`, treating the
     * `exclude` positions (and `init`, when it differs from `from`) as blocked; capped at 45.
     */
def minDistBetween(init: Position, from: Position, to: Set[Position], exclude: Set[Position]): Int = {
g.markTargets(to)
if (init != from) {
//exclude init position
g.mark(init)
}
g.markExcludes(exclude)
val r = g.simpleDistFrom(from, 45)
g.clear
r
}
    /**
     * Farthest (BFS) distance reachable from the block next to `init` in direction `dir`,
     * treating `init` and the positions in `to` as blocked; capped at 45.
     */
def maxPathBetween(init: Position, dir: Direction, to: Set[Position]): Int = {
g.markTargets(to + init)
val r = g.maxPathFrom(init.nextIn(dir), 45)
g.clear
r
}
def maxSafePathBetween(init: Position, dir: Direction, to: Set[Position], max: Int): Int = {
g.maxSafePath(init, dir, to, max)
}
private class Graph {
case class Node(pos: Position, var color: Int = 0)
var nodes = imm.Map[Position, Node]()
var edgesFrom = imm.Map[Node, Set[Node]]().withDefaultValue(Set())
def addNode(pos: Position) {
if (!(nodes contains pos)) {
nodes += (pos -> Node(pos))
} else {
error("Node "+pos+" already in")
}
}
def addEdge(from: Position, to: Position) {
val fromN = nodes(from)
val toN = nodes(to)
edgesFrom += fromN -> (edgesFrom(fromN) + toN)
}
def markTargets(positions: Set[Position]) {
for (p <- positions) {
nodes(p).color = 2
}
}
def markExcludes(positions: Set[Position]) {
for (p <- positions) {
nodes(p).color = 1
}
}
def mark(pos: Position) {
nodes(pos).color = 1
}
def clear {
for ((p, n) <- nodes) {
n.color = 0
}
}
def simpleDistFrom(p: Position, max: Int): Int = {
var toVisit: Set[Node] = Set(nodes(p))
var dist = 0
if (toVisit.forall(_.color == 1)) return max;
while (!toVisit.isEmpty && dist < max) {
dist += 1
val toVisitBatch = toVisit
toVisit = Set[Node]()
for (n <- toVisitBatch) {
if (n.color == 0) {
edgesFrom(n).foreach(toVisit += _)
n.color = 1;
} else if (n.color == 2) {
return dist;
}
}
}
dist min max
}
def maxSafePath(init: Position, dir: Direction, opponents: Set[Position], max: Int): Int = {
val pos = init.nextIn(dir)
mark(init)
var toVisitFrom: Set[Node] = Set(nodes(pos))
var toVisitOpp: Set[Node] = opponents.flatMap(o => edgesFrom(nodes(o)) + nodes(o)).toSet
var dist = 0
while (!toVisitFrom.isEmpty && dist < max) {
dist += 1
// Let's move monsters
val toVisitOppBatch = toVisitOpp
toVisitOpp = Set[Node]()
for (n <- toVisitOppBatch) {
edgesFrom(n).foreach(toVisitOpp += _)
n.color = dist;
}
// Let's move pacman
val toVisitFromBatch = toVisitFrom
toVisitFrom = Set[Node]()
for (n <- toVisitFromBatch) {
if (n.color == 0) {
// visited by nobody
edgesFrom(n).foreach(toVisitFrom += _)
n.color = dist;
} else {
// already visited by pacman or a monster, ignore
}
}
}
clear
dist min max
}
def maxPathFrom(p: Position, max: Int): Int = {
var toVisit: Set[Node] = Set(nodes(p))
var dist = 0
while (!toVisit.isEmpty && dist < max) {
dist += 1
val toVisitBatch = toVisit
toVisit = Set[Node]()
for (n <- toVisitBatch) {
if (n.color == 0) {
edgesFrom(n).foreach(toVisit += _)
n.color = 1;
} else if (n.color == 2) {
// ignore this path
}
}
}
dist min max
}
}
}
class Counters extends mut.HashMap[Any, Int] {
override def default(k: Any) = {
if (k == 'time) Settings.surviveTime
else 0
}
}
object ModelDefaults {
val pacman = new PacMan(new OffsetPosition(9, 10), Right)
val monsters: Set[Monster] = {
Set() + Monster(new OffsetPosition(1,1), Right, Info) +
Monster(new OffsetPosition(17,1), Left, Info) +
Monster(new OffsetPosition(17,18), Left, Cerebro) +
Monster(new OffsetPosition(1,18), Right, Cerebro)
}
/*
val maze: Set[Wall] = {
def w(x: Int, y: Int) = Wall(new BlockPosition(x,y))
// Default maze, *ouch*
Set() ++
(for(x <- 0 to 29) yield w(x, 0)) ++
(for(x <- 0 to 29) yield w(x, 19)) ++
(for(y <- 1 to 6) yield w(0, y)) ++
(for(y <- 1 to 6) yield w(29, y)) ++
(for(y <- 12 to 18) yield w(0, y)) ++
(for(y <- 12 to 18) yield w(29, y)) ++
(for(y <- 7 to 8; x <- 0 to 5) yield w(x, y)) ++
(for(y <- 10 to 11; x <- 0 to 5) yield w(x, y)) ++
(for(y <- 7 to 8; x <- 24 to 29) yield w(x, y)) ++
(for(y <- 10 to 11; x <- 24 to 29) yield w(x, y)) ++
(for(y <- 2 to 3; x <- 2 to 5) yield w(x, y)) ++
(for(y <- 2 to 3; x <- 24 to 27) yield w(x, y)) ++
(for(y <- 2 to 3; x <- 7 to 11) yield w(x, y)) ++
(for(y <- 2 to 3; x <- 18 to 22) yield w(x, y)) ++
(for(y <- 5 to 5; x <- 2 to 5) yield w(x, y)) ++
(for(y <- 5 to 5; x <- 24 to 27) yield w(x, y)) ++
(for(y <- 5 to 6; x <- 10 to 19) yield w(x, y)) ++
(for(y <- 8 to 9; x <- 13 to 16) yield w(x, y)) ++
(for(y <- 1 to 3; x <- 13 to 16) yield w(x, y)) ++
(for(y <- 5 to 13; x <- 7 to 8) yield w(x, y)) ++
(for(y <- 5 to 13; x <- 21 to 22) yield w(x, y)) ++
(for(y <- 8 to 9; x <- 18 to 20) yield w(x, y)) ++
(for(y <- 8 to 9; x <- 8 to 11) yield w(x, y)) ++
(for(y <- 11 to 17; x <- 19 to 19) yield w(x, y)) ++
(for(y <- 11 to 11; x <- 11 to 13) yield w(x, y)) ++
(for(y <- 11 to 11; x <- 15 to 18) yield w(x, y)) ++
(for(y <- 17 to 17; x <- 11 to 14) yield w(x, y)) ++
(for(y <- 17 to 17; x <- 16 to 18) yield w(x, y)) ++
(for(y <- 17 to 17; x <- 16 to 18) yield w(x, y)) ++
(for(y <- 13 to 13; x <- 12 to 17) yield w(x, y)) ++
(for(y <- 15 to 15; x <- 12 to 17) yield w(x, y)) ++
(for(y <- 11 to 17; x <- 10 to 10) yield w(x, y)) ++
(for(y <- 13 to 13; x <- 2 to 5) yield w(x, y)) ++
(for(y <- 15 to 15; x <- 1 to 3) yield w(x, y)) ++
(for(y <- 15 to 15; x <- 26 to 28) yield w(x, y)) ++
(for(y <- 13 to 13; x <- 24 to 27) yield w(x, y)) ++
(for(y <- 15 to 17; x <- 7 to 8) yield w(x, y)) ++
(for(y <- 15 to 17; x <- 5 to 5) yield w(x, y)) ++
(for(y <- 15 to 17; x <- 24 to 24) yield w(x, y)) ++
(for(y <- 17 to 17; x <- 2 to 4) yield w(x, y)) ++
(for(y <- 17 to 17; x <- 25 to 27) yield w(x, y)) ++
(for(y <- 15 to 17; x <- 21 to 22) yield w(x, y))
}
*/
/*
XXXXXXXXXXXXXXXXXXX.
X X X.
X XX XXX X XXX XX X.
X XX XXX X XXX XX X.
X X.
X XX X XXXXX X XX X.
X X X X X.
X XX XXX X XXX XXXX.
X XX X X XXXX.
XXXXX .
XXXX X X XX X.
XXXX X XXXXX X XX X.
X X X.
X XX XXX X XXX XX X.
X X X X.
XX X X XXXXX X X XX.
X X X X X.
X XX XXX X XXX XX X.
X X.
XXXXXXXXXXXXXXXXXXX""".split(".\n").tail
*/
val maze: Set[Wall] = {
val lines =""".
XXXXXXXXXXXXXXXXXXX.
X X.
X RRR RRR RRR R X.
X R RNR R R X.
X R RRR R R X.
X RRR R RRR R X.
X R R R R X.
X R R R R X.
X RRR R R RRR X.
.
XXXX X X XX X.
XXXX X XXXXX X XX X.
X X X.
X XX XXX X XXX XX X.
X X X X.
XX X X XXXXX X X XX.
X X X X X.
X XX XXX X XXX XX X.
X X.
XXXXXXXXXXXXXXXXXXX""".split(".\n").tail
Set() ++ { for ((line, y) <- lines.zipWithIndex; (char, x) <- line.zipWithIndex if (char == 'X' || char == 'R' || char == 'N'))
yield (Wall(new BlockPosition(x, y), char match { case 'R' => RedWall; case 'X' => BlueWall; case 'N' => NoWall }))
}
}
val mazeWalls = maze filter(w => w.tpe != NoWall)
def points: Set[Thingy] = {
import scala.util.Random.nextInt
val wallsPos = Set[Position]() ++ maze.map(w => w.pos)
collection.immutable.ListSet[Thingy]() ++
(for (x <- 0 until Settings.hBlocks; y <- 0 until Settings.vBlocks if !(wallsPos contains BlockPosition(x, y))) yield {
if (nextInt(100) < Settings.superPointsRatio) {
SuperPoint(new BlockPosition(x, y))
} else {
NormalPoint(new BlockPosition(x, y))
}
})
}
}
}
| lrytz/pacman | src/main/scala/epfl/pacman/maze/Models.scala | Scala | bsd-2-clause | 11,501 |
/*
* Copyright (C) 2017 HAT Data Exchange Ltd
* SPDX-License-Identifier: AGPL-3.0
*
* This file is part of the Hub of All Things project (HAT).
*
* HAT is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation, version 3 of
* the License.
*
* HAT is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General
* Public License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*
* Written by Andrius Aucinas <[email protected]>
* 11 / 2017
*/
package org.hatdex.hat.she.functions
import org.hatdex.hat.api.models.FilterOperator.Between
import org.hatdex.hat.she.mappers._
import org.joda.time.{ DateTime, DateTimeUtils }
import org.specs2.mock.Mockito
import org.specs2.specification.BeforeAfterAll
import play.api.Logger
import play.api.test.PlaySpecification
import scala.concurrent.Await
import scala.concurrent.duration._
class DataFeedDirectMapperSpec extends PlaySpecification with Mockito with DataFeedDirectMapperContext with BeforeAfterAll {
val logger = Logger(this.getClass)
def beforeAll: Unit = {
DateTimeUtils.setCurrentMillisFixed(1514764800000L)
Await.result(databaseReady, 60.seconds)
}
def afterAll: Unit = {
DateTimeUtils.setCurrentMillisSystem()
}
override def before(): Unit = {
import org.hatdex.hat.dal.Tables._
import org.hatdex.libs.dal.HATPostgresProfile.api._
val endpointRecrodsQuery = DataJson.filter(d => d.source.like("test%") ||
d.source.like("rumpel%") ||
d.source.like("twitter%") ||
d.source.like("facebook%") ||
d.source.like("fitbit%") ||
d.source.like("calendar%")).map(_.recordId)
val action = DBIO.seq(
DataDebitBundle.filter(_.bundleId.like("test%")).delete,
DataDebitContract.filter(_.dataDebitKey.like("test%")).delete,
DataCombinators.filter(_.combinatorId.like("test%")).delete,
DataBundles.filter(_.bundleId.like("test%")).delete,
DataJsonGroupRecords.filter(_.recordId in endpointRecrodsQuery).delete,
DataJsonGroups.filterNot(g => g.groupId in DataJsonGroupRecords.map(_.groupId)).delete,
DataJson.filter(r => r.recordId in endpointRecrodsQuery).delete)
Await.result(hatDatabase.run(action), 60.seconds)
}
"The `mapGoogleCalendarEvent` method" should {
"translate google calendar event with timezone information" in {
val mapper = new GoogleCalendarMapper()
val transformed = mapper.mapDataRecord(googleCalendarEvent.recordId.get, googleCalendarEvent.data).get
transformed.source must be equalTo "google"
transformed.types must contain("event")
transformed.title.get.text must contain("MadHATTERs Tea Party: The Boston Party")
transformed.title.get.subtitle.get must contain("12 December 18:30 - 22:30 America/New_York")
transformed.content.get.text.get must contain("personal data, user accounts, security and value")
}
"remove html tags from google calendar event description" in {
val mapper = new GoogleCalendarMapper()
val transformed = mapper.mapDataRecord(googleCalendarEventHtml.recordId.get, googleCalendarEventHtml.data).get
transformed.source must be equalTo "google"
transformed.types must contain("event")
transformed.title.get.text must contain("MadHATTERs Tea Party: The Boston Party")
transformed.title.get.subtitle.get must contain("12 December 18:30 - 22:30 America/New_York")
transformed.content.get.text.get must contain("BD call")
transformed.content.get.text.get must not contain ("<br>")
transformed.content.get.text.get must not contain (" ")
transformed.content.get.text.get must not contain ("</a>")
}
"translate google calendar full-day event" in {
val mapper = new GoogleCalendarMapper()
val transformed = mapper.mapDataRecord(googleCalendarFullDayEvent.recordId.get, googleCalendarFullDayEvent.data).get
transformed.source must be equalTo "google"
transformed.types must contain("event")
transformed.content.get.text must beNone
}
}
  // TODO: update tweet with retweet structure
"The `mapTweet` method" should {
"translate twitter retweets" in {
val mapper = new TwitterFeedMapper()
val transformed = mapper.mapDataRecord(exampleTweetRetweet.recordId.get, exampleTweetRetweet.data).get
transformed.source must be equalTo "twitter"
transformed.types must contain("post")
transformed.title.get.text must contain("You retweeted")
transformed.content.get.text.get must contain("RT @jupenur: Oh shit Adobe https://t.co/7rDL3LWVVz")
transformed.location.get.geo.get.longitude must be equalTo -75.14310264
transformed.location.get.address.get.country.get must be equalTo "United States"
transformed.location.get.address.get.city.get must be equalTo "Washington"
}
// TODO: update tweet with reply structure
"translate twitter replies" in {
val mapper = new TwitterFeedMapper()
val transformed = mapper.mapDataRecord(exampleTweetMentions.recordId.get, exampleTweetMentions.data).get
transformed.source must be equalTo "twitter"
transformed.title.get.text must contain("You replied to @drgeep")
}
"translate minimal tweet structure correctly" in {
val mapper = new TwitterFeedMapper()
val transformed = mapper.mapDataRecord(exampleTweetMinimalFields.recordId.get, exampleTweetMinimalFields.data).get
transformed.source must be equalTo "twitter"
transformed.content.get.text.get must contain("Tweet from Portugal.")
}
}
"The `InstagramMediaMapper` class" should {
"translate single image posts using v1 API" in {
val mapper = new InstagramMediaMapper()
val transformed = mapper.mapDataRecord(exampleInstagramImagev1.recordId.get, exampleInstagramImagev1.data).get
transformed.source must be equalTo "instagram"
transformed.title.get.text must contain("You posted")
transformed.title.get.action.get must be equalTo "image"
transformed.content.get.text.get must contain("Saturday breakfast magic")
transformed.content.get.media.get.length must be equalTo 1
transformed.content.get.media.get.head.url.get must be startingWith "https://scontent.cdninstagram.com/vp"
}
"translate multiple image carousel posts using v1 API" in {
val mapper = new InstagramMediaMapper()
val transformed = mapper.mapDataRecord(exampleMultipleInstagramImages.recordId.get, exampleMultipleInstagramImages.data).get
transformed.source must be equalTo "instagram"
transformed.title.get.text must contain("You posted")
transformed.title.get.action.get must be equalTo "carousel"
transformed.content.get.text.get must contain("The beauty of Richmond park...")
transformed.content.get.media.get.length must be equalTo 3
transformed.content.get.media.get.head.url.get must be startingWith "https://scontent.cdninstagram.com/vp"
}
"translate single image posts using v2 API" in {
val mapper = new InstagramMediaMapper()
val transformed = mapper.mapDataRecord(exampleInstagramImagev2.recordId.get, exampleInstagramImagev2.data).get
transformed.source must be equalTo "instagram"
transformed.title.get.text must contain("You posted")
transformed.title.get.action.get must be equalTo "image"
transformed.content.get.text.get must contain("Saturday breakfast magic")
transformed.content.get.media.get.length must be equalTo 1
transformed.content.get.media.get.head.url.get must be startingWith "https://scontent.xx.fbcdn.net/v/"
}
"create data queries using correct unix timestamp format" in {
val mapper = new InstagramMediaMapper()
val fromDate = new DateTime("2018-05-01T09:00:00Z")
val untilDate = fromDate.plusDays(1)
val propertyQuery = mapper.dataQueries(Some(fromDate), Some(untilDate))
propertyQuery.head.orderBy.get must be equalTo "ds_created_time"
propertyQuery.head.endpoints.head.endpoint must be equalTo "instagram/feed"
propertyQuery.head.endpoints.head.filters.get.head.operator.asInstanceOf[Between].lower.as[String] must be equalTo "1525165200"
propertyQuery.head.endpoints.head.filters.get.head.operator.asInstanceOf[Between].upper.as[String] must be equalTo "1525251600"
}
}
"The `mapFacebookPost` method" should {
"translate facebook photo posts" in {
val mapper = new FacebookFeedMapper()
val transformed = mapper.mapDataRecord(exampleFacebookPhotoPost.recordId.get, exampleFacebookPhotoPost.data).get
transformed.source must be equalTo "facebook"
transformed.title.get.text must be equalTo "You posted a photo"
transformed.content.get.media.get.head.url.get must be startingWith "https://scontent.xx.fbcdn.net"
}
"translate facebook replies" in {
val mapper = new FacebookFeedMapper()
val transformed = mapper.mapDataRecord(exampleFacebookPost.recordId.get, exampleFacebookPost.data).get
transformed.source must be equalTo "facebook"
transformed.title.get.text must be equalTo "You posted"
transformed.content.get.text.get must be startingWith "jetlag wouldn't be so bad if not for Aileen signing (whistling?) out the window overnight..."
}
"translate facebook stories" in {
val mapper = new FacebookFeedMapper()
val transformed = mapper.mapDataRecord(facebookStory.recordId.get, facebookStory.data).get
transformed.source must be equalTo "facebook"
transformed.title.get.text must be equalTo "You shared a story"
transformed.content.get.text.get must be startingWith "Guilty. Though works for startups too."
transformed.content.get.text.get must contain("http://phdcomics.com/comics.php?f=1969")
}
}
"The `mapFacebookEvent` method" should {
"translate facebook events with location" in {
val mapper = new FacebookEventMapper()
val transformed = mapper.mapDataRecord(facebookEvent.recordId.get, facebookEvent.data).get
transformed.source must be equalTo "facebook"
transformed.types must contain("event")
transformed.title.get.text must be equalTo "You are attending an event"
transformed.content.get.text.get must contain("We're going somewhere new")
transformed.location.get.address.get.city.get must be equalTo "Singapore"
transformed.location.get.address.get.name.get must be equalTo "Carlton Hotel Singapore"
}
"translate facebook events without location" in {
val mapper = new FacebookEventMapper()
val transformed = mapper.mapDataRecord(facebookEvenNoLocation.recordId.get, facebookEvenNoLocation.data).get
transformed.source must be equalTo "facebook"
transformed.types must contain("event")
transformed.title.get.text must be equalTo "You are attending an event"
transformed.content.get.text.get must contain("privacy, security, access rights, regulation")
transformed.location must beNone
}
"translate facebook events with incomplete location" in {
val mapper = new FacebookEventMapper()
val transformed = mapper.mapDataRecord(facebookEvenPartialLocation.recordId.get, facebookEvenPartialLocation.data).get
transformed.source must be equalTo "facebook"
transformed.types must contain("event")
transformed.title.get.text must be equalTo "You are attending an event"
transformed.content.get.text.get must contain("privacy, security, access rights, regulation")
transformed.location must beNone
}
}
"The `mapFitbitWeight` method" should {
"translate fitbit weight" in {
val mapper = new FitbitWeightMapper()
val transformed = mapper.mapDataRecord(fitbitWeightMeasurement.recordId.get, fitbitWeightMeasurement.data).get
transformed.source must be equalTo "fitbit"
transformed.types must contain("fitness", "weight")
transformed.title.get.text must be equalTo "You added a new weight measurement"
transformed.content.get.text.get must contain("94.8")
transformed.content.get.text.get must contain("25.46")
transformed.content.get.text.get must contain("21.5")
}
}
"The `mapFitbitSleep` method" should {
"translate fitbit sleep" in {
val mapper = new FitbitSleepMapper()
val transformed = mapper.mapDataRecord(fitbitSleepMeasurement.recordId.get, fitbitSleepMeasurement.data).get
transformed.source must be equalTo "fitbit"
transformed.types must contain("fitness", "sleep")
transformed.title.get.text must contain("You woke up")
transformed.content.get.text.get must contain("You spent 8 hours and 4 minutes in bed.")
transformed.content.get.text.get must contain("You slept for 7 hours and 20 minutes ")
transformed.content.get.text.get must contain("and were awake for 44 minutes")
}
}
"The `mapFitbitActivity` method" should {
"translate fitbit activity" in {
val mapper = new FitbitActivityMapper()
val transformed = mapper.mapDataRecord(fitbitActivity.recordId.get, fitbitActivity.data).get
transformed.source must be equalTo "fitbit"
transformed.types must contain("fitness")
transformed.title.get.text must contain("You logged Fitbit activity")
transformed.content.get.text.get must contain("Activity: Walk")
transformed.content.get.text.get must contain("Duration: 17 minutes")
transformed.content.get.text.get must contain("Average heart rate: 94")
transformed.content.get.text.get must contain("Calories burned: 126")
}
}
"The `mapFitbitDaySummarySteps` method" should {
"not generate a feed item for days with 0 steps recorded" in {
val mapper = new FitbitActivityDaySummaryMapper()
val transformed = mapper.mapDataRecord(fitbitDayEmptySummary.recordId.get, fitbitDayEmptySummary.data)
transformed must beAFailedTry
}
"translate fitbit day summary to steps" in {
val mapper = new FitbitActivityDaySummaryMapper()
val transformed = mapper.mapDataRecord(fitbitDaySummary.recordId.get, fitbitDaySummary.data).get
transformed.source must be equalTo "fitbit"
transformed.types must contain("fitness")
transformed.title.get.text must contain("You walked 12135 steps")
transformed.content must beNone
}
}
}
| Hub-of-all-Things/HAT2.0 | hat/test/org/hatdex/hat/she/functions/DataFeedDirectMapperSpec.scala | Scala | agpl-3.0 | 14,695 |
package com.github.dakatsuka.akka.http.oauth2.client
import akka.actor.ActorSystem
import akka.http.scaladsl.model.{ HttpEntity, HttpResponse, StatusCodes }
import akka.http.scaladsl.model.ContentTypes.`application/json`
import akka.stream.{ ActorMaterializer, Materializer }
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.time.{ Millis, Seconds, Span }
import org.scalatest.{ BeforeAndAfterAll, DiagrammedAssertions, FlatSpec }
import scala.concurrent.{ Await, ExecutionContext }
import scala.concurrent.duration.Duration
class AccessTokenSpec extends FlatSpec with DiagrammedAssertions with ScalaFutures with BeforeAndAfterAll {
implicit val system: ActorSystem = ActorSystem()
implicit val ec: ExecutionContext = system.dispatcher
implicit val materializer: Materializer = ActorMaterializer()
implicit val defaultPatience: PatienceConfig =
PatienceConfig(timeout = Span(5, Seconds), interval = Span(700, Millis))
override def afterAll(): Unit = {
Await.ready(system.terminate(), Duration.Inf)
}
behavior of "AccessToken"
it should "apply from HttpResponse" in {
val accessToken = "xxx"
val tokenType = "bearer"
val expiresIn = 86400
val refreshToken = "yyy"
val httpResponse = HttpResponse(
status = StatusCodes.OK,
headers = Nil,
entity = HttpEntity(
`application/json`,
s"""
|{
| "access_token": "$accessToken",
| "token_type": "$tokenType",
| "expires_in": $expiresIn,
| "refresh_token": "$refreshToken"
|}
""".stripMargin
)
)
val result = AccessToken(httpResponse)
whenReady(result) { token =>
assert(token.accessToken == accessToken)
assert(token.tokenType == tokenType)
assert(token.expiresIn == expiresIn)
assert(token.refreshToken.contains(refreshToken))
}
}
}
| dakatsuka/akka-http-oauth2-client | src/test/scala/com/github/dakatsuka/akka/http/oauth2/client/AccessTokenSpec.scala | Scala | apache-2.0 | 1,925 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.util.{Locale, TimeZone}
import scala.reflect.ClassTag
import org.apache.log4j.Level
import org.scalatest.Matchers
import org.apache.spark.api.python.PythonEvalType
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType, InMemoryCatalog, SessionCatalog}
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.errors.TreeNodeException
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.aggregate.{AggregateExpression, Count, Sum}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
import org.apache.spark.sql.catalyst.plans.{Cross, Inner}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.physical.{HashPartitioning, Partitioning, RangePartitioning, RoundRobinPartitioning}
import org.apache.spark.sql.catalyst.rules.RuleExecutor
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
class AnalysisSuite extends AnalysisTest with Matchers {
import org.apache.spark.sql.catalyst.analysis.TestRelations._
test("union project *") {
val plan = (1 to 120)
.map(_ => testRelation)
.fold[LogicalPlan](testRelation) { (a, b) =>
a.select(UnresolvedStar(None)).select($"a").union(b.select(UnresolvedStar(None)))
}
assertAnalysisSuccess(plan)
}
test("check project's resolved") {
assert(Project(testRelation.output, testRelation).resolved)
assert(!Project(Seq(UnresolvedAttribute("a")), testRelation).resolved)
val explode = Explode(AttributeReference("a", IntegerType, nullable = true)())
assert(!Project(Seq(Alias(explode, "explode")()), testRelation).resolved)
assert(!Project(Seq(Alias(count(Literal(1)), "count")()), testRelation).resolved)
}
test("analyze project") {
checkAnalysis(
Project(Seq(UnresolvedAttribute("a")), testRelation),
Project(testRelation.output, testRelation))
checkAnalysis(
Project(Seq(UnresolvedAttribute("TbL.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation))
assertAnalysisError(
Project(Seq(UnresolvedAttribute("tBl.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Seq("cannot resolve"))
checkAnalysis(
Project(Seq(UnresolvedAttribute("TbL.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation),
caseSensitive = false)
checkAnalysis(
Project(Seq(UnresolvedAttribute("tBl.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation),
caseSensitive = false)
}
test("resolve sort references - filter/limit") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
// Case 1: one missing attribute is in the leaf node and another is in the unary node
val plan1 = testRelation2
.where($"a" > "str").select($"a", $"b")
.where($"b" > "str").select($"a")
.sortBy($"b".asc, $"c".desc)
val expected1 = testRelation2
.where(a > "str").select(a, b, c)
.where(b > "str").select(a, b, c)
.sortBy(b.asc, c.desc)
.select(a)
checkAnalysis(plan1, expected1)
// Case 2: all the missing attributes are in the leaf node
val plan2 = testRelation2
.where($"a" > "str").select($"a")
.where($"a" > "str").select($"a")
.sortBy($"b".asc, $"c".desc)
val expected2 = testRelation2
.where(a > "str").select(a, b, c)
.where(a > "str").select(a, b, c)
.sortBy(b.asc, c.desc)
.select(a)
checkAnalysis(plan2, expected2)
}
test("resolve sort references - join") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
val h = testRelation3.output(3)
// Case: join itself can resolve all the missing attributes
val plan = testRelation2.join(testRelation3)
.where($"a" > "str").select($"a", $"b")
.sortBy($"c".desc, $"h".asc)
val expected = testRelation2.join(testRelation3)
.where(a > "str").select(a, b, c, h)
.sortBy(c.desc, h.asc)
.select(a, b)
checkAnalysis(plan, expected)
}
test("resolve sort references - aggregate") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
val alias_a3 = count(a).as("a3")
val alias_b = b.as("aggOrder")
// Case 1: when the child of Sort is not Aggregate,
// the sort reference is handled by the rule ResolveSortReferences
val plan1 = testRelation2
.groupBy($"a", $"c", $"b")($"a", $"c", count($"a").as("a3"))
.select($"a", $"c", $"a3")
.orderBy($"b".asc)
val expected1 = testRelation2
.groupBy(a, c, b)(a, c, alias_a3, b)
.select(a, c, alias_a3.toAttribute, b)
.orderBy(b.asc)
.select(a, c, alias_a3.toAttribute)
checkAnalysis(plan1, expected1)
// Case 2: when the child of Sort is Aggregate,
// the sort reference is handled by the rule ResolveAggregateFunctions
val plan2 = testRelation2
.groupBy($"a", $"c", $"b")($"a", $"c", count($"a").as("a3"))
.orderBy($"b".asc)
val expected2 = testRelation2
.groupBy(a, c, b)(a, c, alias_a3, alias_b)
.orderBy(alias_b.toAttribute.asc)
.select(a, c, alias_a3.toAttribute)
checkAnalysis(plan2, expected2)
}
test("resolve relations") {
assertAnalysisError(UnresolvedRelation(TableIdentifier("tAbLe")), Seq())
checkAnalysis(UnresolvedRelation(TableIdentifier("TaBlE")), testRelation)
checkAnalysis(
UnresolvedRelation(TableIdentifier("tAbLe")), testRelation, caseSensitive = false)
checkAnalysis(
UnresolvedRelation(TableIdentifier("TaBlE")), testRelation, caseSensitive = false)
}
test("divide should be casted into fractional types") {
val plan = caseInsensitiveAnalyzer.execute(
testRelation2.select(
$"a" / Literal(2) as "div1",
$"a" / $"b" as "div2",
$"a" / $"c" as "div3",
$"a" / $"d" as "div4",
$"e" / $"e" as "div5"))
val pl = plan.asInstanceOf[Project].projectList
assert(pl(0).dataType == DoubleType)
assert(pl(1).dataType == DoubleType)
assert(pl(2).dataType == DoubleType)
assert(pl(3).dataType == DoubleType)
assert(pl(4).dataType == DoubleType)
}
test("pull out nondeterministic expressions from RepartitionByExpression") {
val plan = RepartitionByExpression(Seq(Rand(33)), testRelation, numPartitions = 10)
val projected = Alias(Rand(33), "_nondeterministic")()
val expected =
Project(testRelation.output,
RepartitionByExpression(Seq(projected.toAttribute),
Project(testRelation.output :+ projected, testRelation),
numPartitions = 10))
checkAnalysis(plan, expected)
}
test("pull out nondeterministic expressions from Sort") {
val plan = Sort(Seq(SortOrder(Rand(33), Ascending)), false, testRelation)
val projected = Alias(Rand(33), "_nondeterministic")()
val expected =
Project(testRelation.output,
Sort(Seq(SortOrder(projected.toAttribute, Ascending)), false,
Project(testRelation.output :+ projected, testRelation)))
checkAnalysis(plan, expected)
}
test("SPARK-9634: cleanup unnecessary Aliases in LogicalPlan") {
val a = testRelation.output.head
var plan = testRelation.select(((a + 1).as("a+1") + 2).as("col"))
var expected = testRelation.select((a + 1 + 2).as("col"))
checkAnalysis(plan, expected)
plan = testRelation.groupBy(a.as("a1").as("a2"))((min(a).as("min_a") + 1).as("col"))
expected = testRelation.groupBy(a)((min(a) + 1).as("col"))
checkAnalysis(plan, expected)
    // CreateStruct is a special case for which we should not trim the Alias.
plan = testRelation.select(CreateStruct(Seq(a, (a + 1).as("a+1"))).as("col"))
expected = testRelation.select(CreateNamedStruct(Seq(
Literal(a.name), a,
Literal("a+1"), (a + 1))).as("col"))
checkAnalysis(plan, expected)
}
test("Analysis may leave unnecessary aliases") {
val att1 = testRelation.output.head
var plan = testRelation.select(
CreateStruct(Seq(att1, ((att1.as("aa")) + 1).as("a_plus_1"))).as("col"),
att1
)
val prevPlan = getAnalyzer(true).execute(plan)
plan = prevPlan.select(CreateArray(Seq(
CreateStruct(Seq(att1, (att1 + 1).as("a_plus_1"))).as("col1"),
/** alias should be eliminated by [[CleanupAliases]] */
"col".attr.as("col2")
)).as("arr"))
plan = getAnalyzer(true).execute(plan)
val expectedPlan = prevPlan.select(
CreateArray(Seq(
CreateNamedStruct(Seq(
Literal(att1.name), att1,
Literal("a_plus_1"), (att1 + 1))),
Symbol("col").struct(prevPlan.output(0).dataType.asInstanceOf[StructType]).notNull
)).as("arr")
)
checkAnalysis(plan, expectedPlan)
}
test("SPARK-10534: resolve attribute references in order by clause") {
val a = testRelation2.output(0)
val c = testRelation2.output(2)
val plan = testRelation2.select($"c").orderBy(Floor($"a").asc)
val expected = testRelation2.select(c, a)
.orderBy(Floor(Cast(a, DoubleType, Option(TimeZone.getDefault().getID))).asc).select(c)
checkAnalysis(plan, expected)
}
test("self intersect should resolve duplicate expression IDs") {
val plan = testRelation.intersect(testRelation, isAll = false)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: invalid CAST in NULL IN(...) expression") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(1), Literal(2))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: different types in inlist but can be converted to a common type") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(1), Literal(1.2345))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: check type compatibility error") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(true), Literal(1))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisError(plan, Seq("data type mismatch: Arguments must be same type"))
}
test("SPARK-11725: correctly handle null inputs for ScalaUDF") {
val testRelation = LocalRelation(
AttributeReference("a", StringType)(),
AttributeReference("b", DoubleType)(),
AttributeReference("c", ShortType)(),
AttributeReference("d", DoubleType, nullable = false)())
val string = testRelation.output(0)
val double = testRelation.output(1)
val short = testRelation.output(2)
val nonNullableDouble = testRelation.output(3)
val nullResult = Literal.create(null, StringType)
def checkUDF(udf: Expression, transformed: Expression): Unit = {
checkAnalysis(
Project(Alias(udf, "")() :: Nil, testRelation),
Project(Alias(transformed, "")() :: Nil, testRelation)
)
}
// non-primitive parameters do not need special null handling
val udf1 = ScalaUDF((s: String) => "x", StringType, string :: Nil, false :: Nil)
val expected1 = udf1
checkUDF(udf1, expected1)
// only primitive parameter needs special null handling
val udf2 = ScalaUDF((s: String, d: Double) => "x", StringType, string :: double :: Nil,
false :: true :: Nil)
val expected2 =
If(IsNull(double), nullResult, udf2.copy(children = string :: KnownNotNull(double) :: Nil))
checkUDF(udf2, expected2)
// special null handling should apply to all primitive parameters
val udf3 = ScalaUDF((s: Short, d: Double) => "x", StringType, short :: double :: Nil,
true :: true :: Nil)
val expected3 = If(
IsNull(short) || IsNull(double),
nullResult,
udf3.copy(children = KnownNotNull(short) :: KnownNotNull(double) :: Nil))
checkUDF(udf3, expected3)
// we can skip special null handling for primitive parameters that are not nullable
val udf4 = ScalaUDF(
(s: Short, d: Double) => "x",
StringType,
short :: nonNullableDouble :: Nil,
true :: true :: Nil)
val expected4 = If(
IsNull(short),
nullResult,
udf4.copy(children = KnownNotNull(short) :: nonNullableDouble :: Nil))
checkUDF(udf4, expected4)
}
test("SPARK-24891 Fix HandleNullInputsForUDF rule") {
val a = testRelation.output(0)
val func = (x: Int, y: Int) => x + y
val udf1 = ScalaUDF(func, IntegerType, a :: a :: Nil, false :: false :: Nil)
val udf2 = ScalaUDF(func, IntegerType, a :: udf1 :: Nil, false :: false :: Nil)
val plan = Project(Alias(udf2, "")() :: Nil, testRelation)
comparePlans(plan.analyze, plan.analyze.analyze)
}
test("SPARK-11863 mixture of aliases and real columns in order by clause - tpcds 19,55,71") {
val a = testRelation2.output(0)
val c = testRelation2.output(2)
val alias1 = a.as("a1")
val alias2 = c.as("a2")
val alias3 = count(a).as("a3")
val plan = testRelation2
.groupBy($"a", $"c")($"a".as("a1"), $"c".as("a2"), count($"a").as("a3"))
.orderBy($"a1".asc, $"c".asc)
val expected = testRelation2
.groupBy(a, c)(alias1, alias2, alias3)
.orderBy(alias1.toAttribute.asc, alias2.toAttribute.asc)
.select(alias1.toAttribute, alias2.toAttribute, alias3.toAttribute)
checkAnalysis(plan, expected)
}
test("Eliminate the unnecessary union") {
val plan = Union(testRelation :: Nil)
val expected = testRelation
checkAnalysis(plan, expected)
}
test("SPARK-12102: Ignore nullablity when comparing two sides of case") {
val relation = LocalRelation(Symbol("a").struct(Symbol("x").int),
Symbol("b").struct(Symbol("x").int.withNullability(false)))
val plan = relation.select(
CaseWhen(Seq((Literal(true), Symbol("a").attr)), Symbol("b")).as("val"))
assertAnalysisSuccess(plan)
}
test("Keep attribute qualifiers after dedup") {
val input = LocalRelation(Symbol("key").int, Symbol("value").string)
val query =
Project(Seq($"x.key", $"y.key"),
Join(
Project(Seq($"x.key"), SubqueryAlias("x", input)),
Project(Seq($"y.key"), SubqueryAlias("y", input)),
Cross, None, JoinHint.NONE))
assertAnalysisSuccess(query)
}
private def assertExpressionType(
expression: Expression,
expectedDataType: DataType): Unit = {
val afterAnalyze =
Project(Seq(Alias(expression, "a")()), OneRowRelation()).analyze.expressions.head
if (!afterAnalyze.dataType.equals(expectedDataType)) {
fail(
s"""
|data type of expression $expression doesn't match expected:
|Actual data type:
|${afterAnalyze.dataType}
|
|Expected data type:
|${expectedDataType}
""".stripMargin)
}
}
test("SPARK-15776: test whether Divide expression's data type can be deduced correctly by " +
"analyzer") {
assertExpressionType(sum(Divide(1, 2)), DoubleType)
assertExpressionType(sum(Divide(1.0, 2)), DoubleType)
assertExpressionType(sum(Divide(1, 2.0)), DoubleType)
assertExpressionType(sum(Divide(1.0, 2.0)), DoubleType)
assertExpressionType(sum(Divide(1, 2.0f)), DoubleType)
assertExpressionType(sum(Divide(1.0f, 2)), DoubleType)
assertExpressionType(sum(Divide(1, Decimal(2))), DecimalType(22, 11))
assertExpressionType(sum(Divide(Decimal(1), 2)), DecimalType(26, 6))
assertExpressionType(sum(Divide(Decimal(1), 2.0)), DoubleType)
assertExpressionType(sum(Divide(1.0, Decimal(2.0))), DoubleType)
}
test("SPARK-18058: union and set operations shall not care about the nullability" +
" when comparing column types") {
val firstTable = LocalRelation(
AttributeReference("a",
StructType(Seq(StructField("a", IntegerType, nullable = true))), nullable = false)())
val secondTable = LocalRelation(
AttributeReference("a",
StructType(Seq(StructField("a", IntegerType, nullable = false))), nullable = false)())
val unionPlan = Union(firstTable, secondTable)
assertAnalysisSuccess(unionPlan)
val r1 = Except(firstTable, secondTable, isAll = false)
val r2 = Intersect(firstTable, secondTable, isAll = false)
assertAnalysisSuccess(r1)
assertAnalysisSuccess(r2)
}
test("resolve as with an already existed alias") {
checkAnalysis(
Project(Seq(UnresolvedAttribute("tbl2.a")),
SubqueryAlias("tbl", testRelation).as("tbl2")),
Project(testRelation.output, testRelation),
caseSensitive = false)
checkAnalysis(SubqueryAlias("tbl", testRelation).as("tbl2"), testRelation)
}
test("SPARK-20311 range(N) as alias") {
def rangeWithAliases(args: Seq[Int], outputNames: Seq[String]): LogicalPlan = {
SubqueryAlias("t", UnresolvedTableValuedFunction("range", args.map(Literal(_)), outputNames))
.select(star())
}
assertAnalysisSuccess(rangeWithAliases(3 :: Nil, "a" :: Nil))
assertAnalysisSuccess(rangeWithAliases(1 :: 4 :: Nil, "b" :: Nil))
assertAnalysisSuccess(rangeWithAliases(2 :: 6 :: 2 :: Nil, "c" :: Nil))
assertAnalysisError(
rangeWithAliases(3 :: Nil, "a" :: "b" :: Nil),
Seq("Number of given aliases does not match number of output columns. "
+ "Function name: range; number of aliases: 2; number of output columns: 1."))
}
test("SPARK-20841 Support table column aliases in FROM clause") {
def tableColumnsWithAliases(outputNames: Seq[String]): LogicalPlan = {
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias("t", UnresolvedRelation(TableIdentifier("TaBlE3")))
).select(star())
}
assertAnalysisSuccess(tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
tableColumnsWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-20962 Support subquery column aliases in FROM clause") {
def tableColumnsWithAliases(outputNames: Seq[String]): LogicalPlan = {
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias(
"t",
UnresolvedRelation(TableIdentifier("TaBlE3")))
).select(star())
}
assertAnalysisSuccess(tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
tableColumnsWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-20963 Support aliases for join relations in FROM clause") {
def joinRelationWithAliases(outputNames: Seq[String]): LogicalPlan = {
val src1 = LocalRelation(Symbol("id").int, Symbol("v1").string).as("s1")
val src2 = LocalRelation(Symbol("id").int, Symbol("v2").string).as("s2")
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias(
"dst",
src1.join(src2, Inner, Option(Symbol("s1.id") === Symbol("s2.id"))))
).select(star())
}
assertAnalysisSuccess(joinRelationWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
joinRelationWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
joinRelationWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-22614 RepartitionByExpression partitioning") {
def checkPartitioning[T <: Partitioning: ClassTag](
numPartitions: Int, exprs: Expression*): Unit = {
val partitioning = RepartitionByExpression(exprs, testRelation2, numPartitions).partitioning
val clazz = implicitly[ClassTag[T]].runtimeClass
assert(clazz.isInstance(partitioning))
}
checkPartitioning[HashPartitioning](numPartitions = 10, exprs = Literal(20))
checkPartitioning[HashPartitioning](numPartitions = 10,
exprs = Symbol("a").attr, Symbol("b").attr)
checkPartitioning[RangePartitioning](numPartitions = 10,
exprs = SortOrder(Literal(10), Ascending))
checkPartitioning[RangePartitioning](numPartitions = 10,
exprs = SortOrder(Symbol("a").attr, Ascending), SortOrder(Symbol("b").attr, Descending))
checkPartitioning[RoundRobinPartitioning](numPartitions = 10, exprs = Seq.empty: _*)
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = 0, exprs = Literal(20))
}
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = -1, exprs = Literal(20))
}
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = 10, exprs =
SortOrder(Symbol("a").attr, Ascending), Symbol("b").attr)
}
}
test("SPARK-24208: analysis fails on self-join with FlatMapGroupsInPandas") {
val pythonUdf = PythonUDF("pyUDF", null,
StructType(Seq(StructField("a", LongType))),
Seq.empty,
PythonEvalType.SQL_GROUPED_MAP_PANDAS_UDF,
true)
val output = pythonUdf.dataType.asInstanceOf[StructType].toAttributes
val project = Project(Seq(UnresolvedAttribute("a")), testRelation)
val flatMapGroupsInPandas = FlatMapGroupsInPandas(
Seq(UnresolvedAttribute("a")), pythonUdf, output, project)
val left = SubqueryAlias("temp0", flatMapGroupsInPandas)
val right = SubqueryAlias("temp1", flatMapGroupsInPandas)
val join = Join(left, right, Inner, None, JoinHint.NONE)
assertAnalysisSuccess(
Project(Seq(UnresolvedAttribute("temp0.a"), UnresolvedAttribute("temp1.a")), join))
}
test("SPARK-24488 Generator with multiple aliases") {
assertAnalysisSuccess(
listRelation.select(Explode($"list").as("first_alias").as("second_alias")))
assertAnalysisSuccess(
listRelation.select(MultiAlias(MultiAlias(
PosExplode($"list"), Seq("first_pos", "first_val")), Seq("second_pos", "second_val"))))
}
test("SPARK-24151: CURRENT_DATE, CURRENT_TIMESTAMP should be case insensitive") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "true") {
val input = Project(Seq(
UnresolvedAttribute("current_date"),
UnresolvedAttribute("CURRENT_DATE"),
UnresolvedAttribute("CURRENT_TIMESTAMP"),
UnresolvedAttribute("current_timestamp")), testRelation)
val expected = Project(Seq(
Alias(CurrentDate(), toPrettySQL(CurrentDate()))(),
Alias(CurrentDate(), toPrettySQL(CurrentDate()))(),
Alias(CurrentTimestamp(), toPrettySQL(CurrentTimestamp()))(),
Alias(CurrentTimestamp(), toPrettySQL(CurrentTimestamp()))()), testRelation).analyze
checkAnalysis(input, expected)
}
}
test("SPARK-25691: AliasViewChild with different nullabilities") {
object ViewAnalyzer extends RuleExecutor[LogicalPlan] {
val batches = Batch("View", Once, EliminateView) :: Nil
}
val relation = LocalRelation(Symbol("a").int.notNull, Symbol("b").string)
val view = View(CatalogTable(
identifier = TableIdentifier("v1"),
tableType = CatalogTableType.VIEW,
storage = CatalogStorageFormat.empty,
schema = StructType(Seq(StructField("a", IntegerType), StructField("b", StringType)))),
output = Seq(Symbol("a").int, Symbol("b").string),
child = relation)
val tz = Option(conf.sessionLocalTimeZone)
val expected = Project(Seq(
Alias(Cast(Symbol("a").int.notNull, IntegerType, tz), "a")(),
Alias(Cast(Symbol("b").string, StringType, tz), "b")()),
relation)
val res = ViewAnalyzer.execute(view)
comparePlans(res, expected)
}
test("CTE with non-existing column alias") {
assertAnalysisError(parsePlan("WITH t(x) AS (SELECT 1) SELECT * FROM t WHERE y = 1"),
Seq("cannot resolve '`y`' given input columns: [x]"))
}
test("CTE with non-matching column alias") {
assertAnalysisError(parsePlan("WITH t(x, y) AS (SELECT 1) SELECT * FROM t WHERE x = 1"),
Seq("Number of column aliases does not match number of columns. Number of column aliases: " +
"2; number of columns: 1."))
}
test("SPARK-28251: Insert into non-existing table error message is user friendly") {
assertAnalysisError(parsePlan("INSERT INTO test VALUES (1)"),
Seq("Table not found: test"))
}
test("check CollectMetrics resolved") {
val a = testRelation.output.head
val sum = Sum(a).toAggregateExpression().as("sum")
val random_sum = Sum(Rand(1L)).toAggregateExpression().as("rand_sum")
val literal = Literal(1).as("lit")
// Ok
assert(CollectMetrics("event", literal :: sum :: random_sum :: Nil, testRelation).resolved)
// Bad name
assert(!CollectMetrics("", sum :: Nil, testRelation).resolved)
assertAnalysisError(CollectMetrics("", sum :: Nil, testRelation),
"observed metrics should be named" :: Nil)
// No columns
assert(!CollectMetrics("evt", Nil, testRelation).resolved)
def checkAnalysisError(exprs: Seq[NamedExpression], errors: String*): Unit = {
assertAnalysisError(CollectMetrics("event", exprs, testRelation), errors)
}
// Unwrapped attribute
checkAnalysisError(
a :: Nil,
"Attribute", "can only be used as an argument to an aggregate function")
// Unwrapped non-deterministic expression
checkAnalysisError(
Rand(10).as("rnd") :: Nil,
"non-deterministic expression", "can only be used as an argument to an aggregate function")
// Distinct aggregate
checkAnalysisError(
Sum(a).toAggregateExpression(isDistinct = true).as("sum") :: Nil,
"distinct aggregates are not allowed in observed metrics, but found")
// Nested aggregate
checkAnalysisError(
Sum(Sum(a).toAggregateExpression()).toAggregateExpression().as("sum") :: Nil,
"nested aggregates are not allowed in observed metrics, but found")
// Windowed aggregate
val windowExpr = WindowExpression(
RowNumber(),
WindowSpecDefinition(Nil, a.asc :: Nil,
SpecifiedWindowFrame(RowFrame, UnboundedPreceding, CurrentRow)))
checkAnalysisError(
windowExpr.as("rn") :: Nil,
"window expressions are not allowed in observed metrics, but found")
}
test("check CollectMetrics duplicates") {
val a = testRelation.output.head
val sum = Sum(a).toAggregateExpression().as("sum")
val count = Count(Literal(1)).toAggregateExpression().as("cnt")
// Same result - duplicate names are allowed
assertAnalysisSuccess(Union(
CollectMetrics("evt1", count :: Nil, testRelation) ::
CollectMetrics("evt1", count :: Nil, testRelation) :: Nil))
// Same children, structurally different metrics - fail
assertAnalysisError(Union(
CollectMetrics("evt1", count :: Nil, testRelation) ::
CollectMetrics("evt1", sum :: Nil, testRelation) :: Nil),
"Multiple definitions of observed metrics" :: "evt1" :: Nil)
// Different children, same metrics - fail
val b = Symbol("b").string
val tblB = LocalRelation(b)
assertAnalysisError(Union(
CollectMetrics("evt1", count :: Nil, testRelation) ::
CollectMetrics("evt1", count :: Nil, tblB) :: Nil),
"Multiple definitions of observed metrics" :: "evt1" :: Nil)
// Subquery different tree - fail
val subquery = Aggregate(Nil, sum :: Nil, CollectMetrics("evt1", count :: Nil, testRelation))
val query = Project(
b :: ScalarSubquery(subquery, Nil).as("sum") :: Nil,
CollectMetrics("evt1", count :: Nil, tblB))
assertAnalysisError(query, "Multiple definitions of observed metrics" :: "evt1" :: Nil)
// Aggregate with filter predicate - fail
val sumWithFilter = sum.transform {
case a: AggregateExpression => a.copy(filter = Some(true))
}.asInstanceOf[NamedExpression]
assertAnalysisError(
CollectMetrics("evt1", sumWithFilter :: Nil, testRelation),
"aggregates with filter predicate are not allowed" :: Nil)
}
test("Analysis exceed max iterations") {
    // RuleExecutor only throws an exception or logs a warning when the rule is supposed
    // to run more than once.
val maxIterations = 2
val conf = new SQLConf().copy(SQLConf.ANALYZER_MAX_ITERATIONS -> maxIterations)
val testAnalyzer = new Analyzer(
new SessionCatalog(new InMemoryCatalog, FunctionRegistry.builtin, conf), conf)
val plan = testRelation2.select(
$"a" / Literal(2) as "div1",
$"a" / $"b" as "div2",
$"a" / $"c" as "div3",
$"a" / $"d" as "div4",
$"e" / $"e" as "div5")
val message = intercept[TreeNodeException[LogicalPlan]] {
testAnalyzer.execute(plan)
}.getMessage
assert(message.startsWith(s"Max iterations ($maxIterations) reached for batch Resolution, " +
s"please set '${SQLConf.ANALYZER_MAX_ITERATIONS.key}' to a larger value."))
}
test("SPARK-30886 Deprecate two-parameter TRIM/LTRIM/RTRIM") {
Seq("trim", "ltrim", "rtrim").foreach { f =>
val logAppender = new LogAppender("deprecated two-parameter TRIM/LTRIM/RTRIM functions")
def check(count: Int): Unit = {
val message = "Two-parameter TRIM/LTRIM/RTRIM function signatures are deprecated."
assert(logAppender.loggingEvents.size == count)
assert(logAppender.loggingEvents.exists(
e => e.getLevel == Level.WARN &&
e.getRenderedMessage.contains(message)))
}
withLogAppender(logAppender) {
val testAnalyzer1 = new Analyzer(
new SessionCatalog(new InMemoryCatalog, FunctionRegistry.builtin, conf), conf)
val plan1 = testRelation2.select(
UnresolvedFunction(f, $"a" :: Nil, isDistinct = false))
testAnalyzer1.execute(plan1)
// One-parameter is not deprecated.
assert(logAppender.loggingEvents.isEmpty)
val plan2 = testRelation2.select(
UnresolvedFunction(f, $"a" :: $"b" :: Nil, isDistinct = false))
testAnalyzer1.execute(plan2)
// Deprecation warning is printed out once.
check(1)
val plan3 = testRelation2.select(
UnresolvedFunction(f, $"b" :: $"a" :: Nil, isDistinct = false))
testAnalyzer1.execute(plan3)
// There is no change in the log.
check(1)
// New analyzer from new SessionState
val testAnalyzer2 = new Analyzer(
new SessionCatalog(new InMemoryCatalog, FunctionRegistry.builtin, conf), conf)
val plan4 = testRelation2.select(
UnresolvedFunction(f, $"c" :: $"d" :: Nil, isDistinct = false))
testAnalyzer2.execute(plan4)
// Additional deprecation warning from new analyzer
check(2)
val plan5 = testRelation2.select(
UnresolvedFunction(f, $"c" :: $"d" :: Nil, isDistinct = false))
testAnalyzer2.execute(plan5)
// There is no change in the log.
check(2)
}
}
}
}
| goldmedal/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala | Scala | apache-2.0 | 32,799 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.migration
import java.sql.Timestamp
import scala.xml.NodeSeq
import java.sql.PreparedStatement
import java.sql.Connection
import scala.xml.Elem
/*
* Note: remember to use plain text here, as
 * a failure on that test should raise questions about
* a global value modification that was not taken
* into account in a migration.
*/
object MigrationTestLog {
//get a default TimeStamp value for that run
val defaultTimestamp = new Timestamp(System.currentTimeMillis)
}
case class MigrationTestLog(
id : Option[Long] = None
, eventType: String
, timestamp: Timestamp = MigrationTestLog.defaultTimestamp
, principal: String = "TestUser"
, cause : Option[Int] = None
, severity : Int = 100
, data : Elem
) {
def insertSql(c: Connection) : Long = {
    //add the causeId column and its value placeholder only when a cause is provided
    val (row, qmark) = cause match {
      case Some(id) => (", causeId", ", ?")
      case None => ("", "")
    }
    val INSERT_SQL = "insert into EventLog (creationDate, principal, eventType, severity, data%s) values (?, ?, ?, ?, ?%s)".format(row, qmark)
val ps = c.prepareStatement(INSERT_SQL, Array("id"))
ps.setTimestamp(1, timestamp)
ps.setString(2, principal)
ps.setString(3, eventType)
ps.setInt(4, severity)
val sqlXml = c.createSQLXML()
sqlXml.setString(data.toString)
ps.setSQLXML(5, sqlXml)
cause.foreach { id =>
ps.setInt(6, id)
}
ps.executeUpdate
val rs = ps.getGeneratedKeys
rs.next
rs.getLong("id")
}
}
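/*
 * Usage sketch (illustration only, not part of the original file): given an open
 * java.sql.Connection `c` to a database that contains the EventLog table targeted by
 * INSERT_SQL above, a test log entry could be persisted and its generated id recovered:
 *
 *   val log = MigrationTestLog(eventType = "RuleAdded", data = <entry><rule/></entry>)
 *   val generatedId: Long = log.insertSql(c)
 */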
object Migration_10_2_DATA_EventLogs {
import com.normation.rudder.migration.{
Migration_10_2_DATA_Rule => RuleXml
, Migration_10_2_DATA_Other => OtherXml
, Migration_10_2_DATA_Directive => DirectiveXml
, Migration_10_2_DATA_Group => GroupXml
}
def e(xml:Elem) = <entry>{xml}</entry>
val data_10 = Map(
"rule_add" -> MigrationTestLog(
eventType = "ConfigurationRuleAdded"
, data = e(RuleXml.rule_add_10)
)
, "rule_modify" -> MigrationTestLog(
eventType = "ConfigurationRuleModified"
, data = e(RuleXml.rule_modify_10)
)
, "rule_delete" -> MigrationTestLog(
eventType = "ConfigurationRuleDeleted"
, data = e(RuleXml.rule_delete_10)
)
, "addPendingDeployment" -> MigrationTestLog(
eventType = "StartDeployement"
, data = e(OtherXml.addPendingDeployment_10)
)
, "node_accept" -> MigrationTestLog(
eventType = "AcceptNode"
, data = e(OtherXml.node_accept_10)
)
, "node_refuse" -> MigrationTestLog(
eventType = "RefuseNode"
, data = e(OtherXml.node_refuse_10)
)
, "directive_add" -> MigrationTestLog(
eventType = "PolicyInstanceAdded"
, data = e(DirectiveXml.directive_add_10)
)
, "directive_modify" -> MigrationTestLog(
eventType = "PolicyInstanceModified"
, data = e(DirectiveXml.directive_modify_10)
)
, "directive_delete" -> MigrationTestLog(
eventType = "PolicyInstanceDeleted"
, data = e(DirectiveXml.directive_delete_10)
)
, "nodeGroup_add" -> MigrationTestLog(
eventType = "NodeGroupAdded"
, data = e(GroupXml.nodeGroup_add_10)
)
, "nodeGroup_modify" -> MigrationTestLog(
eventType = "NodeGroupModified"
, data = e(GroupXml.nodeGroup_modify_10)
)
, "nodeGroup_delete" -> MigrationTestLog(
eventType = "NodeGroupDeleted"
, data = e(GroupXml.nodeGroup_delete_10)
)
)
val data_2 = Map(
"rule_add" -> MigrationTestLog(
eventType = "RuleAdded"
, data = e(RuleXml.rule_add_2)
)
, "rule_modify" -> MigrationTestLog(
eventType = "RuleModified"
, data = e(RuleXml.rule_modify_2)
)
, "rule_delete" -> MigrationTestLog(
eventType = "RuleDeleted"
, data = e(RuleXml.rule_delete_2)
)
, "addPendingDeployment" -> MigrationTestLog(
eventType = "AutomaticStartDeployement"
, data = e(OtherXml.addPendingDeployment_2)
)
, "node_accept" -> MigrationTestLog(
eventType = "AcceptNode"
, data = e(OtherXml.node_accept_2)
)
, "node_refuse" -> MigrationTestLog(
eventType = "RefuseNode"
, data = e(OtherXml.node_refuse_2)
)
, "directive_add" -> MigrationTestLog(
eventType = "DirectiveAdded"
, data = e(DirectiveXml.directive_add_2)
)
, "directive_modify" -> MigrationTestLog(
eventType = "DirectiveModified"
, data = e(DirectiveXml.directive_modify_2)
)
, "directive_delete" -> MigrationTestLog(
eventType = "DirectiveDeleted"
, data = e(DirectiveXml.directive_delete_2)
)
, "nodeGroup_add" -> MigrationTestLog(
eventType = "NodeGroupAdded"
, data = e(GroupXml.nodeGroup_add_2)
)
, "nodeGroup_modify" -> MigrationTestLog(
eventType = "NodeGroupModified"
, data = e(GroupXml.nodeGroup_modify_2)
)
, "nodeGroup_delete" -> MigrationTestLog(
eventType = "NodeGroupDeleted"
, data = e(GroupXml.nodeGroup_delete_2)
)
)
}
object Migration_10_2_DATA_Other {
val addPendingDeployment_10 =
<addPending alreadyPending="false"></addPending>
val addPendingDeployment_2 =
<addPendingDeployement alreadyPending="false" fileFormat="2"></addPendingDeployement>
val node_accept_10 =
<node action="accept" fileFormat="1.0">
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<inventoryVersion>2011-10-13T11:43:52.907+02:00</inventoryVersion>
<hostname>centos-5-32</hostname>
<fullOsName>Centos</fullOsName>
<actorIp>127.0.0.1</actorIp>
</node>
val node_accept_2 =
<node action="accept" fileFormat="2">
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<inventoryVersion>2011-10-13T11:43:52.907+02:00</inventoryVersion>
<hostname>centos-5-32</hostname>
<fullOsName>Centos</fullOsName>
<actorIp>127.0.0.1</actorIp>
</node>
val node_refuse_10 =
<node fileFormat="1.0" action="accept">
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<inventoryVersion>2011-10-13T11:43:52.907+02:00</inventoryVersion>
<hostname>centos-5-32</hostname>
<fullOsName>Centos</fullOsName>
<actorIp>127.0.0.1</actorIp>
</node>
val node_refuse_2 =
<node fileFormat="2" action="accept">
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<inventoryVersion>2011-10-13T11:43:52.907+02:00</inventoryVersion>
<hostname>centos-5-32</hostname>
<fullOsName>Centos</fullOsName>
<actorIp>127.0.0.1</actorIp>
</node>
}
object Migration_10_2_DATA_Rule {
val rule_add_10 =
<configurationRule fileFormat="1.0" changeType="add">
<id>e7c21276-d2b5-4fff-9924-96b67db9bd1c</id>
<displayName>configuration</displayName>
<serial>0</serial>
<target>group:f4b27025-b5a9-46fe-8289-cf9d56e07a8a</target>
<policyInstanceIds>
<id>2813aeb2-6930-11e1-b052-0024e8cdea1f</id>
<id>2c1b0d34-6930-11e1-b901-0024e8cdea1f</id>
</policyInstanceIds>
<shortDescription>configurationconfiguration</shortDescription>
<longDescription></longDescription>
<isActivated>true</isActivated>
<isSystem>false</isSystem>
</configurationRule>
val rule_add_2 =
<rule fileFormat="2" changeType="add">
<id>e7c21276-d2b5-4fff-9924-96b67db9bd1c</id>
<displayName>configuration</displayName>
<serial>0</serial>
<target>group:f4b27025-b5a9-46fe-8289-cf9d56e07a8a</target>
<directiveIds>
<id>2813aeb2-6930-11e1-b052-0024e8cdea1f</id>
<id>2c1b0d34-6930-11e1-b901-0024e8cdea1f</id>
</directiveIds>
<shortDescription>configurationconfiguration</shortDescription>
<longDescription></longDescription>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</rule>
val rule_modify_10 =
<configurationRule fileFormat="1.0" changeType="modify">
<id>39720027-952c-4e28-b774-9d5ce63f7a1e</id>
<displayName>Eutelsat CR Test</displayName>
<name>
<from>Eutelsat CR Test</from>
<to>Users and Fstabs CR</to>
</name>
<shortDescription>
<from>Test CR for Eutelsat</from>
<to>Test CR</to>
</shortDescription>
<longDescription>
<from></from>
<to>Test application of two (single) directives, with two multivalued section.</to>
</longDescription>
<target>
<from>
<none></none>
</from>
<to>group:383d521c-e5a7-4dc2-b402-21d425eefd30</to>
</target>
<policyInstanceIds>
<from></from>
<to>
<id>0a50f415-a8da-42aa-9e86-eb045e289de3</id>
</to>
</policyInstanceIds>
<isActivated>
<from>false</from>
<to>true</to>
</isActivated>
</configurationRule>
val rule_modify_2 =
<rule fileFormat="2" changeType="modify">
<id>39720027-952c-4e28-b774-9d5ce63f7a1e</id>
<displayName>Eutelsat CR Test</displayName>
<name>
<from>Eutelsat CR Test</from>
<to>Users and Fstabs CR</to>
</name>
<shortDescription>
<from>Test CR for Eutelsat</from>
<to>Test CR</to>
</shortDescription>
<longDescription>
<from></from>
<to>Test application of two (single) directives, with two multivalued section.</to>
</longDescription>
<target>
<from>
<none></none>
</from>
<to>group:383d521c-e5a7-4dc2-b402-21d425eefd30</to>
</target>
<directiveIds>
<from></from>
<to>
<id>0a50f415-a8da-42aa-9e86-eb045e289de3</id>
</to>
</directiveIds>
<isEnabled>
<from>false</from>
<to>true</to>
</isEnabled>
</rule>
val rule_delete_10 =
<configurationRule fileFormat="1.0" changeType="delete">
<id>ad8c48f7-b278-4f0c-83d7-f9cb28e0d440</id>
<displayName>zada on SLES10</displayName>
<serial>2</serial>
<target>group:9bf723d9-0838-4af8-82f7-37912a5093ca</target>
<policyInstanceIds>
<id>3fa24049-e673-475d-90ec-e5f9b6b81e38</id>
</policyInstanceIds>
<shortDescription></shortDescription>
<longDescription></longDescription>
<isActivated>true</isActivated>
<isSystem>false</isSystem>
</configurationRule>
val rule_delete_2 =
<rule fileFormat="2" changeType="delete">
<id>ad8c48f7-b278-4f0c-83d7-f9cb28e0d440</id>
<displayName>zada on SLES10</displayName>
<serial>2</serial>
<target>group:9bf723d9-0838-4af8-82f7-37912a5093ca</target>
<directiveIds>
<id>3fa24049-e673-475d-90ec-e5f9b6b81e38</id>
</directiveIds>
<shortDescription></shortDescription>
<longDescription></longDescription>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</rule>
}
object Migration_10_2_DATA_Directive {
val directive_add_10 =
<policyInstance fileFormat="1.0" changeType="add">
<id>2fd5dd7e-c83b-4610-96ad-02002024c2f1</id>
<displayName>Name resolution 1</displayName>
<policyTemplateName>dnsConfiguration</policyTemplateName>
<policyTemplateVersion>1.0</policyTemplateVersion>
<section name="sections">
<section name="Nameserver settings">
<var name="DNS_RESOLVERS_EDIT">false</var>
<section name="DNS resolvers">
<var name="DNS_RESOLVERS">192.168.1.1</var>
</section>
<section name="DNS resolvers">
<var name="DNS_RESOLVERS">192.168.1.2</var>
</section>
</section>
<section name="Search suffix settings">
<var name="DNS_SEARCHLIST_EDIT">false</var>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example1.com</var>
</section>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example2.com</var>
</section>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example3.com</var>
</section>
</section>
</section>
<shortDescription></shortDescription>
<longDescription></longDescription>
<priority>5</priority>
<isActivated>true</isActivated>
<isSystem>false</isSystem>
</policyInstance>
val directive_add_2 =
<directive fileFormat="2" changeType="add">
<id>2fd5dd7e-c83b-4610-96ad-02002024c2f1</id>
<displayName>Name resolution 1</displayName>
<techniqueName>dnsConfiguration</techniqueName>
<techniqueVersion>1.0</techniqueVersion>
<section name="sections">
<section name="Nameserver settings">
<var name="DNS_RESOLVERS_EDIT">false</var>
<section name="DNS resolvers">
<var name="DNS_RESOLVERS">192.168.1.1</var>
</section>
<section name="DNS resolvers">
<var name="DNS_RESOLVERS">192.168.1.2</var>
</section>
</section>
<section name="Search suffix settings">
<var name="DNS_SEARCHLIST_EDIT">false</var>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example1.com</var>
</section>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example2.com</var>
</section>
<section name="DNS search list">
<var name="DNS_SEARCHLIST">example3.com</var>
</section>
</section>
</section>
<shortDescription></shortDescription>
<longDescription></longDescription>
<priority>5</priority>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</directive>
val directive_modify_10 =
<policyInstance fileFormat="1.0" changeType="modify">
<id>70785952-d3b9-4d8e-9df4-1606af6d1ba3</id>
<policyTemplateName>createFilesFromList</policyTemplateName>
<displayName>creatFileTestPI</displayName>
<parameters>
<from>
<section name="sections">
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile2</var>
</section>
</section>
</from>
<to>
<section name="sections">
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile2</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile3</var>
</section>
</section>
</to>
</parameters>
</policyInstance>
val directive_modify_2 =
<directive fileFormat="2" changeType="modify">
<id>70785952-d3b9-4d8e-9df4-1606af6d1ba3</id>
<techniqueName>createFilesFromList</techniqueName>
<displayName>creatFileTestPI</displayName>
<parameters>
<from>
<section name="sections">
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile2</var>
</section>
</section>
</from>
<to>
<section name="sections">
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile2</var>
</section>
<section name="File">
<var name="CREATEFILESFROMLIST_FILE">/tmp/anotherFile3</var>
</section>
</section>
</to>
</parameters>
</directive>
val directive_delete_10 =
<policyInstance fileFormat="1.0" changeType="delete">
<id>2a79eabf-9987-450c-88bf-3c86d4759eb7</id>
<displayName>Edit crontabs to use "yada"</displayName>
<policyTemplateName>checkGenericFileContent</policyTemplateName>
<policyTemplateVersion>2.0</policyTemplateVersion>
<section name="sections">
<section name="File to manage">
<section name="File">
<var name="GENERIC_FILE_CONTENT_PATH">/var/spool/cron/tabs/root</var>
<var name="GENERIC_FILE_CONTENT_PAYLOAD">* * * * * /home/wimics/yada</var>
<var name="GENERIC_FILE_CONTENT_ENFORCE">false</var>
</section>
<section name="Permission adjustment">
<var name="GENERIC_FILE_CONTENT_PERMISSION_ADJUSTMENT">true</var>
<var name="GENERIC_FILE_CONTENT_OWNER">root</var>
<var name="GENERIC_FILE_CONTENT_GROUP">root</var>
<var name="GENERIC_FILE_CONTENT_PERM">644</var>
</section>
<section name="Post-modification hook">
<var name="GENERIC_FILE_CONTENT_POST_HOOK_RUN">false</var>
<var name="GENERIC_FILE_CONTENT_POST_HOOK_COMMAND"></var>
</section>
</section>
</section>
<shortDescription></shortDescription>
<longDescription></longDescription>
<priority>5</priority>
<isActivated>true</isActivated>
<isSystem>false</isSystem>
</policyInstance>
val directive_delete_2 =
<directive fileFormat="2" changeType="delete">
<id>2a79eabf-9987-450c-88bf-3c86d4759eb7</id>
<displayName>Edit crontabs to use "yada"</displayName>
<techniqueName>checkGenericFileContent</techniqueName>
<techniqueVersion>2.0</techniqueVersion>
<section name="sections">
<section name="File to manage">
<section name="File">
<var name="GENERIC_FILE_CONTENT_PATH">/var/spool/cron/tabs/root</var>
<var name="GENERIC_FILE_CONTENT_PAYLOAD">* * * * * /home/wimics/yada</var>
<var name="GENERIC_FILE_CONTENT_ENFORCE">false</var>
</section>
<section name="Permission adjustment">
<var name="GENERIC_FILE_CONTENT_PERMISSION_ADJUSTMENT">true</var>
<var name="GENERIC_FILE_CONTENT_OWNER">root</var>
<var name="GENERIC_FILE_CONTENT_GROUP">root</var>
<var name="GENERIC_FILE_CONTENT_PERM">644</var>
</section>
<section name="Post-modification hook">
<var name="GENERIC_FILE_CONTENT_POST_HOOK_RUN">false</var>
<var name="GENERIC_FILE_CONTENT_POST_HOOK_COMMAND"></var>
</section>
</section>
</section>
<shortDescription></shortDescription>
<longDescription></longDescription>
<priority>5</priority>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</directive>
}
object Migration_10_2_DATA_Group {
val nodeGroup_add_10 =
<nodeGroup fileFormat="1.0" changeType="add">
<id>a73220c8-c3e1-40f1-803b-55d21bc817ec</id>
<displayName>CentOS</displayName>
<description>CentOS Group</description>
<query></query>
<isDynamic>true</isDynamic>
<nodeIds></nodeIds>
<isActivated>true</isActivated>
<isSystem>false</isSystem>
</nodeGroup>
val nodeGroup_add_2 =
<nodeGroup fileFormat="2" changeType="add">
<id>a73220c8-c3e1-40f1-803b-55d21bc817ec</id>
<displayName>CentOS</displayName>
<description>CentOS Group</description>
<query></query>
<isDynamic>true</isDynamic>
<nodeIds></nodeIds>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</nodeGroup>
val nodeGroup_modify_10 =
<nodeGroup fileFormat="1.0" changeType="modify">
<id>hasPolicyServer-root</id>
<displayName>Root server group</displayName>
<nodeIds>
<from>
<id>root</id>
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
</from>
<to>
<id>root</id>
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<id>06da3556-5204-4bd7-b3b0-fa5e7bcfbbea</id>
</to>
</nodeIds>
</nodeGroup>
val nodeGroup_modify_2 =
<nodeGroup fileFormat="2" changeType="modify">
<id>hasPolicyServer-root</id>
<displayName>Root server group</displayName>
<nodeIds>
<from>
<id>root</id>
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
</from>
<to>
<id>root</id>
<id>248c8e3d-1bf6-4bc1-9398-f8890b015a50</id>
<id>06da3556-5204-4bd7-b3b0-fa5e7bcfbbea</id>
</to>
</nodeIds>
</nodeGroup>
val nodeGroup_delete_10 =
<nodeGroup fileFormat="1.0" changeType="delete">
<id>4e0e8d5e-c87a-445c-ac81-a0e7a2b9e5e6</id>
<displayName>All debian</displayName>
<description></description>
<query>
{{"select":"node","composition":"And","where":[{{"objectType":"node","attribute":"osName","comparator":"eq","value":"Debian"}}]}}
</query>
<isDynamic>true</isDynamic>
<nodeIds>
<id>b9a71482-5030-4699-984d-b03d28bbbf36</id>
<id>0876521e-3c81-4775-85c7-5dd7f9d5d3da</id>
</nodeIds>
<isActivated>true</isActivated>
<isSystem>false</isSystem>
</nodeGroup>
val nodeGroup_delete_2 =
<nodeGroup fileFormat="2" changeType="delete">
<id>4e0e8d5e-c87a-445c-ac81-a0e7a2b9e5e6</id>
<displayName>All debian</displayName>
<description></description>
<query>
{{"select":"node","composition":"And","where":[{{"objectType":"node","attribute":"osName","comparator":"eq","value":"Debian"}}]}}
</query>
<isDynamic>true</isDynamic>
<nodeIds>
<id>b9a71482-5030-4699-984d-b03d28bbbf36</id>
<id>0876521e-3c81-4775-85c7-5dd7f9d5d3da</id>
</nodeIds>
<isEnabled>true</isEnabled>
<isSystem>false</isSystem>
</nodeGroup>
} | jooooooon/rudder | rudder-core/src/test/scala/com/normation/rudder/migration/XmlDataMigration_10_2.scala | Scala | agpl-3.0 | 24,084 |
package es.ucm.fdi.sscheck.gen
import org.scalacheck.{Properties, Gen}
import org.scalacheck.Arbitrary.arbitrary
import org.scalacheck.Prop.{forAll, exists, AnyOperators, collect}
import org.scalatest._
import org.scalatest.Matchers._
import org.scalatest.prop.PropertyChecks._
import org.scalatest.Inspectors.{forAll => testForAll}
import Batch.seq2batch
import BatchGen._
import DStreamGen._
import Buildables.{buildableBatch, buildableDStreamFromBatch}
import DStreamMatchers._
/** Tests for the LTL inspired HO generators defined at BatchGen and DStreamGen
*
* WARNING: due to using nested forall, shrinking might generate wrong counterexamples,
* so the _ORIGINAL versions should be used in case of test failure
*
 * Note most tests check completeness of the generators, but not correctness, i.e. that
 * all the generated data corresponds to some input data. We could do that, but logically
 * it makes sense to interpret the TL formulas as requiring something to happen, not as
 * requiring that something doesn't happen, as we don't have negation.
* */
object TLGenTest extends Properties("TLGen temporal logic generators properties") {
/*
* NOTE the use of the import alias org.scalatest.Inspectors.{forAll => testForAll} to
* distinguish between ScalaTest forAll inspector used in matchers, and ScalaTest
* forAll adapter for ScalaCheck
* */
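  /*
   * Illustrative sketch (added for clarity, not part of the original properties): for
   * constant batch generators the temporal combinators below produce dstreams such as
   *
   *   BatchGen.now(Batch(1, 2))           ~> DStream(Batch(1, 2))
   *   BatchGen.next(Batch(1, 2))          ~> DStream(Batch.empty, Batch(1, 2))
   *   BatchGen.until(Batch(1), Batch(2))  ~> DStream(Batch(1), ..., Batch(1), Batch(2))
   *
   * i.e. a strong until repeats its first argument for some finite prefix and then emits
   * its second argument exactly once at the end, which is what the properties check.
   */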
//
// Tests for BatchGen LTL inspired HO generators
//
property("BatchGen.now() applied to a batch generator returns a dstream generator " +
"with exactly one batch, and doesn't introduce new elements") =
forAll ("batch" |: arbitrary[Batch[Int]]) { batch : Batch[Int] =>
// note here we are dropping some elements
val g = BatchGen.now(Gen.someOf(batch))
forAll ("dstream" |: g) { dstream : DStream[Int] =>
dstream should have length (1)
dstream(0).length should be <= (batch.length)
testForAll (dstream(0)) { batch should contain (_)}
true
}
}
property("BatchGen.now() applied to a constant batch generator returns a dstream generator " +
"with exactly that batch as the only batch") =
forAll ("batch" |: arbitrary[Batch[Int]]) { batch : Batch[Int] =>
// using a constant generator
val g = BatchGen.now(batch : Batch[Int])
forAll ("dstream" |: g) { dstream : DStream[Int] =>
dstream should have length (1)
dstream(0) should be (batch)
true
}
}
property("BatchGen.next() returns a dstream generator that has exactly two batches, " +
"the first one is emptyBatch, and the second one is its argument") =
forAll ("batch" |: arbitrary[Batch[Int]]) { batch : Batch[Int] =>
// using a constant generator
val g = BatchGen.next(batch : Batch[Int])
forAll ("dstream" |: g) { dstream : DStream[Int] =>
dstream should have length (2)
dstream(0) should be (Batch.empty)
dstream(1) should be (batch)
true
}
}
property("BatchGen.laterN(n, bg) generates n batches and then bg" ) =
forAll ("batch" |: arbitrary[Batch[Int]], "n" |: Gen.choose(-10, 30)) {
(batch : Batch[Int], n : Int) =>
// using a constant generator
val g = BatchGen.laterN(n, batch)
forAll ("nextDStream" |: g) { nextDStream : DStream[Int] =>
nextDStream should have length (math.max(0, n) + 1)
testForAll (nextDStream.slice(0, n)) {_ should be (Batch.empty)}
nextDStream(nextDStream.length-1) should be (batch)
true
}
}
property("BatchGen.until is a strong until, i.e. the second generator always occurs, and " +
"the first one occours before") =
forAll ("batch1" |: arbitrary[Batch[Int]], "batch2" |: arbitrary[Batch[Int]]) {
(batch1 : Batch[Int], batch2 : Batch[Int]) =>
// using constant generators
val g = BatchGen.until(batch1, batch2)
forAll ("untilDStream" |: g) { untilDStream : DStream[Int] =>
testForAll (untilDStream.slice(0, untilDStream.length-1)) { _ should be (batch1)}
untilDStream(untilDStream.length-1) should be (batch2)
true
}
}
property("BatchGen.eventually eventually produces data from the " +
"argument batch generator") =
forAll ("batch" |: arbitrary[Batch[Int]]) { batch : Batch[Int] =>
forAll ("eventuallyDStream" |: BatchGen.eventually(batch)) { eventuallyDStream : DStream[Int] =>
eventuallyDStream(eventuallyDStream.length-1) should be (batch)
true
}
}
property("BatchGen.always always produces data from the argument batch generator") =
forAll ("batch" |: arbitrary[Batch[Int]]) { batch : Batch[Int] =>
forAll ("alwaysDStream" |: BatchGen.always(batch)) { alwaysDStream : DStream[Int] =>
testForAll (alwaysDStream.toList) {_ should be (batch)}
true
}
}
property("BatchGen.release is a weak relase, i.e either bg2 happens forever, " +
"or it happens until bg1 happens, including the moment when bg1 happens") =
forAll ("batch1" |: arbitrary[Batch[Int]], "batch2" |: arbitrary[Batch[Int]]) {
(batch1 : Batch[Int], batch2 : Batch[Int]) =>
// using constant generators
val g = BatchGen.release(batch1, batch2)
forAll ("releaseDStream" |: g) { releaseDStream : DStream[Int] =>
testForAll (releaseDStream.slice(0, releaseDStream.length-2)) { _ should be (batch2)}
releaseDStream(releaseDStream.length-1) should
(be (batch1 ++ batch2) or be (batch2))
true
}
}
//
// Tests for DStreamGen LTL inspired HO generators
//
// small DStream generator for costly tests
val smallDsg = DStreamGen.ofNtoM(0, 10, BatchGen.ofNtoM(0, 5, arbitrary[Int]))
property("DStreamGen.next() returns a dstream generator that has exactly 1 + the " +
"number of batches of its argument, the first one is emptyBatch, and the rest " +
"are the batches generated by its argument") =
forAll ("dstream" |: arbitrary[DStream[Int]]) { dstream : DStream[Int] =>
// using a constant generator
val g = DStreamGen.next(dstream)
forAll ("nextDStream" |: g) { nextDStream : DStream[Int] =>
nextDStream should have length (1 + dstream.length)
nextDStream(0) should be (Batch.empty)
nextDStream.slice(1, nextDStream.size) should be (dstream)
true
}
}
property("DStreamGen.laterN(n, dsg) generates n batches and then dsg" ) =
forAll ("dstream" |: arbitrary[DStream[Int]], "n" |: Gen.choose(-10, 30)) {
(dstream : DStream[Int], n : Int) =>
// using a constant generator
val g = DStreamGen.laterN(n, dstream)
forAll ("nextDStream" |: g) { nextDStream : DStream[Int] =>
nextDStream should have length (math.max(0, n) + dstream.length)
testForAll (nextDStream.slice(0, n)) {_ should be (Batch.empty)}
nextDStream.slice(n, nextDStream.length) should be (dstream.toList)
true
}
}
property("DStreamGen.until is a strong until, i.e. the second generator always occurs, " +
"and the first one occours before") = {
// explicitly limiting generator sizes to avoid too slow tests
forAll ("dstream1" |: smallDsg, "dstream2" |: smallDsg) {
(dstream1 : DStream[Int], dstream2 : DStream[Int]) =>
// using constant generators
val (dstream1Len, dstream2Len) = (dstream1.length, dstream2.length)
val g = DStreamGen.until(dstream1, dstream2)
forAll ("untilDStream" |: g) { untilDStream : DStream[Int] =>
for {i <- 0 until untilDStream.length - dstream1Len - dstream2Len} {
dstream1 should beSubsetOf (untilDStream.slice(i, i + dstream1Len))
}
val tail = untilDStream.slice(untilDStream.length - dstream2Len, untilDStream.length)
dstream2 should beSubsetOf(tail)
true
}
}
}
property("DStreamGen.eventually eventually produces data from the argument generator") =
forAll ("dstream" |: arbitrary[DStream[Int]]) { dstream : DStream[Int] =>
forAll ("eventuallyDStream" |: DStreamGen.eventually(dstream)) { eventuallyDStream : DStream[Int] =>
val eventuallyDStreamLen = eventuallyDStream.length
val ending = eventuallyDStream.slice(eventuallyDStreamLen - dstream.length, eventuallyDStreamLen)
ending should be (dstream)
true
}
}
property("DStreamGen.always always produces data from the argument generator") =
// explicitly limiting generator sizes to avoid too slow tests
forAll (smallDsg) { dstream : DStream[Int] =>
val dstreamLen = dstream.length
forAll ("alwaysDStream" |: DStreamGen.always(dstream)) { alwaysDStream : DStream[Int] =>
for {i <- 0 until alwaysDStream.length - dstreamLen} {
dstream should beSubsetOf (alwaysDStream.slice(i, i + dstreamLen))
}
true
}
}
property("DStreamGen.release is a weak relase, i.e either the second generator happens forever, " +
"or it happens until the first generator happens, including the moment when the first generator happens") =
{
// explicitly limiting generator sizes to avoid too slow tests
forAll ("dstream1" |: smallDsg, "dstream2" |: smallDsg) {
(dstream1 : DStream[Int], dstream2 : DStream[Int]) =>
// using constant generators
val (dstream1Len, dstream2Len) = (dstream1.length, dstream2.length)
val g = DStreamGen.release(dstream1, dstream2)
forAll ("releaseDStream" |: g) { releaseDStream : DStream[Int] =>
// this is similar to always, but note the use of max to account for
// the case when dstream1 happens and dstream1 is longer than dstream2
// We don't check if dstream1 happens, because it might not
for {i <- 0 until releaseDStream.length - math.max(dstream1Len, dstream2Len)} {
dstream2 should beSubsetOf (releaseDStream.slice(i, i + dstream2Len))
}
true
}
}
}
} | juanrh/spark-testing-base | src/test/scala/es/ucm/fdi/sscheck/gen/TLGenTest.scala | Scala | apache-2.0 | 10,164 |
import scala.io._
import scala.actors._
import Actor._
object PageLoader {
def getPageSize(url : String) = Source.fromURL(url).mkString.length
}
var urls = List("https://www.twitter.com", "https://www.linkedin.com/", "http://gmail.google.com")
def timeMethod(method: () => Unit) = {
val start = System.nanoTime
method()
val end = System.nanoTime
println("Method took " + (end - start)/1000000000.0 + " seconds")
}
def getPageSizeSequentially() = {
for (url <- urls) {
println ("Size for " + url + ":" + PageLoader.getPageSize(url))
}
}
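// Scatter/gather with Scala actors: spawn one actor per URL, have each one send a
// (url, size) message back to the caller, then collect exactly urls.size replies below.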
def getPageSizeConcurrently() = {
val caller = self
for (url <- urls) {
actor { caller ! (url, PageLoader.getPageSize(url)) }
}
for ( i <- 1 to urls.size) {
receive {
case (url, size) =>
println ("Size for " + url + ":" + size)
}
}
}
println("Sequential run :")
timeMethod { getPageSizeSequentially }
println("Concurrent run :")
timeMethod {getPageSizeConcurrently} | brice/seven-adventure | Week4/day3/sizer.scala | Scala | gpl-3.0 | 1,018 |
package ilc
package feature
package let
trait FreeVariables extends analysis.FreeVariables with Syntax {
override def termFreeVariables(term: Term): Set[Var] = term match {
case Let(v, exp, body) =>
//If v is free in exp, it is indeed free in the overall let!
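      // e.g. for a non-recursive let such as `let v = v + 1 in v * 2`, the `v` inside the
      // bound expression refers to an outer binding, so it stays free in the whole let
      // even though the body's `v` is bound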
body.freeVariables - v ++ exp.freeVariables
case _ =>
super.termFreeVariables(term)
}
}
| inc-lc/ilc-scala | src/main/scala/ilc/feature/let/FreeVariables.scala | Scala | mit | 379 |
package scala.slick.driver
import scala.slick.lifted._
import scala.slick.ast._
import scala.slick.jdbc.{PositionedResult, JdbcType}
import scala.slick.util.MacroSupport.macroSupportInterpolation
import java.sql.{Timestamp, Date, Time}
import scala.slick.profile.{SqlProfile, Capability}
/**
* Slick driver for Microsoft SQL Server.
*
* This driver implements the [[scala.slick.driver.ExtendedProfile]]
* ''without'' the following capabilities:
*
* <ul>
* <li>[[scala.slick.driver.JdbcProfile.capabilities.returnInsertOther]]:
* When returning columns from an INSERT operation, only a single column
* may be specified which must be the table's AutoInc column.</li>
* <li>[[scala.slick.profile.SqlProfile.capabilities.sequence]]:
* Sequences are not supported because SQLServer does not have this
* feature.</li>
* </ul>
*
* @author szeiger
*/
trait SQLServerDriver extends ExtendedDriver { driver =>
override protected def computeCapabilities: Set[Capability] = (super.computeCapabilities
- JdbcProfile.capabilities.returnInsertOther
- SqlProfile.capabilities.sequence
)
override val columnTypes = new JdbcTypes
override def createQueryBuilder(input: QueryBuilderInput): QueryBuilder = new QueryBuilder(input)
override def createColumnDDLBuilder(column: FieldSymbol, table: Table[_]): ColumnDDLBuilder = new ColumnDDLBuilder(column)
override def defaultSqlTypeName(tmd: JdbcType[_]): String = tmd.sqlType match {
case java.sql.Types.BOOLEAN => "BIT"
case java.sql.Types.BLOB => "IMAGE"
case java.sql.Types.CLOB => "TEXT"
case java.sql.Types.DOUBLE => "FLOAT(53)"
case java.sql.Types.FLOAT => "FLOAT(24)"
case _ => super.defaultSqlTypeName(tmd)
}
class QueryBuilder(input: QueryBuilderInput) extends super.QueryBuilder(input) with RowNumberPagination {
override protected val supportsTuples = false
override protected val concatOperator = Some("+")
override protected val useIntForBoolean = true
override protected def buildSelectModifiers(c: Comprehension) {
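      // SQL Server (before 2012) has no OFFSET/FETCH clause, so paging is emulated with TOP:
      // the first offset+fetch rows are selected here, and the leading offset rows are
      // expected to be discarded by the RowNumberPagination mixin used by this builder.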
(c.fetch, c.offset) match {
case (Some(t), Some(d)) => b"top ${d+t} "
case (Some(t), None ) => b"top $t "
case (None, _ ) => if(!c.orderBy.isEmpty) b"top 100 percent "
}
}
override protected def buildOrdering(n: Node, o: Ordering) {
if(o.nulls.last && !o.direction.desc)
b"case when ($n) is null then 1 else 0 end,"
else if(o.nulls.first && o.direction.desc)
b"case when ($n) is null then 0 else 1 end,"
expr(n)
if(o.direction.desc) b" desc"
}
}
class ColumnDDLBuilder(column: FieldSymbol) extends super.ColumnDDLBuilder(column) {
override protected def appendOptions(sb: StringBuilder) {
if(defaultLiteral ne null) sb append " DEFAULT " append defaultLiteral
if(notNull) sb append " NOT NULL"
if(primaryKey) sb append " PRIMARY KEY"
if(autoIncrement) sb append " IDENTITY"
}
}
class JdbcTypes extends super.JdbcTypes {
override val booleanJdbcType = new BooleanJdbcType
override val byteJdbcType = new ByteJdbcType
override val dateJdbcType = new DateJdbcType
override val timeJdbcType = new TimeJdbcType
override val timestampJdbcType = new TimestampJdbcType
override val uuidJdbcType = new UUIDJdbcType {
override def sqlTypeName = "UNIQUEIDENTIFIER"
}
/* SQL Server does not have a proper BOOLEAN type. The suggested workaround is
* BIT with constants 1 and 0 for TRUE and FALSE. */
class BooleanJdbcType extends super.BooleanJdbcType {
override def valueToSQLLiteral(value: Boolean) = if(value) "1" else "0"
}
/* Selecting a straight Date or Timestamp literal fails with a NPE (probably
* because the type information gets lost along the way), so we cast all Date
* and Timestamp values to the proper type. This work-around does not seem to
* be required for Time values. */
class DateJdbcType extends super.DateJdbcType {
override def valueToSQLLiteral(value: Date) = "(convert(date, {d '" + value + "'}))"
}
class TimeJdbcType extends super.TimeJdbcType {
override def valueToSQLLiteral(value: Time) = "(convert(time, {t '" + value + "'}))"
override def nextValue(r: PositionedResult) = {
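        // SQL Server may return TIME values with a fractional-seconds part (e.g. "12:34:56.789"),
        // which Time.valueOf cannot parse; split on '.' and add the milliseconds manually.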
val s = r.nextString()
val sep = s.indexOf('.')
if(sep == -1) Time.valueOf(s)
else {
val t = Time.valueOf(s.substring(0, sep))
val millis = (("0."+s.substring(sep+1)).toDouble * 1000.0).toInt
t.setTime(t.getTime + millis)
t
}
}
}
class TimestampJdbcType extends super.TimestampJdbcType {
/* TIMESTAMP in SQL Server is a data type for sequence numbers. What we
* want here is DATETIME. */
override def sqlTypeName = "DATETIME"
override def valueToSQLLiteral(value: Timestamp) = "(convert(datetime, {ts '" + value + "'}))"
}
/* SQL Server's TINYINT is unsigned, so we use SMALLINT instead to store a signed byte value.
* The JDBC driver also does not treat signed values correctly when reading bytes from result
* sets, so we read as Short and then convert to Byte. */
class ByteJdbcType extends super.ByteJdbcType {
override def sqlTypeName = "SMALLINT"
//def setValue(v: Byte, p: PositionedParameters) = p.setByte(v)
//def setOption(v: Option[Byte], p: PositionedParameters) = p.setByteOption(v)
override def nextValue(r: PositionedResult) = r.nextShort.toByte
//def updateValue(v: Byte, r: PositionedResult) = r.updateByte(v)
}
}
}
object SQLServerDriver extends SQLServerDriver
| boldradius/slick | src/main/scala/scala/slick/driver/SQLServerDriver.scala | Scala | bsd-2-clause | 5,667 |
/* Copyright (C) 2008-2016 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.la
import org.junit.Assert._
import org.junit.Test
import org.scalatest.junit._
class TestTensor2 extends JUnitSuite with cc.factorie.util.FastLogging {
val eps = 1e-4
@Test
def testDenseTensor2(): Unit = {
val t1 = new DenseTensor2(2,2)
// initial value is 0
assertEquals(0.0, t1(0,0), eps)
assertEquals(0.0, t1(0,1), eps)
assertEquals(0.0, t1(1,0), eps)
assertEquals(0.0, t1(1,1), eps)
    // construct a matrix
// | 0.2 0.4 |
// | 0.8 0.6 |
t1(0,0) = 0.2
t1(0,1) = 0.4
t1(1,0) = 0.8
t1(1,1) = 0.6
val t1equal = new DenseTensor2(Array(Array(0.2, 0.4), Array(0.8, 0.6)))
assertArrayEquals(t1.toArray, t1equal.toArray, eps)
assertEquals(0.2, t1(0,0), eps)
val t2 = new DenseTensor2(2,2)
    // construct a matrix
// | 0.1 0.3 |
// | 0.9 0.7 |
t2(0,0) = 0.1
t2(0,1) = 0.3
t2(1,0) = 0.9
t2(1,1) = 0.7
val t3 = new DenseTensor1(2)
t3(0) = 0.1
t3(1) = 0.9
// | 0.2 0.4 | * | 0.1 | = | 0.38 |
// | 0.8 0.6 | | 0.9 | | 0.62 |
val t4 = t1 * t3
assertArrayEquals(Array(0.38, 0.62), t4.toArray, eps)
// | 0.2 0.4 | leftMultiply | 0.1 | = | 0.1 0.9 | * | 0.2 0.4 | = | 0.74 |
// | 0.8 0.6 | | 0.9 | | 0.8 0.6 | | 0.58 |
val t5 = t1 leftMultiply t3
assertArrayEquals(Array(0.74, 0.58), t5.toArray, eps)
// println(t1 outer t3)
    // outer is not fully implemented and would cause an infinite loop, so it is left commented out
// t1 outer t2
}
}
| strubell/factorie | src/test/scala/cc/factorie/la/TestTensor2.scala | Scala | apache-2.0 | 2,270 |
package tests
private class /*caret*/Private {
def foo = ???
}
private object /*caret*/Private {
def bar = ???
} | triplequote/intellij-scala | scala/scala-impl/testdata/rename3/privateSamePackage/before/tests/Private.scala | Scala | apache-2.0 | 118 |
/**
* Copyright 2014 Marco Vermeulen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.sdkman.release
import javax.validation.Valid
import io.sdkman.release.request.ReleaseRequest
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.validation.BindingResult
import org.springframework.web.bind.annotation.RequestMethod.POST
import org.springframework.web.bind.annotation._
trait ReleaseController extends CandidatePersistence with VersionPersistence with EntityValidation with Authorisation {
@RequestMapping(value = Array("/release"), method = Array(POST))
def publish(@Valid @RequestBody request: ReleaseRequest,
@RequestHeader(value = "access_token") token: String,
@RequestHeader(value = "consumer") consumer: String,
binding: BindingResult) = {
Authorised(token, consumer, request) {
ValidRequest(binding) {
val candidate = request.getCandidate
val version = request.getVersion
val url = request.getUrl
Created(save(Version(validCandidate(candidate), uniqueVersion(candidate, version), url)))
}
}
}
}
@RestController
class Releases @Autowired()(val versionRepo: VersionRepo,
val candidateRepo: CandidateRepo,
val candidateUpdateRepo: CandidateUpdateRepo,
                            val secureHeaders: SecureHeaders) extends ReleaseController
| sdkman/sdkman-release-api | src/main/scala/io/sdkman/release/releases.scala | Scala | apache-2.0 | 1,965 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations.calculations
import uk.gov.hmrc.ct.CATO01
import uk.gov.hmrc.ct.box.CtTypeConverters
import uk.gov.hmrc.ct.computations.{CP43, CP502, CP509, CP510}
trait NonTradeIncomeCalculator extends CtTypeConverters {
def nonTradeIncomeCalculation(cp43: CP43,
cp502: CP502,
cp509: CP509,
cp510: CP510): CATO01 = {
CATO01(cp43 + cp502 + cp509 + cp510)
}
}
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/calculations/NonTradeIncomeCalculator.scala | Scala | apache-2.0 | 1,091 |
package scala.slick.jdbc.meta
import java.sql._
import scala.slick.jdbc.{ResultSetInvoker, UnitInvoker, JdbcType}
/**
* A wrapper for a row in the ResultSet returned by DatabaseMetaData.getColumns().
*/
case class MColumn(
table: MQName, column: String, sqlType: Int, typeName: String,
columnSize: Option[Int], decimalDigits: Option[Int], numPrecRadix: Int, nullable: Option[Boolean], remarks: Option[String],
columnDef: Option[String], charOctetLength: Int, ordinalPos: Int, isNullable: Option[Boolean], scope: Option[MQName],
sourceDataType: Option[Int], isAutoInc: Option[Boolean]) {
def sqlTypeName = JdbcType.typeNames.get(sqlType)
def getColumnPrivileges = MColumnPrivilege.getColumnPrivileges(table, column)
}
object MColumn {
def getColumns(tablePattern: MQName, columnPattern: String) = ResultSetInvoker[MColumn](
_.metaData.getColumns(tablePattern.catalog_?, tablePattern.schema_?, tablePattern.name, columnPattern)) { r =>
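      // Fields are read positionally (r.<<) in the column order defined by DatabaseMetaData.getColumns.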
MColumn(MQName.from(r), r.<<, r.<<, r.<<, r.<<, r.skip.<<, r.<<, r.nextInt match {
case DatabaseMetaData.columnNoNulls => Some(false)
case DatabaseMetaData.columnNullable => Some(true)
case _ => None
}, r.<<, r.<<, r.skip.skip.<<, r.<<, DatabaseMeta.yesNoOpt(r),
if(r.hasMoreColumns) MQName.optionalFrom(r) else None,
r.<<?,
if(r.hasMoreColumns) DatabaseMeta.yesNoOpt(r) else None)
}
}
| boldradius/slick | src/main/scala/scala/slick/jdbc/meta/MColumn.scala | Scala | bsd-2-clause | 1,416 |
package scala.compat.java8.converterImpl
import scala.compat.java8.collectionImpl._
import Stepper._
/** Abstracts all the generic operations of stepping over a backing array
* for some collection where the elements are stored generically and some
* may be missing. Subclasses should set `currentEntry` to `null` when it
* is used as a signal to look for more entries in the array. (This also
* allows a subclass to traverse a sublist by updating `currentEntry`.)
*/
private[java8] abstract class AbstractStepsLikeGapped[Sub >: Null, Semi <: Sub](protected val underlying: Array[AnyRef], protected var i0: Int, protected var iN: Int)
extends EfficientSubstep {
protected var currentEntry: AnyRef = null
def semiclone(half: Int): Semi
def characteristics(): Int = Ordered
def estimateSize(): Long = if (!hasNext) 0 else iN - i0
def hasNext(): Boolean = currentEntry != null || (i0 < iN && {
do { currentEntry = underlying(i0); i0 += 1 } while (currentEntry == null && i0 < iN)
currentEntry != null
})
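  // Splits the remaining range roughly in half for parallel traversal; returns null when it is too
  // small to split further.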
def substep(): Sub = {
if (iN-1 > i0) {
val half = (i0+iN) >>> 1
val ans = semiclone(half)
i0 = half
ans
}
else null
}
}
/** Abstracts the process of stepping through an incompletely filled array of `AnyRefs`
* and interpreting the contents as the elements of a collection.
*/
private[java8] abstract class StepsLikeGapped[A, STA >: Null <: StepsLikeGapped[A, _]](_underlying: Array[AnyRef], _i0: Int, _iN: Int)
extends AbstractStepsLikeGapped[AnyStepper[A], STA](_underlying, _i0, _iN)
with AnyStepper[A]
{}
/** Abstracts the process of stepping through an incompletely filled array of `AnyRefs`
* and interpreting the contents as the elements of a collection of `Double`s. Subclasses
* are responsible for unboxing the `AnyRef` inside `nextDouble`.
*/
private[java8] abstract class StepsDoubleLikeGapped[STD >: Null <: StepsDoubleLikeGapped[_]](_underlying: Array[AnyRef], _i0: Int, _iN: Int)
extends AbstractStepsLikeGapped[DoubleStepper, STD](_underlying, _i0, _iN)
with DoubleStepper
{}
/** Abstracts the process of stepping through an incompletely filled array of `AnyRefs`
* and interpreting the contents as the elements of a collection of `Int`s. Subclasses
* are responsible for unboxing the `AnyRef` inside `nextInt`.
*/
private[java8] abstract class StepsIntLikeGapped[STI >: Null <: StepsIntLikeGapped[_]](_underlying: Array[AnyRef], _i0: Int, _iN: Int)
extends AbstractStepsLikeGapped[IntStepper, STI](_underlying, _i0, _iN)
with IntStepper
{}
/** Abstracts the process of stepping through an incompletely filled array of `AnyRefs`
* and interpreting the contents as the elements of a collection of `Long`s. Subclasses
* are responsible for unboxing the `AnyRef` inside `nextLong`.
*/
private[java8] abstract class StepsLongLikeGapped[STL >: Null <: StepsLongLikeGapped[_]](_underlying: Array[AnyRef], _i0: Int, _iN: Int)
extends AbstractStepsLikeGapped[LongStepper, STL](_underlying, _i0, _iN)
with LongStepper
{}
| Ichoran/scala-java8-compat | src/main/scala/scala/compat/java8/converterImpl/StepsLikeGapped.scala | Scala | bsd-3-clause | 3,057 |
package org.workcraft.gui;
import java.awt.BorderLayout
import java.awt.Dimension
import java.awt.FlowLayout
import java.awt.event.KeyEvent
import java.awt.event.MouseEvent
import java.awt.event.MouseListener
import java.util.ArrayList
import java.util.Collection
import java.util.Collections
import javax.swing.BorderFactory
import javax.swing.BoxLayout
import javax.swing.DefaultListModel
import javax.swing.JButton
import javax.swing.JCheckBox
import javax.swing.JDialog
import javax.swing.JFrame
import javax.swing.JLabel
import javax.swing.JList
import javax.swing.JPanel
import javax.swing.JScrollPane
import javax.swing.JTextField
import javax.swing.ListSelectionModel
import javax.swing.WindowConstants
import java.awt.event.MouseAdapter
import java.awt.Window
import javax.swing.JOptionPane
import org.workcraft.services.NewModelImpl
import java.awt.Font
import org.workcraft.scala.effects.IO
import org.workcraft.scala.effects.IO._
class CreateWorkDialog private (models: List[NewModelImpl], owner: Window) extends JDialog(owner) {
class ListElement(val newModel: NewModelImpl) {
override def toString = newModel.name
}
var choice: Option[NewModelImpl] = None
setDefaultCloseOperation(WindowConstants.HIDE_ON_CLOSE)
setModal(true)
setTitle("New work")
val contentPane = new JPanel(new BorderLayout())
setContentPane(contentPane)
val modelScroll = new JScrollPane()
val listModel = new DefaultListModel()
val modelList = new JList(listModel)
modelList.setFont(modelList.getFont().deriveFont(Font.BOLD))
modelList.setSelectionMode(ListSelectionModel.SINGLE_SELECTION)
modelList.setLayoutOrientation(JList.VERTICAL_WRAP)
modelList.addListSelectionListener(new javax.swing.event.ListSelectionListener() {
def valueChanged(e: javax.swing.event.ListSelectionEvent) =
if (modelList.getSelectedIndex() == -1)
okButton.setEnabled(false);
else okButton.setEnabled(true);
})
modelList.addMouseListener(new MouseAdapter() {
override def mouseClicked(e: MouseEvent) {
if (e.getClickCount() == 2)
if (modelList.getSelectedIndex() != -1)
create()
}
})
models.sortBy(_.name).foreach(d => listModel.addElement(new ListElement(d)))
modelScroll.setViewportView(modelList)
modelScroll.setBorder(BorderFactory.createTitledBorder("Type"))
val optionsPane = new JPanel()
optionsPane.setBorder(BorderFactory.createTitledBorder("Options"))
optionsPane.setLayout(new BoxLayout(optionsPane, BoxLayout.Y_AXIS))
val chkVisual = new JCheckBox("Create visual model")
chkVisual.setSelected(true)
val chkOpen = new JCheckBox("Open in editor")
chkOpen.setSelected(true)
//optionsPane.add(chkVisual)
optionsPane.add(chkOpen)
optionsPane.add(new JLabel("Model title: "))
val txtTitle = new JTextField()
optionsPane.add(txtTitle)
val dummy = new JPanel()
dummy.setPreferredSize(new Dimension(200, 1000));
dummy.setMaximumSize(new Dimension(200, 1000));
optionsPane.add(dummy)
val buttonsPane = new JPanel(new FlowLayout(FlowLayout.CENTER, 10, 10))
val okButton = new JButton()
okButton.setPreferredSize(new Dimension(100, 20))
okButton.setEnabled(false)
okButton.setText("OK")
okButton.addActionListener(new java.awt.event.ActionListener() {
def actionPerformed(e: java.awt.event.ActionEvent) {
if (okButton.isEnabled())
create()
}
})
val cancelButton = new JButton()
cancelButton.setPreferredSize(new Dimension(100, 20))
cancelButton.setText("Cancel")
cancelButton.addActionListener(new java.awt.event.ActionListener() {
def actionPerformed(e: java.awt.event.ActionEvent) {
cancel()
}
})
buttonsPane.add(okButton)
buttonsPane.add(cancelButton)
contentPane.add(modelScroll, BorderLayout.CENTER)
contentPane.add(optionsPane, BorderLayout.WEST)
contentPane.add(buttonsPane, BorderLayout.SOUTH)
txtTitle.addKeyListener(new java.awt.event.KeyAdapter() {
override def keyPressed(e: java.awt.event.KeyEvent) {
if (e.getKeyCode() == KeyEvent.VK_ENTER)
if (okButton.isEnabled())
create()
}
})
def cancel() = {
choice = None
setVisible(false)
}
def create() = {
choice = modelList.getSelectedValue match {
case e: ListElement => Some(e.newModel)
case _ => None
}
setVisible(false)
}
def createVisual = chkVisual.isSelected
def openInEditor = chkOpen.isSelected
}
object CreateWorkDialog {
def show(models: List[NewModelImpl], parentWindow: Window): IO[Option[(NewModelImpl, Boolean)]] = ioPure.pure {
if (models.isEmpty) {
JOptionPane.showMessageDialog(parentWindow, "Workcraft was unable to find any plug-ins that could create a new model.\\n\\nReconfiguring Workcraft (Utility->Reconfigure) might fix this.\\n\\nIf you are running Workcraft from a development environment such as Eclipse,\\nplease make sure to add the plug-in classes to the classpath in run configuration. ", "Warning", JOptionPane.WARNING_MESSAGE)
None
} else {
val dialog = new CreateWorkDialog(models, parentWindow)
GUI.centerAndSizeToParent(dialog, parentWindow)
dialog.setVisible(true)
dialog.choice.map((_,dialog.chkOpen.isSelected()))
}
}
}
| tuura/workcraft-2.2 | Gui/src/main/scala/org/workcraft/gui/CreateWorkDialog.scala | Scala | gpl-3.0 | 5,242 |
package io.vamp.common.notification
import akka.actor.{ AbstractLoggingActor, Actor, Props }
object LoggingNotificationActor {
def props: Props = Props[LoggingNotificationActor]
}
case class Error(notification: Notification, message: String)
case class Info(notification: Notification, message: String)
trait NotificationActor {
this: Actor ⇒
override def receive: Receive = {
case Error(notification, message) ⇒ error(notification, message)
case Info(notification, message) ⇒ info(notification, message)
}
def error(notification: Notification, message: String)
def info(notification: Notification, message: String)
}
class LoggingNotificationActor extends AbstractLoggingActor with NotificationActor {
override def error(notification: Notification, message: String): Unit = {
log.error(message)
}
override def info(notification: Notification, message: String): Unit = {
log.info(message)
}
}
| dragoslav/vamp | common/src/main/scala/io/vamp/common/notification/NotificationActor.scala | Scala | apache-2.0 | 945 |
package org.pignat.bwatnwa
import processing.core.PApplet
class Embedded extends Tmp {
override def setup(): Unit = {
// original setup code here ...
size(800, 800)
// prevent thread from starving everything else
noLoop()
}
override def draw(): Unit = {
stroke(255)
if (mousePressed) {
line(mouseX, mouseY, pmouseX, pmouseY)
}
}
override def mousePressed(): Unit = {
// do something based on mouse movement
// update the screen (run draw once)
redraw()
}
}
| dskntIndustry/WerkStatt | src/main/scala/org/pignat/bwatnwa/Embedded.scala | Scala | agpl-3.0 | 521 |
package org.bitcoins.rpc.client.v16
import org.bitcoins.core.currency.Bitcoins
import org.bitcoins.core.protocol.BitcoinAddress
import org.bitcoins.rpc.client.common.Client
import org.bitcoins.rpc.jsonmodels.ReceivedAccount
import org.bitcoins.rpc.serializers.JsonReaders._
import org.bitcoins.rpc.serializers.JsonSerializers._
import play.api.libs.json.{JsBoolean, JsNumber, JsString}
import scala.concurrent.Future
/**
* Bitcoin Core prior to version 0.17 had the concept of
* accounts. This has later been removed, and replaced
* with a label system, as well as functionality for
* having several distinct wallets active at the same time.
*/
trait V16AccountRpc { self: Client =>
def getAccountAddress(account: String): Future[BitcoinAddress] = {
bitcoindCall[BitcoinAddress]("getaccountaddress", List(JsString(account)))
}
def getReceivedByAccount(
account: String,
confirmations: Int = 1): Future[Bitcoins] = {
bitcoindCall[Bitcoins]("getreceivedbyaccount",
List(JsString(account), JsNumber(confirmations)))
}
def getAccount(address: BitcoinAddress): Future[String] = {
bitcoindCall[String]("getaccount", List(JsString(address.value)))
}
def getAddressesByAccount(account: String): Future[Vector[BitcoinAddress]] = {
bitcoindCall[Vector[BitcoinAddress]]("getaddressesbyaccount",
List(JsString(account)))
}
def listAccounts(
confirmations: Int = 1,
includeWatchOnly: Boolean = false): Future[Map[String, Bitcoins]] = {
bitcoindCall[Map[String, Bitcoins]](
"listaccounts",
List(JsNumber(confirmations), JsBoolean(includeWatchOnly)))
}
def setAccount(address: BitcoinAddress, account: String): Future[Unit] = {
bitcoindCall[Unit]("setaccount",
List(JsString(address.value), JsString(account)))
}
def listReceivedByAccount(
confirmations: Int = 1,
includeEmpty: Boolean = false,
includeWatchOnly: Boolean = false): Future[Vector[ReceivedAccount]] = {
bitcoindCall[Vector[ReceivedAccount]]("listreceivedbyaccount",
List(JsNumber(confirmations),
JsBoolean(includeEmpty),
JsBoolean(includeWatchOnly)))
}
}
| bitcoin-s/bitcoin-s-core | bitcoind-rpc/src/main/scala/org/bitcoins/rpc/client/v16/V16AccountRpc.scala | Scala | mit | 2,361 |
package scaffvis.client.components.common
import japgolly.scalajs.react.ReactElement
import japgolly.scalajs.react.vdom.prefix_<^._
/**
* Wrapper for Bootstrap provided Glyphicons (http://glyphicons.com/)
*/
object GlyphIcon {
type Icon = ReactElement
def base(name: String): Icon = <.span(^.className := s"glyphicon glyphicon-$name")
lazy val cog = base("cog")
lazy val check = base("check")
lazy val exclamationSign = base("exclamation-sign")
lazy val openFile = base("open-file")
lazy val questionSign = base("question-sign")
lazy val refresh = base("refresh")
lazy val remove = base("remove")
lazy val saveFile = base("save-file")
lazy val search = base("search")
lazy val thLarge = base("th-large")
lazy val thList = base("th-list")
lazy val unchecked = base("unchecked")
def checked_?(checked: Boolean) = if (checked) check else unchecked
}
| velkoborsky/scaffvis | client/src/main/scala/scaffvis/client/components/common/GlyphIcon.scala | Scala | gpl-3.0 | 885 |
package mr.merc.map.hex
import mr.merc.map.hex.Direction._
object Direction {
object N extends Direction("N")
object NE extends Direction("NE")
object SE extends Direction("SE")
object S extends Direction("S")
object SW extends Direction("SW")
object NW extends Direction("NW")
val list: List[Direction] = List(N, NE, SE, S, SW, NW)
type DirPair = (Direction, Direction)
def name(n:String):Direction = list.find(_.name.equalsIgnoreCase(n)).get
}
sealed abstract class Direction(val name:String) {
override def toString = name
def opposite = next.next.next
def next:Direction = {
if (this != NW) {
val index = Direction.list.indexOf(this)
Direction.list(index + 1)
} else {
N
}
}
def prev:Direction = {
if (this != N) {
val index = Direction.list.indexOf(this)
Direction.list(index - 1)
} else {
NW
}
}
def neighbours:Set[Direction] = {
val index = Direction.list.indexOf(this)
if (index == 0) {
Set(Direction.list(1), Direction.list.last)
} else if (index == Direction.list.size - 1) {
Set(Direction.list(0), Direction.list(Direction.list.size - 2))
} else {
Set(Direction.list(index - 1), Direction.list(index + 1))
}
}
def isNeighbour(dir:Direction): Boolean = {
neighbours.contains(dir)
}
}
| RenualdMarch/merc | src/main/scala/mr/merc/map/hex/Direction.scala | Scala | gpl-3.0 | 1,434 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnetexamples.rnn
import org.apache.mxnet.Callback.Speedometer
import org.apache.mxnet._
import BucketIo.BucketSentenceIter
import org.apache.mxnet.optimizer.SGD
import org.kohsuke.args4j.{CmdLineParser, Option}
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.JavaConverters._
import org.apache.mxnet.module.BucketingModule
import org.apache.mxnet.module.FitParams
/**
* Bucketing LSTM examples
*/
class LstmBucketing {
@Option(name = "--data-train", usage = "training set")
private val dataTrain: String = "example/rnn/sherlockholmes.train.txt"
@Option(name = "--data-val", usage = "validation set")
private val dataVal: String = "example/rnn/sherlockholmes.valid.txt"
@Option(name = "--num-epoch", usage = "the number of training epoch")
private val numEpoch: Int = 5
@Option(name = "--gpus", usage = "the gpus will be used, e.g. '0,1,2,3'")
private val gpus: String = null
@Option(name = "--cpus", usage = "the cpus will be used, e.g. '0,1,2,3'")
private val cpus: String = null
@Option(name = "--save-model-path", usage = "the model saving path")
private val saveModelPath: String = "model/lstm"
}
object LstmBucketing {
private val logger: Logger = LoggerFactory.getLogger(classOf[LstmBucketing])
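  /** Perplexity: exp of the mean negative log-likelihood of the correct words; `pred` holds one
    * softmax row per position and `label` the corresponding target word indices. */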
def perplexity(label: NDArray, pred: NDArray): Float = {
pred.waitToRead()
val labelArr = label.T.toArray.map(_.toInt)
var loss = .0
(0 until pred.shape(0)).foreach(i =>
loss -= Math.log(Math.max(1e-10f, pred.slice(i).toArray(labelArr(i))))
)
Math.exp(loss / labelArr.length).toFloat
}
def runTraining(trainData : String, validationData : String,
ctx : Array[Context], numEpoch : Int): Unit = {
NDArrayCollector.auto().withScope {
val batchSize = 32
val buckets = Array(10, 20, 30, 40, 50, 60)
val numHidden = 200
val numEmbed = 200
val numLstmLayer = 2
logger.info("Building vocab ...")
val vocab = BucketIo.defaultBuildVocab(trainData)
def BucketSymGen(key: AnyRef):
(Symbol, IndexedSeq[String], IndexedSeq[String]) = {
val seqLen = key.asInstanceOf[Int]
val sym = Lstm.lstmUnroll(numLstmLayer, seqLen, vocab.size,
numHidden = numHidden, numEmbed = numEmbed, numLabel = vocab.size)
(sym, IndexedSeq("data"), IndexedSeq("softmax_label"))
}
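      // Initial LSTM cell (c) and hidden (h) states, one pair per layer, handed to the bucketed
      // iterators as extra init states.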
val initC = (0 until numLstmLayer).map(l =>
(s"l${l}_init_c_beta", (batchSize, numHidden))
)
val initH = (0 until numLstmLayer).map(l =>
(s"l${l}_init_h_beta", (batchSize, numHidden))
)
val initStates = initC ++ initH
val dataTrain = new BucketSentenceIter(trainData, vocab,
buckets, batchSize, initStates)
val dataVal = new BucketSentenceIter(validationData, vocab,
buckets, batchSize, initStates)
val model = new BucketingModule(
symGen = BucketSymGen,
defaultBucketKey = dataTrain.defaultBucketKey,
contexts = ctx)
val fitParams = new FitParams()
fitParams.setEvalMetric(
new CustomMetric(perplexity, name = "perplexity"))
fitParams.setKVStore("device")
fitParams.setOptimizer(
new SGD(learningRate = 0.01f, momentum = 0f, wd = 0.00001f))
fitParams.setInitializer(new Xavier(factorType = "in", magnitude = 2.34f))
fitParams.setBatchEndCallback(new Speedometer(batchSize, 50))
logger.info("Start training ...")
model.fit(
trainData = dataTrain,
evalData = Some(dataVal),
numEpoch = numEpoch, fitParams)
logger.info("Finished training...")
}
}
def main(args: Array[String]): Unit = {
val inst = new LstmBucketing
val parser: CmdLineParser = new CmdLineParser(inst)
try {
parser.parseArgument(args.toList.asJava)
val contexts =
if (inst.gpus != null) inst.gpus.split(',').map(id => Context.gpu(id.trim.toInt))
else if (inst.cpus != null) inst.cpus.split(',').map(id => Context.cpu(id.trim.toInt))
else Array(Context.cpu(0))
runTraining(inst.dataTrain, inst.dataVal, contexts, 5)
} catch {
case ex: Exception =>
logger.error(ex.getMessage, ex)
parser.printUsage(System.err)
sys.exit(1)
}
}
}
| tlby/mxnet | scala-package/examples/src/main/scala/org/apache/mxnetexamples/rnn/LstmBucketing.scala | Scala | apache-2.0 | 5,070 |
package com.wixpress.petri.petri
import org.joda.time.DateTime
import org.specs2.matcher.{Matcher, Scope}
import org.specs2.mutable.SpecificationWithJUnit
import scala.collection.JavaConversions._
/**
* User: Dalias
* Date: 12/10/14
* Time: 4:45 PM
*/
class JdbcMetricsReportsDaoIT extends SpecificationWithJUnit {
sequential
val JDBC_H2_IN_MEM_CONNECTION_STRING: String = "jdbc:h2:mem:test"
trait Context extends Scope{
val dbDriver = DBDriver.dbDriver(JDBC_H2_IN_MEM_CONNECTION_STRING)
val metricsReportDao = new JdbcMetricsReportsDao (dbDriver.jdbcTemplate, 0l)
dbDriver.createSchema()
val simpleExperimentReport: ConductExperimentReport = new ConductExperimentReport("myServer", 11, "the value", 3l)
val anotherSimpleExperimentReport: ConductExperimentReport = new ConductExperimentReport("myServer", 12, "the value", 5l)
}
"Metrics Reports Dao " should {
"get single experiment conduction when only 1 experiment reported" in new Context {
metricsReportDao.addReports(List(simpleExperimentReport))
val experimentConduction = metricsReportDao.getReportedExperimentsSince(scheduledInterval = 500000l).last
experimentConduction.experimentId must beEqualTo(simpleExperimentReport.experimentId)
experimentConduction.totalConduction must beEqualTo(simpleExperimentReport.count)
}
"get multiple experiment conduction when some experiments reported" in new Context {
metricsReportDao.addReports(List(simpleExperimentReport, anotherSimpleExperimentReport))
metricsReportDao.addReports(List(anotherSimpleExperimentReport))
val experimentConductionList = metricsReportDao.getReportedExperimentsSince(scheduledInterval = 500000l)
val simpleExperimentConductTotal = experimentConductionList.filter(exp => exp.experimentId == simpleExperimentReport.experimentId)
val anotherSimpleExperimentTotal = experimentConductionList.filter(exp => exp.experimentId == anotherSimpleExperimentReport.experimentId)
simpleExperimentConductTotal.size must beEqualTo(1)
anotherSimpleExperimentTotal.size must beEqualTo(1)
simpleExperimentConductTotal.last.totalConduction must beEqualTo(simpleExperimentReport.count)
anotherSimpleExperimentTotal.last.totalConduction must beEqualTo(anotherSimpleExperimentReport.count * 2)
}
"get experimentId of experiments that were conducted after given date" in new Context {
metricsReportDao.addReports(List(simpleExperimentReport, anotherSimpleExperimentReport))
val givenDate = new DateTime()
print(givenDate)
metricsReportDao.addReports(List(anotherSimpleExperimentReport, anotherSimpleExperimentReport.copy(serverName = "anotherServer")))
metricsReportDao.getExperimentIdsLastConductedAfterGivenDate(givenDate).toList must contain(exactly(anotherSimpleExperimentReport.experimentId.asInstanceOf[java.lang.Integer] ))
}
"get empty list when no experiments were reported" in new Context {
metricsReportDao.addReports(List(simpleExperimentReport))
metricsReportDao.getReportedExperimentsSince(scheduledInterval = 0) must beEmpty
}
"Create a single report record successfully" in new Context {
metricsReportDao.addReports(List(simpleExperimentReport))
metricsReportDao.getReport(simpleExperimentReport.experimentId).toList must contain(aConductExperimentSummary(simpleExperimentReport))
}
"Update a single report record successfully" in new Context {
metricsReportDao.addReports(List(simpleExperimentReport))
metricsReportDao.addReports(List(simpleExperimentReport.copy(count = 5l)))
metricsReportDao.getReport(simpleExperimentReport.experimentId).toList must contain(aConductExperimentSummary(simpleExperimentReport, 8l , 5l))
}
"Create a couple of report records successfully" in new Context {
private val reportWithBValue: ConductExperimentReport = simpleExperimentReport.copy(experimentValue = "b value", count = 2l)
metricsReportDao.addReports(List(simpleExperimentReport, reportWithBValue))
metricsReportDao.getReport(simpleExperimentReport.experimentId).toList must contain(aConductExperimentSummary(simpleExperimentReport), aConductExperimentSummary(reportWithBValue))
}
"Update a couple of report records successfully" in new Context {
private val reportWithBValue: ConductExperimentReport = simpleExperimentReport.copy(experimentValue = "b value", count = 2l)
metricsReportDao.addReports(List(simpleExperimentReport, reportWithBValue))
//update both reports
metricsReportDao.addReports(List(simpleExperimentReport.copy(count = 7l), reportWithBValue.copy(count = 9l)))
metricsReportDao.getReport(simpleExperimentReport.experimentId).toList must contain(aConductExperimentSummary(simpleExperimentReport, 10l, 7l), aConductExperimentSummary(reportWithBValue, 11l, 9l))
}
}
private def aConductExperimentSummary(report : ConductExperimentReport) : Matcher[ConductExperimentSummary] =
aConductExperimentSummary(report, report.count, report.count)
private def aConductExperimentSummary(report : ConductExperimentReport, totalCount: Long, fiveMinuteCount : Long) : Matcher[ConductExperimentSummary] =
{(summary : ConductExperimentSummary) => summary.experimentId } ^^ beEqualTo(report.experimentId) and
{(summary : ConductExperimentSummary) => summary.experimentValue } ^^ beEqualTo(report.experimentValue) and
{(summary : ConductExperimentSummary) => summary.totalCount } ^^ beEqualTo(totalCount) and
{(summary : ConductExperimentSummary) => summary.fiveMinuteCount } ^^ beEqualTo(fiveMinuteCount) and
{(summary : ConductExperimentSummary) => summary.lastUpdated.getMillis } ^^ beBetween(new DateTime().minusMinutes(1).getMillis, new DateTime().plusMinutes(1).getMillis)
}
| wix/petri | petri-server-core/src/it/java/com/wixpress/petri/petri/JdbcMetricsReportsDaoIT.scala | Scala | bsd-3-clause | 5,870 |
package skutek_examples.sat_solver
import skutek.abstraction._
import skutek.std_effects._
import Solve.Fx
/*
* Ported from OCaml code:
* https://gist.github.com/Drup/4dc772ff82940608834fc65e3b80f583
* The control flow mechanism by (ab)use of exceptions from the original, is replaced by Choice effect
*/
object Solve {
case object Fx extends Choice
type Fx = Fx.type
def apply(formula: List[List[Literal]]): Set[Literal] !! Fx = Env(Set(), formula).unsat
}
case class Literal(name: String, yes: Boolean) {
def unary_~ = copy(yes = !yes)
}
private case class Env(solution: Set[Literal], formula: List[List[Literal]]) {
def assume(l: Literal): Env !! Fx =
if (solution.contains(l))
Return(this)
else
copy(solution = solution + l).bcp
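  // Boolean constraint propagation: re-check every clause under the current assignment, dropping
  // satisfied clauses, failing on empty ones and assuming unit literals.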
def bcp = formula.foldLeft_!!(copy(formula = Nil))((env, ls) => env.bcpAux(ls))
def bcpAux(ls: List[Literal]): Env !! Fx =
if (ls.exists(l => solution.contains(l)))
Return(this)
else
ls.filter(l => !solution.contains(~l)) match {
case List() => Fx.NoChoice
case List(l) => assume(l)
case ls2 => Return(copy(formula = ls2 :: formula))
}
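  // Depth-first search with the Choice effect: branch on the first literal of the first remaining
  // clause and on its negation; a model is found once no clauses remain.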
def unsat: Set[Literal] !! Fx =
formula match {
case Nil => Return(solution)
case (l :: _ ) :: _ => for {
l2 <- Fx.from(l, ~l)
env <- assume(l2)
result <- env.unsat
} yield result
case _ => ???
}
}
| marcinzh/skutek | modules/examples/src/main/scala/skutek_examples/sat_solver/Solve.scala | Scala | mit | 1,427 |
package bootstrap.liftweb
import code.lib.Bootstrap
import code.model._
import code.snippet.{Spell, SpellInfo}
import net.liftweb.common._
import net.liftweb.http._
import net.liftweb.mapper._
import net.liftweb.sitemap.Loc._
import net.liftweb.sitemap._
import net.liftweb.util._
/**
* A class that's instantiated early and run. It allows the application
* to modify lift's environment
*/
class Boot {
def boot() {
configurePersistence()
LiftRules.setSiteMapFunc(() => sitemap)
configureProjectPackages()
configureAjax()
configureEncodings()
LiftRules.noticesEffects.default.set(Bootstrap.Alerts.getNoticeEffects)
}
def userLinkText = User.currentUser.map(_.shortName).openOr("not logged in")
def sitemap = SiteMap(
Menu.i("Index") / "index" >> LocGroup("lift-internals"),
Menu.i("Apps") / "apps" / "#" >> LocGroup("main") >> PlaceHolder submenus (
Menu.i("Chat") / "apps"/ "chat"),
Menu.i("Forms") / "forms" / "#" >> LocGroup("main") >> PlaceHolder submenus (
Menu.i("Dumb") / "forms" / "dumb",
Menu.i("OnSubmit") / "forms" / "onSubmit",
Menu.i("Stateful") / "forms" / "stateful",
Menu.i("RequestVars") / "forms" / "reqvar",
Menu.i("Screen") / "forms" / "screen"),
Menu.i("Search") / "spellbook" / "search" >> LocGroup("main"),
Menu.i("Browse") / "spellbook" / "browse" >> LocGroup("main"),
Menu.param[SpellInfo]("Spell", "Spell", s => Spell.getSpellById(s), p => p.spellId) / "spellbook" / "spell" >> Hidden,
User.loginMenuLoc.openOrThrowException("User Module Login Menu Error"),
User.createUserMenuLoc.openOrThrowException("User Module Create Menu Error"),
Menu("user", userLinkText) / "#" >> LocGroup("user") >> PlaceHolder submenus (
User.logoutMenuLoc.openOrThrowException("User Module Logout Menu Error"),
User.editUserMenuLoc.openOrThrowException("User Module Edit Menu Error"),
User.changePasswordMenuLoc.openOrThrowException("User Module PWD Menu Error"))
)
def configurePersistence(): Unit = {
if (!DB.jndiJdbcConnAvailable_?) {
val vendor = new StandardDBVendor(
Props.get("db.driver") openOr "org.h2.Driver",
Props.get("db.url") openOr "jdbc:h2:lift_proto.db;AUTO_SERVER=TRUE",
Props.get("db.user"),
Props.get("db.password")
)
LiftRules.unloadHooks.append(vendor.closeAllConnections_!)
DB.defineConnectionManager(DefaultConnectionIdentifier, vendor)
}
Schemifier.schemify(true, Schemifier.infoF _, User)
}
def configureProjectPackages(): Unit = {
LiftRules.addToPackages("code")
LiftRules.addToPackages("forms")
}
def configureAjax(): Unit = {
LiftRules.ajaxStart = Full(() => LiftRules.jsArtifacts.show("ajax-loader").cmd)
LiftRules.ajaxEnd = Full(() => LiftRules.jsArtifacts.hide("ajax-loader").cmd)
}
def configureEncodings(): Unit = {
LiftRules.early.append(_.setCharacterEncoding("UTF-8"))
LiftRules.htmlProperties.default.set((r: Req) => new Html5Properties(r.userAgent))
}
}
| jbaiera/lift-play | src/main/scala/bootstrap/liftweb/Boot.scala | Scala | mit | 3,048 |
package com.github.andr83.parsek.pipe
import com.github.andr83.parsek._
import org.scalatest.{FlatSpec, Matchers}
/**
* @author andr83
*/
class CoalesceSpec extends FlatSpec with Matchers {
implicit val context = new PipeContext()
it should "return first non empty value and update with it first field" in {
val pipe = CoalescePipe(fields = Seq("f1".asFieldPath, "f2".asFieldPath))
val value = PMap("f2" -> PString("v2"))
val result = pipe.run(value)
result shouldBe Some(PMap(
"f1" -> "v2",
"f2" -> "v2"
))
}
}
| andr83/parsek | core/src/test/scala/com/github/andr83/parsek/pipe/CoalesceSpec.scala | Scala | mit | 559 |
package util.tasks
/**
* Abstract timed task
* @param time time when task should run (0 to run immediately)
 * @param priority task priority, in the range [0, Int.MaxValue]
* Created by CAB on 13.10.2014.
*/
abstract class TimedTask (val time:Long, val priority:Int) {
def execute():Unit}
| AlexCAB/FreelanceAnalytics | src/util/tasks/TimedTask.scala | Scala | mit | 282 |
package dc
import java.text.SimpleDateFormat
import java.util.Date
import scala.collection.SortedMap
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import dc.json.Checkpoint
import dc.json.Measurement
import dc.serverapi.CloudantApi
object HandleCheckpoints {
var continue: Boolean = true
abstract class MenuOption(text: String) {
override def toString = text
def handle()
}
case object MenuSetDatabase extends MenuOption("Set database") {
override def handle() {
closeFiles()
println("Cloudant database: ")
val cloudantName = readLine
println("Database name: ")
currentCloudantApi = Some(CloudantApi(cloudantName, readLine))
currentCloudantApi match {
case Some(cloudantApi) => {
currentCheckpointFile = Some(getFileWriteStream(getFileName(cloudantApi.cloudantName, cloudantApi.databaseName, "checkpoints")))
currentMeasurementFile = Some(getFileWriteStream(getFileName(cloudantApi.cloudantName, cloudantApi.databaseName, "measurements")))
}
case None => { println("Not a valid server") }
}
}
}
case object MenuListCheckpoints extends MenuOption("List all checkpoints") {
override def handle() {
println("handleCheckpoints")
currentCloudantApi match {
case Some(cloudantApi) => {
val checkpointStreamFuture = cloudantApi.checkpointSequence()
val iterator = Await.result(checkpointStreamFuture, Duration.Inf).iterator
while (iterator.hasNext) {
println(iterator.next())
}
}
case None => {}
}
}
}
case object MenuListMeasurements extends MenuOption("List measurements by checkpoint") {
override def handle() {
println("Checkpoint id: ")
val checkpointId = readLine
currentCloudantApi match {
case Some(cloudantApi) => {
val measurementStreamFuture = cloudantApi.measurementSequence(checkpointId)
val iterator = Await.result(measurementStreamFuture, Duration.Inf).iterator
while (iterator.hasNext) {
println(iterator.next())
}
}
case None => {}
}
}
}
case object MenuPrintAllToFile extends MenuOption("Print everything to file") {
override def handle() {
println("handlePrintAllToFile")
currentCloudantApi match {
case Some(cloudantApi) => {
val checkpointFile = currentCheckpointFile.getOrElse(System.out)
val measurementFile = currentMeasurementFile.getOrElse(System.out)
val checkpointStreamFuture = cloudantApi.checkpointSequence()
checkpointFile.println(Checkpoint.keysShort(SEPERATOR))
measurementFile.println(Measurement.keysShort(SEPERATOR))
val checkpointIterator = Await.result(checkpointStreamFuture, Duration.Inf).iterator
while (checkpointIterator.hasNext) {
val checkpoint: Checkpoint = checkpointIterator.next()
checkpointFile.println(checkpoint.valuesShort(SEPERATOR))
val measurementStreamFuture = cloudantApi.measurementSequence(checkpoint._id)
val measurementIterator = Await.result(measurementStreamFuture, Duration.Inf).iterator
while (measurementIterator.hasNext) {
val measurement = measurementIterator.next()
measurementFile.println(measurement.valuesShort(SEPERATOR, checkpoint.checkpoint_name))
}
}
}
case None => {}
}
}
}
case object MenuExit extends MenuOption("Exit") {
override def handle() {
closeFiles()
HandleCheckpoints.continue = false
}
}
private val SEPERATOR = ","
private val menuOptions: SortedMap[Int, MenuOption] = SortedMap(
1 -> MenuSetDatabase,
2 -> MenuListCheckpoints,
3 -> MenuListMeasurements,
4 -> MenuPrintAllToFile,
5 -> MenuExit)
private var currentCloudantApi: Option[CloudantApi] = None
private var currentCheckpointFile: Option[java.io.PrintStream] = None
private var currentMeasurementFile: Option[java.io.PrintStream] = None
def main(args: Array[String]) {
try {
while (continue) {
println()
printMenu()
handleInput()
}
} catch {
case e: Exception => {
println(e.getMessage())
println()
e.printStackTrace()
}
}
//This is needed to terminate the program
CloudantApi.shutdown
}
private def getFileWriteStream(filename: String): java.io.PrintStream = {
new java.io.PrintStream(new java.io.FileOutputStream(filename))
}
private def getFileName(server: String, database: String, typeOfData: String): String = {
val sdf = new SimpleDateFormat("MM_dd");
val dateObj = new Date()
val dateString = sdf.format(dateObj);
server + "_" + database + "_" + typeOfData + dateString + ".csv"
}
private def closeFiles() {
currentCheckpointFile match {
case Some(file) => {
file.close()
currentCheckpointFile = None
}
case None => {}
}
currentMeasurementFile match {
case Some(file) => {
file.close()
currentMeasurementFile = None
}
case None => {}
}
}
def handleInput() {
val pos = readInt
if (pos < 1 || pos > menuOptions.size)
return
menuOptions(pos).handle()
}
private def printMenu() {
println("")
println("Options")
println("")
menuOptions.foreach{ case (nr, menuOption) => println(nr + ": " + menuOption.toString) }
}
}
| MagnusAk78/dynamic-checklist-server | tools/src/main/scala/dc/HandleCheckpoints.scala | Scala | gpl-3.0 | 5,614 |
package cilib
package exec
import zio.prelude._
abstract class MonadStep[M[+_]: IdentityFlatten: Covariant] {
def liftR[A](r: RVar[A]): M[A]
}
| cirg-up/cilib | exec/src/main/scala/cilib/MonadStep.scala | Scala | apache-2.0 | 147 |
package club.diybio.bank
import utest._
object DummyFrontendTest extends TestSuite {
val tests = TestSuite {
"this is simple frontend dummy test" - {
assert(true)
}
}
}
| denigma/plasmid-bank | frontend/src/test/scala/club/diybio/bank/DummyFrontendTest.scala | Scala | mpl-2.0 | 189 |
package io.swagger.client.model
import io.swagger.client.core.ApiModel
import org.joda.time.DateTime
case class VariableUserSettings (
/* User ID */
user: Int,
/* Variable DISPLAY name */
variable: String,
/* Estimated duration of time following the onset delay in which a stimulus produces a perceivable effect */
durationOfAction: Int,
/* fillingValue */
fillingValue: Int,
/* joinWith */
joinWith: String,
/* maximumValue */
maximumValue: Float,
/* minimumValue */
minimumValue: Float,
/* name */
name: String,
/* onsetDelay */
onsetDelay: Int,
/* unit */
unit: String)
extends ApiModel
| QuantiModo/QuantiModo-SDK-Akka-Scala | src/main/scala/io/swagger/client/model/VariableUserSettings.scala | Scala | gpl-2.0 | 638 |
package org.lolhens.renderengine.model
import org.lolhens.renderengine.vector.Vector3f
/**
* Created by LolHens on 05.10.2014.
*/
class BoundingBox(val min: Vector3f, val max: Vector3f) {
def contain(cube: BoundingBox): BoundingBox = new BoundingBox(min.min(cube.min), max.max(cube.max))
}
object NullBoundingBox extends BoundingBox(Vector3f.Zero, Vector3f.Zero) {}
| LolHens/LibRenderEngine | src/main/scala/org/lolhens/renderengine/model/BoundingBox.scala | Scala | gpl-2.0 | 375 |
/*
* Copyright 2014 porter <https://github.com/eikek/porter>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package porter.model
@SerialVersionUID(20131121)
final case class DefaultPermission(parts: Parts) extends Permission {
def implies(other: Permission) = other match {
case dp: DefaultPermission =>
val compareParts = (parts zip dp.parts) forall {
case (mp, op) => implies(mp, op)
}
if (!compareParts || parts.length <= dp.parts.length) compareParts && !parts.isEmpty
else parts.drop(dp.parts.length) forall (_ == Set("*"))
case _ => false
}
private def implies(a: Set[String], b: Set[String]): Boolean = {
a == Set("*") || b.subsetOf(a)
}
override def toString = parts.map(gl => gl.mkString(",")).mkString(":")
}
object DefaultPermission {
val factory: PermissionFactory = {
case str => DefaultPermission(str)
}
def apply(str: String): DefaultPermission = {
new DefaultPermission(split(str))
}
def split(str: String): Parts = {
def splitter(chars: List[Char], sep: Char): List[String] =
chars.foldRight(List("")) { (c, list) =>
if (c == sep) "" :: list
else (list.head + c) :: list.tail
}
val parts = splitter(str.toList, ':')
if (parts.exists(_.isEmpty)) {
throw new IllegalArgumentException(s"Invalid permission string: '$str'")
}
parts map { s =>
splitter(s.toList, ',').filter(_.nonEmpty).toSet
}
}
}
| eikek/porter | api/src/main/scala/porter/model/DefaultPermission.scala | Scala | apache-2.0 | 1,975 |
/*
* Copyright 2012-2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.edda
import org.slf4j.LoggerFactory
object CollectionManager {
private[this] val logger = LoggerFactory.getLogger(getClass)
var collections: Map[String, Queryable] = Map()
def register(name: String, collection: Queryable) {
if (logger.isInfoEnabled) logger.info("Registering collection " + collection)
collections = collections + (name -> collection)
}
def get(name: String): Option[Queryable] = {
collections.get(name)
}
def names(): Set[String] = collections.keySet
def start()(implicit req: RequestId) {
if (logger.isInfoEnabled) logger.info(s"$req Starting collections")
collections.values.foreach(_.start())
}
def stop()(implicit req: RequestId) {
if (logger.isInfoEnabled) logger.info(s"$req Stopping collections")
collections.values.foreach(_.stop())
}
}
| wstrucke/edda | src/main/scala/com/netflix/edda/CollectionManager.scala | Scala | apache-2.0 | 1,441 |
package net.fwbrasil.activate.json
import net.fwbrasil.activate.ActivateContext
import net.fwbrasil.activate.entity.BaseEntity
trait JsonContext[J] {
val context: ActivateContext
def createEntityFromJson[E <: BaseEntity: Manifest](json: String): E
def createEntityFromJson[E <: BaseEntity: Manifest](json: J): E
def updateEntityFromJson[E <: BaseEntity: Manifest](json: String, id: E#ID): E
def updateEntityFromJson[E <: BaseEntity: Manifest](json: J, id: E#ID): E
def updateEntityFromJson[E <: BaseEntity: Manifest](json: String, entity: E): E
def updateEntityFromJson[E <: BaseEntity: Manifest](json: J, entity: E): E
def updateEntityFromJson[E <: BaseEntity: Manifest](json: String): E
def updateEntityFromJson[E <: BaseEntity: Manifest](json: J): E
def createOrUpdateEntityFromJson[E <: BaseEntity: Manifest](json: String): E
def createOrUpdateEntityFromJson[E <: BaseEntity: Manifest](json: J): E
def createJsonStringFromEntity[E <: BaseEntity: Manifest](entity: E, depth: Int = 0, excludeFields: List[String] = List(), includeFields: List[String] = List()): String
def createJsonFromEntity[E <: BaseEntity: Manifest](entity: E, depth: Int = 0, excludeFields: List[String] = List(), includeFields: List[String] = List()): J
def fullDepth = Int.MaxValue
implicit class EntityJsonMethods[E <: BaseEntity: Manifest](val entity: E) {
def updateFromJson(json: J): E =
JsonContext.this.updateEntityFromJson(json, entity)
def updateFromJson(json: String): E =
JsonContext.this.updateEntityFromJson(json, entity)
def toJsonString: String = toJsonString()
def toJsonString(depth: Int = 0, excludeFields: List[String] = List(), includeFields: List[String] = List()): String =
createJsonStringFromEntity(entity, depth, excludeFields, includeFields)
def toJson: J = toJson()
def toJson(depth: Int = 0, excludeFields: List[String] = List(), includeFields: List[String] = List()): J =
createJsonFromEntity(entity, depth, excludeFields, includeFields)
}
}
| avramirez/activate | activate-core/src/main/scala/net/fwbrasil/activate/json/JsonContext.scala | Scala | lgpl-2.1 | 2,120 |
/*
* Copyright (C) 2016 Department for Business, Energy and Industrial Strategy
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package services
import eu.timepit.refined.auto._
import models.ApplicationFormId
import org.scalatest.{Matchers, WordSpecLike}
class ApplicationFormURLsTest extends WordSpecLike with Matchers {
"ApplicationFormURLsTest" should {
val urls = new ApplicationFormURLs("")
val id = ApplicationFormId(1L)
"generate correct url for applicationForm" in {
urls.applicationForm(id) shouldBe "/application_form/1"
}
"generate correct url for application" in {
urls.application(id) shouldBe "/application_form/1/application"
}
}
}
| UKGovernmentBEIS/rifs-frontend-play | src/test/scala/services/ApplicationFormURLsTest.scala | Scala | gpl-3.0 | 1,301 |
package edu.gemini.pit.catalog
import edu.gemini.model.p1.{immutable => I}
import java.net.URL
import java.net.URLEncoder.{encode => urlencode}
import edu.gemini.spModel.core._
import votable._
import java.util.UUID
object Simbad extends Catalog with App {
private lazy val hosts = Array("simbad.u-strasbg.fr", "simbak.cfa.harvard.edu")
private lazy val simbad = hosts.map(apply).reduceLeft(_ || _)
def find(id:String)(callback:Result => Unit) {
simbad.find(id)(callback)
}
def apply(host:String):Catalog = new Simbad(host)
find("sirius") {
case Success(t, cs) => println((t, cs))
case x => println(x)
}
}
class Simbad private (val host:String) extends VOTableCatalog {
def url(id:String) = new URL(s"http://$host/simbad/sim-id?output.format=VOTABLE&Ident=${urlencode(id, "UTF-8")}")
def decode(vot:VOTable):Seq[I.Target] = for {
// In the List monad here, eventually iterating rows
resource <- vot.resources
table @ Table("simbad", _, _, _) <- resource.tables
row <- table.data.tableData.rows
kvs = table.fields.zip(row)
// Local find function
str = (s:String) => kvs.find(_._1.ucd.exists(_.toLowerCase == s.toLowerCase)).map(_._2)
num = (s:String) => str(s).flatMap(_.toDoubleOption)
// Find magnitudes checking ucd and field name
magStr = (b: String, s:String) => kvs.find(i => i._1.ucd.exists(_.toLowerCase == s.toLowerCase) && i._1.id.endsWith(b)).map(_._2)
magNum = (b: MagnitudeBand, s:String) => magStr(b.name, s).flatMap(_.toDoubleOption)
// Switch to Option here to pull out data
epoch <- vot.definitions.map(_.cooSys.epoch).map {
case "J2000" => I.CoordinatesEpoch.J_2000
case s => I.CoordinatesEpoch.forName(s)
}
name <- str("meta.id;meta.main")
ra <- num("pos.eq.ra;meta.main").map(d => RightAscension.fromAngle(Angle.fromDegrees(d)))
dec <- num("pos.eq.dec;meta.main").flatMap(d => Declination.fromAngle(Angle.fromDegrees(d)))
// Mags get pulled out into a list
mags = for {
(k, Some(v)) <- Map(
MagnitudeBand._u -> magNum(MagnitudeBand._u, "phot.mag;em.opt.U"),
MagnitudeBand._g -> magNum(MagnitudeBand._g, "phot.mag;em.opt.B"),
MagnitudeBand._r -> magNum(MagnitudeBand._r, "phot.mag;em.opt.R"),
MagnitudeBand._i -> magNum(MagnitudeBand._i, "phot.mag;em.opt.I"),
MagnitudeBand._z -> magNum(MagnitudeBand._z, "phot.mag;em.opt.I"),
MagnitudeBand.U -> magNum(MagnitudeBand.U, "phot.mag;em.opt.U"),
MagnitudeBand.V -> magNum(MagnitudeBand.V, "phot.mag;em.opt.V"),
MagnitudeBand.B -> magNum(MagnitudeBand.B, "phot.mag;em.opt.B"),
MagnitudeBand.R -> magNum(MagnitudeBand.R, "phot.mag;em.opt.R"),
MagnitudeBand.J -> magNum(MagnitudeBand.J, "phot.mag;em.ir.J"),
MagnitudeBand.H -> magNum(MagnitudeBand.H, "phot.mag;em.ir.H"),
MagnitudeBand.K -> magNum(MagnitudeBand.K, "phot.mag;em.ir.K"))
} yield new Magnitude(v, k, k.defaultSystem)
// Proper Motion
pm = for {
dRa <- num("pos.pm;pos.eq.ra")
dDec <- num("pos.pm;pos.eq.dec")
} yield I.ProperMotion(dRa, dDec) // TODO: are these correct?
} yield I.SiderealTarget(UUID.randomUUID(), cleanName(name), Coordinates(ra, dec), epoch, pm, mags.toList)
private def cleanName(s:String) = if (s.startsWith("NAME ")) s.substring(5) else s
}
| arturog8m/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/catalog/Simbad.scala | Scala | bsd-3-clause | 4,489 |
/*
* # Highly divisible triangular number
* ## Problem 12
* The sequence of triangle numbers is generated by adding the natural numbers. So the 7th triangle number would be 1 + 2 + 3 + 4 + 5 + 6 + 7 = 28. The first ten terms would be:
* > 1, 3, 6, 10, 15, 21, 28, 36, 45, 55, ...
* Let us list the factors of the first seven triangle numbers:
* 1: 1
* 3: 1,3
* 6: 1,2,3,6
* 10: 1,2,5,10
* 15: 1,3,5,15
* 21: 1,3,7,21
* 28: 1,2,4,7,14,28
* We can see that 28 is the first triangle number to have over five divisors.
*
* What is the value of the first triangle number to have over five hundred divisors?
*/
import scala.math.sqrt
object Euler12 {
def triangles(i: Int): Int = (i * (i+1))/2
  def divisorCount(n: Int): Int = {
    val root = sqrt(n).toInt
    val pairs = (1 to root).count(n % _ == 0) * 2
    // divisors come in pairs (d, n/d); subtract one when n is a perfect square so sqrt(n) is not counted twice
    if (root * root == n) pairs - 1 else pairs
  }
def main(args: Array[String]) {
println(Iterator.from(1).map(triangles).find(divisorCount(_) > 500))
}
}
| NotBobTheBuilder/ProjectEuler | scala/p12.scala | Scala | mit | 934 |
/**
* CSPFJ - CSP solving API for Java
* Copyright (C) 2006 Julien VION
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
package concrete.constraint.linear
;
import bitvectors.BitVector
import concrete._
import concrete.constraint.Constraint
final class LeC(val v: Variable, val constant: Int) extends Constraint(Array(v)) {
def init(ps: ProblemState): ProblemState = ps
def check(t: Array[Int]): Boolean = t(0) <= constant
def advise(ps: ProblemState, event: Event, p: Int) = 1
override def consistent(ps: ProblemState, mod: Iterable[Int]): Outcome = if (ps.dom(v).head <= constant) ps else Contradiction(scope)
def revise(ps: ProblemState, mod: BitVector): Outcome = ps.removeAfter(v, constant).entail(this)
def simpleEvaluation = 1
override def toString(ps: ProblemState) = s"${v.toString(ps)} <= $constant"
}
final class LtC(val v: Variable, var constant: Int) extends Constraint(Array(v)) {
def init(ps: ProblemState): ProblemState = ps
def check(t: Array[Int]): Boolean = t(0) < constant
def advise(ps: ProblemState, event: Event, p: Int) = 1
def revise(ps: ProblemState, mod: BitVector): Outcome = ps.removeFrom(v, constant) //.entail(this)
def simpleEvaluation = 1
override def consistent(ps: ProblemState, mod: Iterable[Int]): Outcome = if (ps.dom(v).head < constant) ps else Contradiction(scope)
override def toString(ps: ProblemState) = s"${v.toString(ps)} < $constant"
}
final class GeC(val v: Variable, val constant: Int) extends Constraint(Array(v)) {
def init(ps: ProblemState): ProblemState = ps
def check(t: Array[Int]): Boolean = t(0) >= constant
def advise(ps: ProblemState, event: Event, p: Int) = 1
def revise(ps: ProblemState, mod: BitVector): Outcome = ps.removeUntil(v, constant).entail(this)
def simpleEvaluation = 1
override def consistent(ps: ProblemState, mod: Iterable[Int]): Outcome = if (ps.dom(v).last >= constant) ps else Contradiction(scope)
override def toString(ps: ProblemState) = s"${v.toString(ps)} >= $constant"
}
final class GtC(val v: Variable, var constant: Int) extends Constraint(Array(v)) {
def init(ps: ProblemState): ProblemState = ps
def check(t: Array[Int]): Boolean = t(0) > constant
def advise(ps: ProblemState, event: Event, p: Int) = 1
def revise(ps: ProblemState, mod: BitVector): Outcome = ps.removeTo(v, constant) //.entail(this)
def simpleEvaluation = 1
override def consistent(ps: ProblemState, mod: Iterable[Int]): Outcome = if (ps.dom(v).last > constant) ps else Contradiction(scope)
override def toString(ps: ProblemState) = s"${v.toString(ps)} > $constant"
}
/**
* Constraint v0 + constant >(=) v1
*/
final class Gt(val v0: Variable, val constant: Int, val v1: Variable, val strict: Boolean)
extends Constraint(Array(v0, v1)) {
val simpleEvaluation = 1
def init(ps: ProblemState): ProblemState = {
ps
}
def this(v0: Variable, v1: Variable, strict: Boolean) =
this(v0, 0, v1, strict)
override def check(t: Array[Int]): Boolean = {
if (strict) {
t(0) + constant > t(1)
} else {
t(0) + constant >= t(1)
}
}
def revise(ps: ProblemState, mod: BitVector): Outcome = {
if (strict) {
ps
.removeTo(v0, ps.dom(v1).head - constant)
.removeFrom(v1, ps.dom(v0).last + constant)
.entailIf(this, mod =>
mod.dom(v1).last < mod.dom(v0).head + constant)
} else {
ps
.removeUntil(v0, ps.dom(v1).head - constant)
.removeAfter(v1, ps.dom(v0).last + constant)
.entailIf(this, mod =>
mod.dom(v1).last <= mod.dom(v0).head + constant)
}
}
override def consistent(ps: ProblemState, mod: Iterable[Int]): Outcome = {
val max0 = ps.dom(v0).last + constant
val min1 = ps.dom(v1).head
if (max0 > min1 || !strict && max0 == min1) ps else Contradiction(scope)
}
override def toString(ps: ProblemState) =
s"${v0.toString(ps)} ${
if (constant > 0) {
" + " + constant
} else if (constant < 0) {
" - " + (-constant)
} else {
""
}
} ${if (strict) " > " else " >= "} ${v1.toString(ps)}"
def advise(ps: ProblemState, event: Event, p: Int): Int = if (event <= BoundRemoval) 2 else -1
}
| concrete-cp/concrete | src/main/scala/concrete/constraint/linear/Gt.scala | Scala | lgpl-2.1 | 4,944 |
package com.twitter.finagle
import com.twitter.io.Buf
package object decoder {
/**
* A `Decoder` performs protocol decoding. As `Buf`s arrive on the wire, a
* decoder accumulates them until completed messages arrive. The return value
* is an ordered sequence of any completed messages as a result of accumulating
* the additional Buf. If no complete messages are present, an empty collection is
* returned.
* Stateful implementations should be expected.
*/
private[finagle] type Decoder[T] = (Buf => IndexedSeq[T])
/**
* A `Framer` performs protocol framing. As `Buf`s arrive on the wire, a
* framer accumulates them until completed frames arrive. The return value
* is an ordered sequence of any completed frames as a result of accumulating
* the additional Buf. If no complete frames are present, an empty collection is
* returned.
* Stateful implementations should be expected.
*
* @see [[LengthFieldFramer]] as an example
* implementation.
*/
private[finagle] type Framer = Decoder[Buf]
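  // Illustrative sketch (added for clarity, not part of the original file): a minimal
  // stateful Framer that splits incoming bytes on '\n', shown only to make the
  // Decoder/Framer contract concrete; LengthFieldFramer is the real example implementation.
  // It assumes nothing beyond the public Buf API (Empty, concat, ByteArray.Owned).
  private[finagle] final class LineFramer extends Framer {
    private[this] var pending: Buf = Buf.Empty
    def apply(chunk: Buf): IndexedSeq[Buf] = {
      // Accumulate the new chunk, then emit one frame per complete '\n'-terminated line.
      pending = pending.concat(chunk)
      val bytes = Buf.ByteArray.Owned.extract(pending)
      val frames = IndexedSeq.newBuilder[Buf]
      var start = 0
      var i = 0
      while (i < bytes.length) {
        if (bytes(i) == '\n'.toByte) {
          frames += Buf.ByteArray.Owned(bytes, start, i) // frame without the delimiter
          start = i + 1
        }
        i += 1
      }
      pending = Buf.ByteArray.Owned(bytes, start, bytes.length) // keep the incomplete tail
      frames.result()
    }
  }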
}
| koshelev/finagle | finagle-core/src/main/scala/com/twitter/finagle/decoder/package.scala | Scala | apache-2.0 | 1,061 |
package com.github.alixba.vast
trait VideoClicks extends VASTElement {
def clicksTracking: Seq[ClickTracking]
def customClicks: Seq[CustomClick]
} | AlixBa/vast | src/main/scala/com/github/alixba/vast/VideoClicks.scala | Scala | mit | 154 |
def factorial(n: Long): Long = n match {
case 0 => 1
case x if x > 0 => factorial(n - 1) * n
}
println(factorial(3))
println(factorial(0))
println(factorial(20))
| spolnik/7-languages-in-7-weeks | scala/factorial.scala | Scala | apache-2.0 | 167 |
package org.neo4j.scala.util
package scalax
package rules
/**
* A workaround for the difficulties of dealing with
* a contravariant 'In' parameter type...
*/
class InRule[In, +Out, +A, +X](rule: Rule[In, Out, A, X]) {
def mapRule[Out2, B, Y](f: Result[Out, A, X] => In => Result[Out2, B, Y]): Rule[In, Out2, B, Y] = rule.factory.rule {
in: In => f(rule(in))(in)
}
/**Creates a rule that succeeds only if the original rule would fail on the given context. */
def unary_! : Rule[In, In, Unit, Nothing] = mapRule {
case Success(_, _) => in: In => Failure
case _ => in: In => Success(in, ())
}
/**Creates a rule that succeeds if the original rule succeeds, but returns the original input. */
def & : Rule[In, In, A, X] = mapRule {
case Success(_, a) => in: In => Success(in, a)
case Failure => in: In => Failure
case Error(x) => in: In => Error(x)
}
}
class SeqRule[S, +A, +X](rule: Rule[S, S, A, X]) {
import rule.factory._
def ? = rule mapRule {
case Success(out, a) => in: S => Success(out, Some(a))
case Failure => in: S => Success(in, None)
case Error(x) => in: S => Error(x)
}
/**Creates a rule that always succeeds with a Boolean value.
* Value is 'true' if this rule succeeds, 'false' otherwise */
def -? = ? map {_ isDefined}
def * = from[S] {
// tail-recursive function with reverse list accumulator
def rep(in: S, acc: List[A]): Result[S, List[A], X] = rule(in) match {
case Success(out, a) => rep(out, a :: acc)
case Failure => Success(in, acc.reverse)
case err: Error[_] => err
}
in => rep(in, Nil)
}
def + = rule ~++ *
  def ~>?[B >: A, X2 >: X](f: => Rule[S, S, B => B, X2]) =
    for (a <- rule; fs <- f ?) yield fs.foldLeft[B](a) { (b, f) => f(b) }
  def ~>*[B >: A, X2 >: X](f: => Rule[S, S, B => B, X2]) =
    for (a <- rule; fs <- f *) yield fs.foldLeft[B](a) { (b, f) => f(b) }
def ~*~[B >: A, X2 >: X](join: => Rule[S, S, (B, B) => B, X2]) = {
this ~>* (for (f <- join; a <- rule) yield f(_: B, a))
}
/**Repeats this rule one or more times with a separator (which is discarded) */
def +/[X2 >: X](sep: => Rule[S, S, Any, X2]) = rule ~++ (sep -~ rule *)
/**Repeats this rule zero or more times with a separator (which is discarded) */
def */[X2 >: X](sep: => Rule[S, S, Any, X2]) = +/(sep) | state[S].nil
def *~-[Out, X2 >: X](end: => Rule[S, Out, Any, X2]) = (rule - end *) ~- end
def +~-[Out, X2 >: X](end: => Rule[S, Out, Any, X2]) = (rule - end +) ~- end
/**Repeats this rule num times */
def times(num: Int): Rule[S, S, Seq[A], X] = from[S] {
val result = new collection.mutable.ArraySeq[A](num)
// more compact using HoF but written this way so it's tail-recursive
def rep(i: Int, in: S): Result[S, Seq[A], X] = {
if (i == num) Success(in, result)
else rule(in) match {
case Success(out, a) => {
result(i) = a
rep(i + 1, out)
}
case Failure => Failure
case err: Error[_] => err
}
}
in => rep(0, in)
}
}
| extendedmind/neo4j-scala | src/main/scala/org/neo4j/scala/util/scalax/rules/SeqRule.scala | Scala | agpl-3.0 | 3,289 |
import compiletime.uninitialized
class Memo[A](x: => A):
private var cached1: A = uninitialized
private var cached: A = uninitialized
private var known: Boolean = false
def force =
if !known then
known = true
cached = x
val y = cached1
cached
| dotty-staging/dotty | tests/pos/i11225.scala | Scala | apache-2.0 | 278 |
package se.gigurra.gat.util
import com.jogamp.opengl.util.glsl.ShaderCode
import com.jogamp.opengl.util.glsl.ShaderProgram
import Edit.EditCls
import javax.media.opengl.GL2ES2
object ShaderUtil {
def buildShader(gl: GL2ES2, shaderType: Int, sourceCode: String): ShaderCode = {
new ShaderCode(shaderType, 1, Array(Array(sourceCode))).edit(_.compile(gl))
}
def buildVertexShader(gl: GL2ES2, sourceCode: String): ShaderCode = {
buildShader(gl, GL2ES2.GL_VERTEX_SHADER, sourceCode)
}
def buildVertexShaderFromFile(gl: GL2ES2, filePath: String): ShaderCode = {
buildVertexShader(gl, FileUtil.file2String(filePath))
}
def buildFragmentShader(gl: GL2ES2, sourceCode: String): ShaderCode = {
buildShader(gl, GL2ES2.GL_FRAGMENT_SHADER, sourceCode)
}
def buildFragmentShaderFromFile(gl: GL2ES2, filePath: String): ShaderCode = {
buildFragmentShader(gl, FileUtil.file2String(filePath))
}
def buildProgram(gl: GL2ES2, shaders: ShaderCode*): ShaderProgram = {
val shaderProgram = new ShaderProgram()
for (shader <- shaders)
shaderProgram.add(shader)
if (!shaderProgram.link(gl, System.err))
throw new RuntimeException("Compiling shader failed")
shaderProgram
}
def buildProgramFromFile(
gl: GL2ES2,
vertexShaderFilePath: String,
fragmentShaderFilePath: String): ShaderProgram = {
buildProgram(
gl,
buildVertexShaderFromFile(gl, vertexShaderFilePath),
buildFragmentShaderFromFile(gl, fragmentShaderFilePath))
}
}
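// Illustrative usage sketch (added for clarity; not part of the original utility).
// The shader file paths are hypothetical placeholders; a current GL context is required,
// so this would typically be called from GLEventListener.init.
object ShaderUtilUsageExample {
  import javax.media.opengl.GLAutoDrawable
  def initShaders(drawable: GLAutoDrawable): Unit = {
    val gl = drawable.getGL.getGL2ES2
    // Compiles the vertex and fragment stages and links them into one program.
    val program = ShaderUtil.buildProgramFromFile(gl, "shaders/basic.vert", "shaders/basic.frag")
    program.useProgram(gl, true)
  }
}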
| GiGurra/gat | src/main/scala/se/gigurra/gat/util/ShaderUtil.scala | Scala | gpl-2.0 | 1,523 |
package scala.slick.lifted
import scala.slick.ast.{Node, BinaryNode}
import scala.slick.SlickException
object Case {
/** A when...then clause in a Case expression. */
final case class WhenNode(val left: Node, val right: Node) extends BinaryNode {
protected[this] def nodeRebuild(left: Node, right: Node): Node = copy(left = left, right = right)
}
/** A 'case' expression. All 'clauses' must be of type WhenNode. */
final case class CaseNode(val clauses: IndexedSeq[Node], val elseClause: Node) extends Node {
val nodeChildren = elseClause +: clauses
def nodeMapChildren(f: Node => Node): Node = {
val e = f(elseClause)
val c = nodeMapNodes(clauses, f)
if(e.ne(elseClause) || c.isDefined) CaseNode(c.getOrElse(clauses), e)
else this
}
}
def If[C <: Column[_] : CanBeQueryCondition](cond: C) = new UntypedWhen(Node(cond))
@deprecated("Use If instead of when", "0.10.0-M2")
def when[C <: Column[_] : CanBeQueryCondition](cond: C) = If(cond)
final class UntypedWhen(cond: Node) {
def Then[B : BaseTypeMapper](res: Column[B]) = new TypedCase[B,B](IndexedSeq(new WhenNode(cond, Node(res))))
@deprecated("Use Then instead of then", "0.10.0-M2")
def `then`[B : BaseTypeMapper](res: Column[B]) = Then(res)
def Then[B](res: Column[Option[B]]) = res.typeMapper match {
case tmt: OptionTypeMapper[_] =>
new TypedCase[B,Option[B]](IndexedSeq(new WhenNode(cond, Node(res))))(tmt.base, tmt)
case tm => throw new SlickException("Unexpected non-Option TypeMapper "+tm+" for Option type")
}
@deprecated("Use Then instead of then", "0.10.0-M2")
def `then`[B](res: Column[Option[B]]) = Then(res)
}
final class TypedCase[B : TypeMapper, T : TypeMapper](clauses: IndexedSeq[Node])
extends Column[Option[B]] {
def nodeDelegate = CaseNode(clauses, ConstColumn.NULL)
def If[C <: Column[_] : CanBeQueryCondition](cond: C) = new TypedWhen[B,T](Node(cond), clauses)
@deprecated("Use If instead of when", "0.10.0-M2")
def when[C <: Column[_] : CanBeQueryCondition](cond: C) = If(cond)
def Else(res: Column[T]): Column[T] = new TypedCaseWithElse[T](clauses, Node(res))
@deprecated("Use Else instead of otherwise", "0.10.0-M2")
def otherwise(res: Column[T]): Column[T] = Else(res)
}
final class TypedWhen[B : TypeMapper, T : TypeMapper](cond: Node, parentClauses: IndexedSeq[Node]) {
def Then(res: Column[T]) = new TypedCase[B,T](new WhenNode(cond, Node(res)) +: parentClauses)
@deprecated("Use Then instead of then", "0.10.0-M2")
def `then`(res: Column[T]) = Then(res)
}
final class TypedCaseWithElse[T : TypeMapper](clauses: IndexedSeq[Node], elseClause: Node) extends Column[T] {
def nodeDelegate = CaseNode(clauses, elseClause)
}
}
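// Illustrative usage sketch (added for clarity, comment only; not part of the original file).
// Given lifted columns `cond: Column[Boolean]`, `a: Column[Int]` and `b: Column[Int]`,
//   val result = Case If cond Then a Else b   // result: Column[Int]
// builds a SQL CASE expression: each `If ... Then` pair contributes a WhenNode clause and
// `Else` supplies the fallback; without `Else` the expression is Option-valued (NULL when
// no condition matches).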
| zefonseca/slick-1.0.0-scala.2.11.1 | src/main/scala/scala/slick/lifted/Case.scala | Scala | bsd-2-clause | 2,785 |
package org.apache.spark.mllib.clustering
import org.apache.spark.broadcast.Broadcast
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.{SparkContext, Logging}
import org.apache.spark.annotation.Experimental
import org.apache.spark.mllib.linalg.{Vector, Vectors}
import org.apache.spark.mllib.linalg.BLAS.{axpy, scal}
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.Utils
import org.apache.spark.util.random.XORShiftRandom
/**
* Created by Jason on 2015/12/4 0004.
*/
object MyKmean extends KMeans {
// def pointCost2(centers: TraversableOnce[VectorWithNorm],
// point: Vector) = KMeans.pointCost(centers, new VectorWithNorm(point))
/**
* Returns the index of the closest center to the given point, as well as the squared distance.
*/
def findClosest(centers: TraversableOnce[Vector],p: Vector): (Int,Double) = {
KMeans.findClosest(clusterCentersWithNorm(centers), new VectorWithNorm(p))
}
def clusterCentersWithNorm(clusterCenters: TraversableOnce[Vector]): TraversableOnce[VectorWithNorm] =
clusterCenters.map(new VectorWithNorm(_))
def fastSquaredDistance( v1: Vector, norm1:Double, v2: Vector, norm2:Double): Double = {
KMeans.fastSquaredDistance(new VectorWithNorm(v1,norm1),new VectorWithNorm(v2,norm2))
}
}
| henryhezhe2003/simiTerm | src/main/scala/org/apache/spark/mllib/clustering/MyKmean.scala | Scala | apache-2.0 | 1,387 |
package org.opencompare.api.java.io
import org.opencompare.api.java.PCMFactory
import org.opencompare.api.java.interpreter.CellContentInterpreter
import org.scalatest.{Matchers, FlatSpec}
import scala.io.Source
import collection.JavaConversions._
/**
* Created by gbecan on 20/11/15.
*/
abstract class CsvScalaLoaderTest(val factory : PCMFactory, val cellContentInterpreter: CellContentInterpreter) extends FlatSpec with Matchers {
it should "load CSV with feature groups" in {
val csv = Source.fromInputStream(getClass.getClassLoader.getResourceAsStream("csv/Feature-group.csv")).mkString
val csvLoader = new CSVLoader(factory, cellContentInterpreter)
val pcmContainers = csvLoader.load(csv)
pcmContainers shouldNot be ('empty)
val pcm = pcmContainers.head.getPcm
withClue("features") (pcm.getFeatures.size() should be (2))
withClue("concrete features")(pcm.getConcreteFeatures.size() should be (3))
for (feature <- pcm.getFeatures) {
withClue("top feature name") (Set("Products", "FG") should contain (feature.getName))
}
for (feature <- pcm.getConcreteFeatures) {
withClue("feature name") (Set("Products", "F1", "F2") should contain (feature.getName))
}
withClue("products")(pcm.getProducts.size() should be (2))
}
}
| gbecan/OpenCompare | org.opencompare/api-java/src/test/scala/org/opencompare/api/java/io/CsvScalaLoaderTest.scala | Scala | apache-2.0 | 1,298 |
/*
* Copyright 2012 Jahziah Wagner <jahziah[dot]wagner[at]gmail[dot]com>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.openlobby.primer
import scala.collection.mutable.ListBuffer
class GenericObserver[T] {
private val observers = new ListBuffer[T]
/**
* Register as an observer.
*/
def register(observer : T) = observers append observer
/**
* Unregister as an observer.
*
* This should be done if a module is removed.
*/
  def unregister(observer : T) = observers -= observer
/**
* Returns a scala.collection.mutable.ListBuffer of observers.
*/
def getObservers = observers
}
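// Illustrative usage sketch (added for clarity; the ChatListener trait below is hypothetical
// and not part of this module).
object GenericObserverExample {
  trait ChatListener { def onMessage(msg: String): Unit }
  def main(args: Array[String]): Unit = {
    val observers = new GenericObserver[ChatListener]
    val printer = new ChatListener { def onMessage(msg: String): Unit = println(msg) }
    observers.register(printer)
    observers.getObservers.foreach(_.onMessage("hello"))
    observers.unregister(printer)
  }
}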
| jahwag/OpenLobby | modules/Primer/src/main/scala/com/openlobby/primer/GenericObserver.scala | Scala | apache-2.0 | 1,183 |
/**
* Copyright 2016, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperables.spark.wrappers.estimators
import io.deepsense.deeplang.params.ParamPair
import io.deepsense.deeplang.params.selections.NameSingleColumnSelection
class MultilayerPerceptronClassifierSmokeTest extends AbstractEstimatorModelWrapperSmokeTest {
override def className: String = "MultilayerPerceptronClassifier"
override val estimator = new MultilayerPerceptronClassifier()
import estimator._
override val estimatorParams: Seq[ParamPair[_]] = Seq(
featuresColumn -> NameSingleColumnSelection("myFeatures"),
labelColumn -> NameSingleColumnSelection("myRating"),
layersParam -> Array(3.0, 2.0, 1.0),
maxIterations -> 120.0,
predictionColumn -> "prediction",
seed -> 100.0,
tolerance -> 2E-5
)
}
| deepsense-io/seahorse-workflow-executor | deeplang/src/it/scala/io/deepsense/deeplang/doperables/spark/wrappers/estimators/MultilayerPerceptronClassifierSmokeTest.scala | Scala | apache-2.0 | 1,371 |
/*
* Copyright 2013 The SIRIS Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* The SIRIS Project is a cooperation between Beuth University, Berlin and the
* HCI Group at the University of Würzburg. The project is funded by the German
* Federal Ministry of Education and Research (grant no. 17N4409).
*/
package simx.core.component
import simx.core.entity.component.ComponentAspect
import simx.core.ontology.GroundedSymbol
/**
* User: dwiebusch
* Date: 20.11.13
* Time: 18:51
*/
abstract class SingletonComponent(val componentType : GroundedSymbol, val componentName : Symbol){
protected def componentAspect : ComponentAspect[_ <: Component]
//ComponentCreationComponent.create(componentAspect)
}
| simulator-x/core | src/simx/core/component/SingletonComponent.scala | Scala | apache-2.0 | 1,262 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import org.scalatest.{FlatSpec, Matchers}
import com.intel.analytics.bigdl.dllib.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.dllib.utils.RandomGenerator._
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest
import com.intel.analytics.bigdl.dllib.utils.{T, Table}
import scala.util.Random
class PoolerSpec extends FlatSpec with Matchers {
"updateOutput Float type" should "work properly" in {
val feature1 = Array(
0.023863613605499268, 0.100520193576812744, 0.579659581184387207,
0.491799056529998779, 0.695049762725830078, 0.174113810062408447,
0.514802277088165283, 0.645381748676300049, 0.610754907131195068,
0.642783403396606445, 0.261436760425567627, 0.865309834480285645,
0.779586195945739746, 0.805720150470733643, 0.039021611213684082,
0.052066206932067871, 0.859684348106384277, 0.286012887954711914,
0.183007895946502686, 0.657920598983764648, 0.486495614051818848,
0.339991390705108643, 0.349600136280059814, 0.292829811573028564,
0.874850273132324219, 0.923728287220001221, 0.853209257125854492,
0.078126728534698486, 0.975298523902893066, 0.889039456844329834,
0.757552802562713623, 0.009770631790161133, 0.639949500560760498,
0.384162366390228271, 0.993775784969329834, 0.225636243820190430,
0.152042329311370850, 0.518522977828979492, 0.346138358116149902,
0.560805261135101318, 0.197446644306182861, 0.270632088184356689,
0.537619173526763916, 0.282237291336059570, 0.418838739395141602,
0.348786175251007080, 0.827486872673034668, 0.671141088008880615,
0.734223365783691406, 0.461709976196289062, 0.463822364807128906,
0.256826639175415039, 0.187998294830322266, 0.387186825275421143,
0.027970135211944580, 0.336534321308135986, 0.078408479690551758,
0.748133420944213867, 0.996697187423706055, 0.590924799442291260,
0.363863050937652588, 0.244512259960174561, 0.605456709861755371,
0.989919960498809814, 0.998104333877563477, 0.318823933601379395,
0.293298780918121338, 0.240437865257263184, 0.269145488739013672,
0.321916043758392334, 0.241542100906372070, 0.097301602363586426,
0.139740049839019775, 0.727295756340026855, 0.735020518302917480,
0.977046966552734375, 0.562069535255432129, 0.962157845497131348,
0.896494269371032715, 0.919544279575347900, 0.769982337951660156,
0.902598083019256592, 0.699079096317291260, 0.970299720764160156,
0.877977848052978516, 0.445257008075714111, 0.903108179569244385,
0.029258608818054199, 0.953712522983551025, 0.740538537502288818,
0.229142010211944580, 0.324616789817810059, 0.546005189418792725,
0.471910834312438965, 0.479964077472686768, 0.404208302497863770,
0.816056787967681885, 0.116290867328643799, 0.845461726188659668,
0.313867926597595215, 0.281320571899414062, 0.693770170211791992,
0.623112499713897705, 0.370123684406280518, 0.595665276050567627,
0.433298051357269287, 0.971214890480041504, 0.087709188461303711,
0.069373369216918945, 0.274347186088562012, 0.470574259757995605,
0.883642554283142090, 0.518250524997711182, 0.118440926074981689,
0.606658637523651123, 0.529120385646820068, 0.991135418415069580,
0.020969033241271973, 0.601271688938140869, 0.031737148761749268,
0.699844896793365479, 0.006896257400512695, 0.478346049785614014,
0.267558634281158447, 0.762180626392364502, 0.907826840877532959,
0.316000878810882568, 0.405982732772827148)
val feature2 = Array(
0.873747766017913818, 0.145658850669860840, 0.256294071674346924,
0.280913352966308594, 0.062630355358123779, 0.272662281990051270,
0.524160504341125488, 0.110454082489013672, 0.619955241680145264,
0.568557560443878174, 0.214293479919433594, 0.648296296596527100,
0.165463507175445557, 0.419352889060974121, 0.852317929267883301,
0.628634154796600342, 0.678495228290557861, 0.896998584270477295,
0.890723347663879395, 0.488525688648223877, 0.384370744228363037,
0.571207761764526367, 0.788873314857482910, 0.954643964767456055,
0.969983577728271484, 0.203537940979003906, 0.782353222370147705,
0.848326086997985840, 0.304318606853485107, 0.800064325332641602,
0.424848318099975586, 0.603751122951507568)
val feature3 = Array(
0.883362829685211182, 0.017709493637084961, 0.740627527236938477,
0.975574254989624023, 0.904063880443572998, 0.293959677219390869,
0.301572918891906738, 0.235482156276702881)
val features = new Table()
features.insert(Tensor(Storage(feature1.map(x => x.toFloat))).resize(1, 2, 8, 8))
features.insert(Tensor(Storage(feature2.map(x => x.toFloat))).resize(1, 2, 4, 4))
features.insert(Tensor(Storage(feature3.map(x => x.toFloat))).resize(1, 2, 2, 2))
val rois = Tensor[Float](
T(T(0, 0, 10, 10),
T(0, 0, 60, 60),
T(0, 0, 500, 500))).resize(3, 4)
val input = T(features, T(rois))
val pooler = Pooler[Float](
resolution = 2, scales = Array(0.125f, 0.0625f, 0.03125f), samplingRatio = 2)
val res = pooler.forward(input)
val expectedRes = Array(
0.023863614098541271, 0.057400867850185459,
0.280628564810485104, 0.305623784018421591,
0.998104330194710485, 0.700919154915548130,
0.622570158108509184, 0.567865130189475190,
0.482630044810485019, 0.516544848104851085,
0.549660193754091783, 0.254286142185043710,
0.634043431284018491, 0.601322654816104865,
0.528360197830765149, 0.564136290194751285,
0.243893563747406006, 0.000000000000000000,
0.000000000000000000, 0.000000000000000000,
0.058870539069175720, 0.000000000000000000,
0.000000000000000000, 0.000000000000000000)
for (i <- expectedRes.indices) {
assert(Math.abs(res.storage().array()(i) - expectedRes(i)) < 1e-6)
}
}
"updateOutput Double type" should "work properly" in {
val feature1 = Array(
0.023863613605499268, 0.100520193576812744, 0.579659581184387207,
0.491799056529998779, 0.695049762725830078, 0.174113810062408447,
0.514802277088165283, 0.645381748676300049, 0.610754907131195068,
0.642783403396606445, 0.261436760425567627, 0.865309834480285645,
0.779586195945739746, 0.805720150470733643, 0.039021611213684082,
0.052066206932067871, 0.859684348106384277, 0.286012887954711914,
0.183007895946502686, 0.657920598983764648, 0.486495614051818848,
0.339991390705108643, 0.349600136280059814, 0.292829811573028564,
0.874850273132324219, 0.923728287220001221, 0.853209257125854492,
0.078126728534698486, 0.975298523902893066, 0.889039456844329834,
0.757552802562713623, 0.009770631790161133, 0.639949500560760498,
0.384162366390228271, 0.993775784969329834, 0.225636243820190430,
0.152042329311370850, 0.518522977828979492, 0.346138358116149902,
0.560805261135101318, 0.197446644306182861, 0.270632088184356689,
0.537619173526763916, 0.282237291336059570, 0.418838739395141602,
0.348786175251007080, 0.827486872673034668, 0.671141088008880615,
0.734223365783691406, 0.461709976196289062, 0.463822364807128906,
0.256826639175415039, 0.187998294830322266, 0.387186825275421143,
0.027970135211944580, 0.336534321308135986, 0.078408479690551758,
0.748133420944213867, 0.996697187423706055, 0.590924799442291260,
0.363863050937652588, 0.244512259960174561, 0.605456709861755371,
0.989919960498809814, 0.998104333877563477, 0.318823933601379395,
0.293298780918121338, 0.240437865257263184, 0.269145488739013672,
0.321916043758392334, 0.241542100906372070, 0.097301602363586426,
0.139740049839019775, 0.727295756340026855, 0.735020518302917480,
0.977046966552734375, 0.562069535255432129, 0.962157845497131348,
0.896494269371032715, 0.919544279575347900, 0.769982337951660156,
0.902598083019256592, 0.699079096317291260, 0.970299720764160156,
0.877977848052978516, 0.445257008075714111, 0.903108179569244385,
0.029258608818054199, 0.953712522983551025, 0.740538537502288818,
0.229142010211944580, 0.324616789817810059, 0.546005189418792725,
0.471910834312438965, 0.479964077472686768, 0.404208302497863770,
0.816056787967681885, 0.116290867328643799, 0.845461726188659668,
0.313867926597595215, 0.281320571899414062, 0.693770170211791992,
0.623112499713897705, 0.370123684406280518, 0.595665276050567627,
0.433298051357269287, 0.971214890480041504, 0.087709188461303711,
0.069373369216918945, 0.274347186088562012, 0.470574259757995605,
0.883642554283142090, 0.518250524997711182, 0.118440926074981689,
0.606658637523651123, 0.529120385646820068, 0.991135418415069580,
0.020969033241271973, 0.601271688938140869, 0.031737148761749268,
0.699844896793365479, 0.006896257400512695, 0.478346049785614014,
0.267558634281158447, 0.762180626392364502, 0.907826840877532959,
0.316000878810882568, 0.405982732772827148)
val feature2 = Array(
0.873747766017913818, 0.145658850669860840, 0.256294071674346924,
0.280913352966308594, 0.062630355358123779, 0.272662281990051270,
0.524160504341125488, 0.110454082489013672, 0.619955241680145264,
0.568557560443878174, 0.214293479919433594, 0.648296296596527100,
0.165463507175445557, 0.419352889060974121, 0.852317929267883301,
0.628634154796600342, 0.678495228290557861, 0.896998584270477295,
0.890723347663879395, 0.488525688648223877, 0.384370744228363037,
0.571207761764526367, 0.788873314857482910, 0.954643964767456055,
0.969983577728271484, 0.203537940979003906, 0.782353222370147705,
0.848326086997985840, 0.304318606853485107, 0.800064325332641602,
0.424848318099975586, 0.603751122951507568)
val feature3 = Array(
0.883362829685211182, 0.017709493637084961, 0.740627527236938477,
0.975574254989624023, 0.904063880443572998, 0.293959677219390869,
0.301572918891906738, 0.235482156276702881)
val features = new Table()
features.insert(Tensor(Storage(feature1.map(x => x))).resize(1, 2, 8, 8))
features.insert(Tensor(Storage(feature2.map(x => x))).resize(1, 2, 4, 4))
features.insert(Tensor(Storage(feature3.map(x => x))).resize(1, 2, 2, 2))
val rois = Tensor[Double](
T(T(0, 0, 10, 10),
T(0, 0, 60, 60),
T(0, 0, 500, 500))).resize(3, 4)
val input = T(features, T(rois))
val pooler = Pooler[Double](resolution = 2, scales = Array(0.125f, 0.0625f, 0.03125f),
samplingRatio = 2)
val res = pooler.forward(input)
val expectedRes = Array(
0.023863614098541271, 0.057400867850185459,
0.280628564810485104, 0.305623784018421591,
0.998104330194710485, 0.700919154915548130,
0.622570158108509184, 0.567865130189475190,
0.482630044810485019, 0.516544848104851085,
0.549660193754091783, 0.254286142185043710,
0.634043431284018491, 0.601322654816104865,
0.528360197830765149, 0.564136290194751285,
0.243893563747406006, 0.000000000000000000,
0.000000000000000000, 0.000000000000000000,
0.058870539069175720, 0.000000000000000000,
0.000000000000000000, 0.000000000000000000)
for (i <- expectedRes.indices) {
assert(Math.abs(res.storage().array()(i) - expectedRes(i)) < 1e-6)
}
}
}
class PoolerSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val input = T()
RNG.setSeed(10)
val feature0 = Tensor[Float](1, 2, 8, 8).apply1(_ => RNG.uniform(-1, 1).toFloat)
val feature1 = Tensor[Float](1, 2, 4, 4).apply1(_ => RNG.uniform(-1, 1).toFloat)
val feature2 = Tensor[Float](1, 2, 2, 2).apply1(_ => RNG.uniform(-1, 1).toFloat)
val features = T(feature0, feature1, feature2)
val rois = Tensor[Float](
T(T(0, 0, 10, 10),
T(0, 0, 60, 60),
T(0, 0, 500, 500))).resize(3, 4)
input(1.0f) = features
input(2.0f) = rois
val pooler = new Pooler[Float](resolution = 2, scales = Array(0.25f, 0.125f, 0.0625f),
samplingRatio = 2).setName("pooler")
runSerializationTest(pooler, input)
}
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/PoolerSpec.scala | Scala | apache-2.0 | 12,847 |
package es.weso.rdf.jena
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
import org.scalatest.Matchers
import org.scalatest.FunSpec
import es.weso.rdf.triples.RDFTriple
import es.weso.rdf.nodes._
import es.weso.rdf.jena._
import com.hp.hpl.jena.rdf.model.ModelFactory
import es.weso.rdf._
import es.weso.rdf.PREFIXES._
@RunWith(classOf[JUnitRunner])
class RDFJenaSpec
extends FunSpec
with JenaBased
with Matchers {
describe("Adding triples") {
it("should be able to add a single triple with IRIs") {
val emptyModel = ModelFactory.createDefaultModel
val rdf: RDFAsJenaModel = RDFAsJenaModel(emptyModel)
val map: Map[String, IRI] = Map("" -> IRI("http://example.org#"))
val pm: PrefixMap = PrefixMap(map)
rdf.addPrefixMap(pm)
rdf.addTriples(Set(RDFTriple(IRI("http://example.org#a"), IRI("http://example.org#b"), IRI("http://example.org#c"))))
val m2 = str2model("""|@prefix : <http://example.org#> .
|:a :b :c .
|""".stripMargin)
shouldBeIsomorphic(rdf.model, m2)
}
it("should be able to add some triples with BNodes") {
val emptyModel = ModelFactory.createDefaultModel
val rdf: RDFAsJenaModel = RDFAsJenaModel(emptyModel)
val map: Map[String, IRI] = Map(
"" -> IRI("http://example.org#"), "foaf" -> IRI("http://foaf.org#")
)
val pm: PrefixMap = PrefixMap(map)
rdf.addPrefixMap(pm)
rdf.addTriples(Set(
RDFTriple(IRI("http://example.org#a"), IRI("http://foaf.org#knows"), BNodeId("b" + 1)), RDFTriple(BNodeId("b" + 1), IRI("http://foaf.org#knows"), BNodeId("b" + 2)), RDFTriple(BNodeId("b" + 2), IRI("http://foaf.org#name"), StringLiteral("pepe"))
))
val m2 = str2model("""|@prefix : <http://example.org#> .
|@prefix foaf: <http://foaf.org#> .
|:a foaf:knows _:x .
|_:x foaf:knows _:y .
|_:y foaf:name "pepe" .
|""".stripMargin)
shouldBeIsomorphic(rdf.model, m2)
}
}
describe("Parsing other formats") {
it("Should be able to parse NTriples") {
val m1 = str2model("""|@prefix : <http://example.org#> .
|:a :b :c .
|""".stripMargin)
val str_triples = "<http://example.org#a> <http://example.org#b> <http://example.org#c> ."
val rdf: RDFAsJenaModel = RDFAsJenaModel(ModelFactory.createDefaultModel())
val rdf2 = rdf.parse(str_triples, "NTRIPLES").get
val m2 = RDFAsJenaModel.extractModel(rdf2)
shouldBeIsomorphic(m1, m2)
}
}
describe("Querying RDF graphs") {
it("Should be able to get objects of some type") {
val str = """|@prefix : <http://example.org#> .
|:a a :C ; :p 1 .
|:b a :C, :D .
|""".stripMargin
val rdf = RDFAsJenaModel.empty.parse(str, "TURTLE").get
val typeC = IRI("http://example.org#C")
val triples = rdf.triplesWithType(typeC)
val a = IRI("http://example.org#a")
val b = IRI("http://example.org#b")
val t1 = RDFTriple(a, rdf_type, typeC)
val t2 = RDFTriple(b, rdf_type, typeC)
triples should be(Set(t1, t2))
}
it("Should be able to get subjects") {
val str = """|@prefix : <http://example.org#> .
|:a a :C ; :p 1 .
|:b a :C, :D .
|""".stripMargin
val rdf = RDFAsJenaModel.empty.parse(str, "TURTLE").get
val a = IRI("http://example.org#a")
val b = IRI("http://example.org#b")
val p = IRI("http://example.org#p")
val typeC = IRI("http://example.org#C")
val triples = rdf.triplesWithSubject(a)
val t1 = RDFTriple(a, rdf_type, typeC)
val t2 = RDFTriple(a, p, IntegerLiteral(1))
triples should be(Set(t1, t2))
}
it("Should be able to get subjects with xsd:date") {
val str = """|@prefix : <http://example.org#> .
|@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
|:a :date "25/10/2015"^^xsd:date .
|""".stripMargin
val rdf = RDFAsJenaModel.empty.parse(str, "TURTLE").get
val a = IRI("http://example.org#a")
val date = IRI("http://example.org#date")
val value = DatatypeLiteral("25/10/2015", IRI("http://www.w3.org/2001/XMLSchema#date"))
val triples = rdf.triplesWithSubject(a)
val t1 = RDFTriple(a, date, value)
triples should be(Set(t1))
}
it("Should be able to get subjects with xsd:integer") {
val str = """|@prefix : <http://example.org#> .
|@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
|:a :age 15 .
|""".stripMargin
val rdf = RDFAsJenaModel.empty.parse(str, "TURTLE").get
val a = IRI("http://example.org#a")
val age = IRI("http://example.org#age")
val value = IntegerLiteral(15)
val triples = rdf.triplesWithSubject(a)
val t1 = RDFTriple(a, age, value)
triples should be(Set(t1))
}
it("Should be able to get subjects with datatype :xxx") {
val str = """|@prefix : <http://example.org#> .
|@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
|:a :age "15"^^:xxx .
|""".stripMargin
val rdf = RDFAsJenaModel.empty.parse(str, "TURTLE").get
val a = IRI("http://example.org#a")
val age = IRI("http://example.org#age")
val value = DatatypeLiteral("15", IRI("http://example.org#xxx"))
val triples = rdf.triplesWithSubject(a)
val t1 = RDFTriple(a, age, value)
triples should be(Set(t1))
}
it("Should be able to get subjects with lang literal") {
val str = """|@prefix : <http://example.org#> .
|@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
|:a :age "hi"@en .
|""".stripMargin
val rdf = RDFAsJenaModel.empty.parse(str, "TURTLE").get
val a = IRI("http://example.org#a")
val age = IRI("http://example.org#age")
val value = LangLiteral("hi", Lang("en"))
val triples = rdf.triplesWithSubject(a)
val t1 = RDFTriple(a, age, value)
triples should be(Set(t1))
}
}
}
| labra/wesin | src/test/scala/es/weso/rdf/jena/RDFJenaSpec.scala | Scala | lgpl-3.0 | 6,365 |
package immortan
import java.util.concurrent.{Executors, TimeUnit}
import com.google.common.cache.CacheBuilder
import fr.acinq.bitcoin.Crypto
import fr.acinq.bitcoin.Crypto.PublicKey
import fr.acinq.eclair.router.Graph.GraphStructure.{DirectedGraph, GraphEdge}
import fr.acinq.eclair.router.RouteCalculation.handleRouteRequest
import fr.acinq.eclair.router.Router.{Data, PublicChannel, RouteRequest}
import fr.acinq.eclair.router.{ChannelUpdateExt, Router}
import fr.acinq.eclair.wire._
import fr.acinq.eclair.{CltvExpiryDelta, MilliSatoshi}
import immortan.PathFinder._
import immortan.crypto.Tools._
import immortan.crypto.{CanBeRepliedTo, StateMachine}
import immortan.fsm.SendMultiPart
import immortan.utils.Rx
import rx.lang.scala.Subscription
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.concurrent.{ExecutionContext, ExecutionContextExecutor}
import scala.util.Random.shuffle
object PathFinder {
val CMDStartPeriodicResync = "cmd-start-periodic-resync"
val CMDLoadGraph = "cmd-load-graph"
val CMDResync = "cmd-resync"
val WAITING = 0
val OPERATIONAL = 1
sealed trait PathFinderRequest { val sender: CanBeRepliedTo }
case class FindRoute(sender: CanBeRepliedTo, request: RouteRequest) extends PathFinderRequest
case class GetExpectedPaymentFees(sender: CanBeRepliedTo, cmd: SendMultiPart) extends PathFinderRequest
case class GetExpectedRouteFees(sender: CanBeRepliedTo, payee: PublicKey) extends PathFinderRequest
case class ExpectedFees(interHop: AvgHopParams, payeeHop: AvgHopParams) {
def partialRoute(interHops: Int): Seq[AvgHopParams] = List.fill(interHops)(interHop) :+ payeeHop
def accumulate(hasRelayFee: AvgHopParams, acc: MilliSatoshi): MilliSatoshi = hasRelayFee.relayFee(acc) + acc
def percentOf(amount: MilliSatoshi, interHops: Int): Double = ratio(amount, totalWithFeeReserve(amount, interHops) - amount)
def totalWithFeeReserve(amount: MilliSatoshi, interHops: Int): MilliSatoshi = partialRoute(interHops).foldRight(amount)(accumulate)
def totalCltvDelta(interHops: Int): CltvExpiryDelta = partialRoute(interHops).map(_.cltvExpiryDelta).reduce(_ + _)
}
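  // Note (added for clarity): totalWithFeeReserve folds from the payee hop backwards towards
  // the sender, because each hop charges its relay fee on the amount it forwards downstream;
  // percentOf then relates that accumulated fee reserve to the base amount.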
}
abstract class PathFinder(val normalBag: NetworkBag, val hostedBag: NetworkBag) extends StateMachine[Data] { me =>
private val extraEdgesCache = CacheBuilder.newBuilder.expireAfterWrite(1, TimeUnit.DAYS).maximumSize(500).build[java.lang.Long, GraphEdge]
val extraEdges: mutable.Map[java.lang.Long, GraphEdge] = extraEdgesCache.asMap.asScala
var listeners: Set[CanBeRepliedTo] = Set.empty
var subscription: Option[Subscription] = None
var syncMaster: Option[SyncMaster] = None
implicit val context: ExecutionContextExecutor = ExecutionContext fromExecutor Executors.newSingleThreadExecutor
def process(changeMessage: Any): Unit = scala.concurrent.Future(me doProcess changeMessage)
private val RESYNC_PERIOD: Long = 1000L * 3600 * 24 * 4
// We don't load routing data on every startup but when user (or system) actually needs it
become(Data(channels = Map.empty, hostedChannels = Map.empty, DirectedGraph.empty), WAITING)
def getLastTotalResyncStamp: Long
def getLastNormalResyncStamp: Long
def updateLastTotalResyncStamp(stamp: Long): Unit
def updateLastNormalResyncStamp(stamp: Long): Unit
def getPHCExtraNodes: Set[RemoteNodeInfo]
def getExtraNodes: Set[RemoteNodeInfo]
def doProcess(change: Any): Unit = (change, state) match {
case (CMDStartPeriodicResync, WAITING | OPERATIONAL) if subscription.isEmpty =>
val repeat = Rx.repeat(Rx.ioQueue, Rx.incHour, times = 97 to Int.MaxValue by 97)
// Resync every RESYNC_PERIOD hours + 1 hour to trigger a full resync, not just PHC resync
val delay = Rx.initDelay(repeat, getLastTotalResyncStamp, RESYNC_PERIOD, preStartMsec = 500)
subscription = delay.subscribe(_ => me process CMDResync).asSome
case (calc: GetExpectedRouteFees, OPERATIONAL) => calc.sender process calcExpectedFees(calc.payee)
case (calc: GetExpectedPaymentFees, OPERATIONAL) => calc.sender process calc.cmd.copy(expectedRouteFees = calcExpectedFees(calc.cmd.targetNodeId).asSome)
case (fr: FindRoute, OPERATIONAL) => fr.sender process handleRouteRequest(data.graph replaceEdge fr.request.localEdge, fr.request)
case (request: PathFinderRequest, WAITING) =>
      // We need loaded routing data to process these requests
// load that data before proceeding if it's absent
me process CMDLoadGraph
me process request
case (CMDResync, WAITING) =>
      // We need loaded routing data to sync properly
// load that data before proceeding if it's absent
me process CMDLoadGraph
me process CMDResync
case (CMDLoadGraph, WAITING) =>
val normalShortIdToPubChan = normalBag.getRoutingData
val hostedShortIdToPubChan = hostedBag.getRoutingData
val searchGraph1 = DirectedGraph.makeGraph(normalShortIdToPubChan ++ hostedShortIdToPubChan).addEdges(extraEdges.values)
become(Data(normalShortIdToPubChan, hostedShortIdToPubChan, searchGraph1), OPERATIONAL)
case (CMDResync, OPERATIONAL) if System.currentTimeMillis - getLastNormalResyncStamp > RESYNC_PERIOD =>
val setupData = SyncMasterShortIdData(LNParams.syncParams.syncNodes, getExtraNodes, Set.empty, Map.empty)
val requestNodeAnnounceForChan = for {
info <- getExtraNodes ++ getPHCExtraNodes
edges <- data.graph.vertices.get(info.nodeId)
} yield shuffle(edges).head.desc.shortChannelId
val normalSync = new SyncMaster(normalBag.listExcludedChannels, requestNodeAnnounceForChan, data, LNParams.syncParams.maxNodesToSyncFrom) { self =>
override def onNodeAnnouncement(nodeAnnouncement: NodeAnnouncement): Unit = listeners.foreach(_ process nodeAnnouncement)
override def onChunkSyncComplete(pureRoutingData: PureRoutingData): Unit = me process pureRoutingData
override def onTotalSyncComplete: Unit = me process self
}
syncMaster = normalSync.asSome
listeners.foreach(_ process CMDResync)
normalSync process setupData
case (CMDResync, OPERATIONAL) if System.currentTimeMillis - getLastTotalResyncStamp > RESYNC_PERIOD =>
// Normal resync has happened recently, but PHC resync is outdated (PHC failed last time due to running out of attempts)
// in this case we skip normal sync and start directly with PHC sync to save time and increase PHC sync success chances
attemptPHCSync
case (phcPure: CompleteHostedRoutingData, OPERATIONAL) =>
// First, completely replace PHC data with obtained one
hostedBag.processCompleteHostedData(phcPure)
// Then reconstruct graph with new PHC data
val hostedShortIdToPubChan = hostedBag.getRoutingData
val searchGraph = DirectedGraph.makeGraph(data.channels ++ hostedShortIdToPubChan).addEdges(extraEdges.values)
become(Data(data.channels, hostedShortIdToPubChan, searchGraph), OPERATIONAL)
updateLastTotalResyncStamp(System.currentTimeMillis)
listeners.foreach(_ process phcPure)
case (pure: PureRoutingData, OPERATIONAL) =>
// Notify listener about graph sync progress here
// Update db here to not overload SyncMaster
listeners.foreach(_ process pure)
normalBag.processPureData(pure)
case (sync: SyncMaster, OPERATIONAL) =>
// Get rid of channels that peers know nothing about
val normalShortIdToPubChan = normalBag.getRoutingData
val oneSideShortIds = normalBag.listChannelsWithOneUpdate
val ghostIds = normalShortIdToPubChan.keySet.diff(sync.provenShortIds)
val normalShortIdToPubChan1 = normalShortIdToPubChan -- ghostIds -- oneSideShortIds
val searchGraph = DirectedGraph.makeGraph(normalShortIdToPubChan1 ++ data.hostedChannels).addEdges(extraEdges.values)
become(Data(normalShortIdToPubChan1, data.hostedChannels, searchGraph), OPERATIONAL)
// Update normal checkpoint, if PHC sync fails this time we'll jump to it next time
updateLastNormalResyncStamp(System.currentTimeMillis)
// Perform database cleaning in a different thread since it's slow and we are operational
Rx.ioQueue.foreach(_ => normalBag.removeGhostChannels(ghostIds, oneSideShortIds), none)
// Remove by now useless reference, this may be used to define if sync is on
syncMaster = None
// Notify that normal graph sync is complete
listeners.foreach(_ process sync)
attemptPHCSync
// We always accept and store disabled channels:
// - to reduce subsequent sync traffic if channel remains disabled
// - to account for the case when channel suddenly becomes enabled but we don't know
// - if channel stays disabled for a long time it will be pruned by peers and then by us
case (cu: ChannelUpdate, OPERATIONAL) if data.channels.contains(cu.shortChannelId) =>
val data1 = resolve(data.channels(cu.shortChannelId), cu, normalBag)
become(data1, OPERATIONAL)
case (cu: ChannelUpdate, OPERATIONAL) if data.hostedChannels.contains(cu.shortChannelId) =>
val data1 = resolve(data.hostedChannels(cu.shortChannelId), cu, hostedBag)
become(data1, OPERATIONAL)
case (cu: ChannelUpdate, OPERATIONAL) =>
extraEdges.get(cu.shortChannelId).foreach { extEdge =>
// Last chance: not a known public update, maybe it's a private one
val edge1 = extEdge.copy(updExt = extEdge.updExt withNewUpdate cu)
val data1 = resolveKnownDesc(storeOpt = None, edge1)
become(data1, OPERATIONAL)
}
case (edge: GraphEdge, WAITING | OPERATIONAL) if !data.channels.contains(edge.desc.shortChannelId) =>
      // We add assisted routes to the graph as if they were normal channels, and also remember them to refill later if the graph gets reloaded
      // these edges will be private most of the time, but they may also be public channels that are not yet visible to us for some reason
extraEdgesCache.put(edge.updExt.update.shortChannelId, edge)
val data1 = data.copy(graph = data.graph replaceEdge edge)
become(data1, state)
case _ =>
}
def resolve(pubChan: PublicChannel, upd1: ChannelUpdate, store: NetworkBag): Data = {
    // Resolving normal/hosted public channel updates we get while trying to route payments
val desc = Router.getDesc(upd1, pubChan.ann)
pubChan.getChannelUpdateSameSideAs(upd1) match {
case Some(oldExt) if oldExt.update.timestamp < upd1.timestamp =>
// We have an old updateExt and obtained one is newer, this is fine
val edge = GraphEdge(desc, oldExt withNewUpdate upd1)
resolveKnownDesc(storeOpt = Some(store), edge)
case None =>
// Somehow we don't have an old updateExt, create a new one
val edge = GraphEdge(desc, ChannelUpdateExt fromUpdate upd1)
resolveKnownDesc(storeOpt = Some(store), edge)
case _ =>
// Our updateExt is newer
data
}
}
def resolveKnownDesc(storeOpt: Option[NetworkBag], edge: GraphEdge): Data = storeOpt match {
// Resolves channel updates which we extract from remote node errors while trying to route payments
// store is optional to make sure private normal/hosted channel updates never make it to our database
case Some(store) if edge.updExt.update.htlcMaximumMsat.isEmpty =>
// Will be queried on next sync and will most likely be excluded
store.removeChannelUpdate(edge.updExt.update.shortChannelId)
data.copy(graph = data.graph removeEdge edge.desc)
case Some(store) =>
// This is a legitimate public update, refresh everywhere
store.addChannelUpdateByPosition(edge.updExt.update)
data.copy(graph = data.graph replaceEdge edge)
case None =>
// This is a legitimate private/unknown-public update
extraEdgesCache.put(edge.updExt.update.shortChannelId, edge)
// Don't save this in DB but update runtime graph
data.copy(graph = data.graph replaceEdge edge)
}
def nodeIdFromUpdate(cu: ChannelUpdate): Option[Crypto.PublicKey] =
data.channels.get(cu.shortChannelId).map(_.ann getNodeIdSameSideAs cu) orElse
data.hostedChannels.get(cu.shortChannelId).map(_.ann getNodeIdSameSideAs cu) orElse
extraEdges.get(cu.shortChannelId).map(_.desc.from)
def attemptPHCSync: Unit = {
if (LNParams.syncParams.phcSyncNodes.nonEmpty) {
val master = new PHCSyncMaster(data) { override def onSyncComplete(pure: CompleteHostedRoutingData): Unit = me process pure }
master process SyncMasterPHCData(LNParams.syncParams.phcSyncNodes, getPHCExtraNodes, activeSyncs = Set.empty)
} else updateLastTotalResyncStamp(System.currentTimeMillis)
}
def calcExpectedFees(nodeId: PublicKey): ExpectedFees = {
val payeeHops = data.graph.vertices.getOrElse(nodeId, default = Nil).map(_.updExt)
val payeeAvgParams = if (payeeHops.isEmpty) data.avgHopParams else Router.getAvgHopParams(payeeHops)
ExpectedFees(data.avgHopParams, payeeAvgParams)
}
}
| btcontract/wallet | app/src/main/java/immortan/PathFinder.scala | Scala | apache-2.0 | 12,974 |
package scorex.benchmarks
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import scorex.benchmarks.Helpers._
import scorex.crypto.authds.avltree.batch.Operation
object AVLBatchPerformance {
@State(Scope.Thread)
class Basic(proverCnt: Int, opsCnt: Int) {
val preparedOperations = proverCnt
val operationsToApply = opsCnt
var prover: Prover = _
var operations: Seq[Operation] = _
@Setup(Level.Iteration)
def up: Unit = {
prover = getProver(preparedOperations)
val inserts = generateInserts(preparedOperations until (preparedOperations + operationsToApply))
operations = inserts
}
}
class StateWith1000000 extends Basic(1000000, 100000)
class StateWith2000000 extends Basic(2000000, 100000)
class StateWith4000000 extends Basic(4000000, 100000)
class StateWith8000000 extends Basic(8000000, 100000)
class StateWith16000000 extends Basic(16000000, 100000)
class StateWith32000000 extends Basic(32000000, 100000)
}
@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.SECONDS)
@Fork(1)
class AVLBatchPerformance {
import AVLBatchPerformance._
@Benchmark
def apply100KinBatchesOf2KToProverWith1M(s: StateWith1000000): Unit = {
import s._
operations.grouped(2000).foreach { batch =>
batch.foreach(prover.performOneOperation)
prover.generateProof()
}
}
@Benchmark
def apply100KinBatchesOf2KToProverWith2M(s: StateWith2000000): Unit = {
import s._
operations.grouped(2000).foreach { batch =>
batch.foreach(prover.performOneOperation)
prover.generateProof()
}
}
@Benchmark
def apply100KinBatchesOf2KToProverWith4M(s: StateWith4000000): Unit = {
import s._
operations.grouped(2000).foreach { batch =>
batch.foreach(prover.performOneOperation)
prover.generateProof()
}
}
@Benchmark
def apply100KinBatchesOf2KToProverWith8M(s: StateWith8000000): Unit = {
import s._
operations.grouped(2000).foreach { batch =>
batch.foreach(prover.performOneOperation)
prover.generateProof()
}
}
@Benchmark
def apply100KinBatchesOf2KToProverWith16M(s: StateWith16000000): Unit = {
import s._
operations.grouped(2000).foreach { batch =>
batch.foreach(prover.performOneOperation)
prover.generateProof()
}
}
@Benchmark
def apply100KinBatchesOf2KToProverWith32M(s: StateWith32000000): Unit = {
import s._
operations.grouped(2000).foreach { batch =>
batch.foreach(prover.performOneOperation)
prover.generateProof()
}
}
}
| ScorexProject/scrypto | benchmarks/src/main/scala/scorex.benchmarks/AVLBatchPerformance.scala | Scala | cc0-1.0 | 2,591 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.kafka010
import org.apache.hadoop.conf.Configuration
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.deploy.security.HadoopDelegationTokenManager
class KafkaHadoopDelegationTokenManagerSuite extends SparkFunSuite {
private val hadoopConf = new Configuration()
test("default configuration") {
val manager = new HadoopDelegationTokenManager(new SparkConf(false), hadoopConf, null)
assert(manager.isProviderLoaded("kafka"))
}
}
| maropu/spark | external/kafka-0-10-token-provider/src/test/scala/org/apache/spark/kafka010/KafkaHadoopDelegationTokenManagerSuite.scala | Scala | apache-2.0 | 1,297 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.display.angular.paragraphscope
import org.apache.zeppelin.display.AngularObject
import org.apache.zeppelin.display.angular.AbstractAngularModel
/**
* Represents ng-model in paragraph scope
*/
class AngularModel(name: String)
extends org.apache.zeppelin.display.angular.AbstractAngularModel(name) {
def this(name: String, newValue: Any) = {
this(name)
value(newValue)
}
override protected def getAngularObject(): AngularObject[Any] = {
registry.get(name,
context.getNoteId, context.getParagraphId).asInstanceOf[AngularObject[Any]]
}
override protected def addAngularObject(value: Any): AngularObject[Any] = {
registry.add(name, value,
context.getNoteId, context.getParagraphId).asInstanceOf[AngularObject[Any]]
}
}
object AngularModel {
def apply(name: String): AbstractAngularModel = {
new AngularModel(name)
}
def apply(name: String, newValue: Any): AbstractAngularModel = {
new AngularModel(name, newValue)
}
} | ankurmitujjain/incubator-zeppelin | zeppelin-display/src/main/scala/org/apache/zeppelin/display/angular/paragraphscope/AngularModel.scala | Scala | apache-2.0 | 1,811 |
package ee.cone.c4gate
import com.squareup.wire.ProtoAdapter
import ee.cone.c4actor.IdGenUtil
import ee.cone.c4proto.{HasId, ToByteString}
trait KeyGenerator {
def idGenUtil: IdGenUtil
def genPK[P <: Product](model: P, adapter: ProtoAdapter[Product] with HasId): String =
idGenUtil.srcIdFromSerialized(adapter.id,ToByteString(adapter.encode(model)))
}
| wregs/c4proto | c4gate-client/src/main/scala/ee/cone/c4gate/OrigKeyGenerator.scala | Scala | apache-2.0 | 363 |
package lila.rating
import reactivemongo.bson.BSONDocument
import lila.db.BSON
case class Glicko(
rating: Double,
deviation: Double,
volatility: Double) {
def intRating = rating.toInt
def intDeviation = deviation.toInt
def intDeviationDoubled = (deviation * 2).toInt
def intervalMin = (rating - deviation * 2).toInt
def intervalMax = (rating + deviation * 2).toInt
def interval = intervalMin -> intervalMax
override def toString = s"$intRating $intDeviation"
}
case object Glicko {
val minRating = 800
val default = Glicko(1500d, 350d, 0.06d)
def range(rating: Double, deviation: Double) = (
rating - (deviation * 2),
rating + (deviation * 2)
)
implicit val glickoBSONHandler = new BSON[Glicko] {
def reads(r: BSON.Reader): Glicko = Glicko(
rating = r double "r",
deviation = r double "d",
volatility = r double "v")
def writes(w: BSON.Writer, o: Glicko) = BSONDocument(
"r" -> w.double(o.rating),
"d" -> w.double(o.deviation),
"v" -> w.double(o.volatility))
}
sealed abstract class Result(val v: Double) {
def negate: Result
}
object Result {
case object Win extends Result(1) { def negate = Loss }
case object Loss extends Result(0) { def negate = Win }
case object Draw extends Result(0.5) { def negate = Draw }
}
lazy val tube = lila.db.BsTube(glickoBSONHandler)
}
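// Worked example (added for clarity): with the default Glicko(1500d, 350d, 0.06d) the rating
// interval spans two deviations around the rating, i.e. intervalMin = 1500 - 2 * 350 = 800
// and intervalMax = 1500 + 2 * 350 = 2200.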
| danilovsergey/i-bur | modules/rating/src/main/Glicko.scala | Scala | mit | 1,400 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.table.validation
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.bridge.scala._
import org.apache.flink.table.api.{Session, Slide, Tumble, ValidationException}
import org.apache.flink.table.planner.plan.utils.JavaUserDefinedAggFunctions.WeightedAvgWithMerge
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
class GroupWindowValidationTest extends TableTestBase {
@Test
def testInvalidWindowProperty(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Window properties can only be used on windowed tables.")
val util = streamTestUtil()
val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
table
.groupBy('string)
.select('string, 'string.start) // property in non windowed table
}
@Test
def testGroupByWithoutWindowAlias(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("GroupBy must contain exactly one window alias.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)]("T1", 'rowtime, 'int, 'string)
table
.window(Tumble over 5.milli on 'long as 'w)
.groupBy('string)
.select('string, 'int.count)
}
@Test
def testInvalidRowTimeRef(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Cannot resolve field [int]")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
.window(Tumble over 5.milli on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, 'int.count)
      .window(Slide over 5.milli every 1.milli on 'int as 'w2) // 'int no longer exists after the first select
.groupBy('w2)
.select('string)
}
@Test
def testInvalidTumblingSize(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("A tumble window expects a size value literal")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
.window(Tumble over "WRONG" on 'rowtime as 'w) // string is not a valid interval
.groupBy('w, 'string)
.select('string, 'int.count)
}
@Test
def testInvalidTumblingSizeType(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"Tumbling window expects a size literal of a day-time interval or BIGINT type.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
      // a plain INT literal is not a valid size for a tumbling window
.window(Tumble over 10 on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, 'int.count)
}
@Test
def testTumbleUdAggWithInvalidArgs(): Unit = {
expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Given parameters do not match any signature. \n" +
      "Actual: (java.lang.String, java.lang.Integer) \nExpected: (int, int), (long, int), " +
"(long, int, int, java.lang.String)")
val util = streamTestUtil()
val weightedAvg = new WeightedAvgWithMerge
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
.window(Tumble over 2.hours on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, weightedAvg('string, 'int)) // invalid UDAGG args
}
@Test
def testInvalidSlidingSize(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("A sliding window expects a size value literal")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
      // a string is not a valid interval
.window(Slide over "WRONG" every "WRONG" on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, 'int.count)
}
@Test
def testInvalidSlidingSlide(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("A sliding window expects the same type of size and slide.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
// row and time intervals may not be mixed
.window(Slide over 12.rows every 1.minute on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, 'int.count)
}
@Test
def testInvalidSlidingSizeType(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"A sliding window expects a size literal of a day-time interval or BIGINT type.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
      // a plain INT literal is not a valid size for a sliding window
.window(Slide over 10 every 10.milli on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, 'int.count)
}
@Test
def testSlideUdAggWithInvalidArgs(): Unit = {
expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Given parameters do not match any signature. \n" +
      "Actual: (java.lang.String, java.lang.Integer) \nExpected: (int, int), (long, int), " +
"(long, int, int, java.lang.String)")
val util = streamTestUtil()
val weightedAvg = new WeightedAvgWithMerge
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
.window(Slide over 2.hours every 30.minutes on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, weightedAvg('string, 'int)) // invalid UDAGG args
}
@Test
def testInvalidSessionGap(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"A session window expects a gap literal of a day-time interval type.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
// row interval is not valid for session windows
.window(Session withGap 10.rows on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, 'int.count)
}
@Test
def testInvalidSessionGapType(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage(
"A session window expects a gap literal of a day-time interval type.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
      // a plain INT literal is not a valid gap for a session window
.window(Session withGap 10 on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, 'int.count)
}
@Test
def testInvalidWindowAlias1(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Only unresolved reference supported for alias of a " +
"group window.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)]("T1", 'rowtime, 'int, 'string)
table
// expression instead of a symbol
.window(Session withGap 100.milli on 'long as concat("A", "B"))
.groupBy(concat("A", "B"))
.select('string, 'int.count)
}
@Test
def testInvalidWindowAlias2(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Cannot resolve field [string]")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'rowtime.rowtime, 'int, 'string)
table
// field name "string" is already present
.window(Session withGap 100.milli on 'rowtime as 'string)
.groupBy('string)
.select('string, 'int.count)
}
@Test
def testSessionUdAggWithInvalidArgs(): Unit = {
expectedException.expect(classOf[ValidationException])
    expectedException.expectMessage("Given parameters do not match any signature. \n" +
      "Actual: (java.lang.String, java.lang.Integer) \nExpected: (int, int), (long, int), " +
"(long, int, int, java.lang.String)")
val util = streamTestUtil()
val weightedAvg = new WeightedAvgWithMerge
val table = util.addDataStream[(Long, Int, String)](
"T1", 'long, 'int, 'string, 'rowtime.rowtime)
table
.window(Session withGap 2.hours on 'rowtime as 'w)
.groupBy('w, 'string)
.select('string, weightedAvg('string, 'int)) // invalid UDAGG args
}
@Test
def testInvalidWindowPropertyOnRowCountsTumblingWindow(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Window start and Window end cannot be selected " +
"for a row-count tumble window.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'long, 'int, 'string, 'proctime.proctime)
table
.window(Tumble over 2.rows on 'proctime as 'w)
.groupBy('w, 'string)
.select('string, 'w.start, 'w.end) // invalid start/end on rows-count window
}
@Test
def testInvalidWindowPropertyOnRowCountsSlidingWindow(): Unit = {
expectedException.expect(classOf[ValidationException])
expectedException.expectMessage("Window start and Window end cannot be selected for a " +
"row-count slide window.")
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)](
"T1", 'long, 'int, 'string, 'proctime.proctime)
table
.window(Slide over 10.rows every 5.rows on 'proctime as 'w)
.groupBy('w, 'string)
.select('string, 'w.start, 'w.end) // invalid start/end on rows-count window
}
}
| GJL/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowValidationTest.scala | Scala | apache-2.0 | 10,871 |
package lila.game
import lila.db.{ BSON, ByteArray }
import org.joda.time.DateTime
import reactivemongo.bson._
import chess.{ CheckCount, Color, Clock, White, Black, Status, Mode }
import chess.variant.Variant
object BSONHandlers {
private[game] implicit val checkCountWriter = new BSONWriter[CheckCount, BSONArray] {
def write(cc: CheckCount) = BSONArray(cc.white, cc.black)
}
implicit val StatusBSONHandler = new BSONHandler[BSONInteger, Status] {
def read(bsonInt: BSONInteger): Status = Status(bsonInt.value) err s"No such status: ${bsonInt.value}"
def write(x: Status) = BSONInteger(x.id)
}
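  // Maps Game to and from its compact Mongo document: pieces, PGN and move times are
  // stored as binary blobs, and players as partial documents that fall back to an
  // empty builder when absent.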
implicit val gameBSONHandler = new BSON[Game] {
import Game.BSONFields._
import CastleLastMoveTime.castleLastMoveTimeBSONHandler
import PgnImport.pgnImportBSONHandler
import Player.playerBSONHandler
private val emptyPlayerBuilder = playerBSONHandler.read(BSONDocument())
def reads(r: BSON.Reader): Game = {
val nbTurns = r int turns
val winC = r boolO winnerColor map Color.apply
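      // both player ids are stored as one 8-character string: first 4 chars white, last 4 black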
val (whiteId, blackId) = r str playerIds splitAt 4
val uids = ~r.getO[List[String]](playerUids)
val (whiteUid, blackUid) = (uids.headOption.filter(_.nonEmpty), uids.lift(1).filter(_.nonEmpty))
def player(field: String, color: Color, id: Player.Id, uid: Player.UserId): Player = {
val builder = r.getO[Player.Builder](field)(playerBSONHandler) | emptyPlayerBuilder
val win = winC map (_ == color)
builder(color)(id)(uid)(win)
}
val createdAtValue = r date createdAt
Game(
id = r str id,
whitePlayer = player(whitePlayer, White, whiteId, whiteUid),
blackPlayer = player(blackPlayer, Black, blackId, blackUid),
binaryPieces = r bytes binaryPieces,
binaryPgn = r bytesD binaryPgn,
status = r.get[Status](status),
turns = nbTurns,
startedAtTurn = r intD startedAtTurn,
clock = r.getO[Color => Clock](clock)(clockBSONHandler(createdAtValue)) map (_(Color(0 == nbTurns % 2))),
positionHashes = r.bytesD(positionHashes).value,
checkCount = {
val counts = r.intsD(checkCount)
CheckCount(~counts.headOption, ~counts.lastOption)
},
castleLastMoveTime = r.get[CastleLastMoveTime](castleLastMoveTime)(castleLastMoveTimeBSONHandler),
daysPerTurn = r intO daysPerTurn,
binaryMoveTimes = (r bytesO moveTimes) | ByteArray.empty,
mode = Mode(r boolD rated),
variant = Variant(r intD variant) | chess.variant.Standard,
next = r strO next,
bookmarks = r intD bookmarks,
createdAt = createdAtValue,
updatedAt = r dateO updatedAt,
metadata = Metadata(
source = r intO source flatMap Source.apply,
pgnImport = r.getO[PgnImport](pgnImport)(PgnImport.pgnImportBSONHandler),
relay = r.getO[Relay](relay)(Relay.relayBSONHandler),
tournamentId = r strO tournamentId,
simulId = r strO simulId,
tvAt = r dateO tvAt,
analysed = r boolD analysed)
)
}
def writes(w: BSON.Writer, o: Game) = BSONDocument(
id -> o.id,
playerIds -> (o.whitePlayer.id + o.blackPlayer.id),
playerUids -> w.listO(List(~o.whitePlayer.userId, ~o.blackPlayer.userId)),
whitePlayer -> w.docO(playerBSONHandler write ((_: Color) => (_: Player.Id) => (_: Player.UserId) => (_: Player.Win) => o.whitePlayer)),
blackPlayer -> w.docO(playerBSONHandler write ((_: Color) => (_: Player.Id) => (_: Player.UserId) => (_: Player.Win) => o.blackPlayer)),
binaryPieces -> o.binaryPieces,
binaryPgn -> w.byteArrayO(o.binaryPgn),
status -> o.status,
turns -> o.turns,
startedAtTurn -> w.intO(o.startedAtTurn),
clock -> (o.clock map { c => clockBSONHandler(o.createdAt).write(_ => c) }),
positionHashes -> w.bytesO(o.positionHashes),
checkCount -> o.checkCount.nonEmpty.option(o.checkCount),
castleLastMoveTime -> castleLastMoveTimeBSONHandler.write(o.castleLastMoveTime),
daysPerTurn -> o.daysPerTurn,
moveTimes -> (BinaryFormat.moveTime write o.moveTimes),
rated -> w.boolO(o.mode.rated),
variant -> o.variant.exotic.option(o.variant.id).map(w.int),
next -> o.next,
bookmarks -> w.intO(o.bookmarks),
createdAt -> w.date(o.createdAt),
updatedAt -> o.updatedAt.map(w.date),
source -> o.metadata.source.map(_.id),
pgnImport -> o.metadata.pgnImport,
relay -> o.metadata.relay,
tournamentId -> o.metadata.tournamentId,
simulId -> o.metadata.simulId,
tvAt -> o.metadata.tvAt.map(w.date),
analysed -> w.boolO(o.metadata.analysed)
)
}
import lila.db.ByteArray.ByteArrayBSONHandler
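  // Clocks are binary-encoded relative to the game creation date, so both reading
  // and writing need the `since` timestamp.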
def clockBSONHandler(since: DateTime) = new BSONHandler[BSONBinary, Color => Clock] {
def read(bin: BSONBinary) = BinaryFormat clock since read {
ByteArrayBSONHandler read bin
}
def write(clock: Color => Clock) = ByteArrayBSONHandler write {
BinaryFormat clock since write clock(chess.White)
}
}
}
| pavelo65/lila | modules/game/src/main/BSONHandlers.scala | Scala | mit | 5,104 |
/*
* Copyright 2015 eleflow.com.br.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.sparkts.models
import ml.dmlc.xgboost4j.scala.DMatrix
import ml.dmlc.xgboost4j.{LabeledPoint => XGBLabeledPoint}
import ml.dmlc.xgboost4j.scala.spark.{XGBoost, XGBoostModel}
import org.apache.spark.ml.feature.LabeledPoint
import org.apache.spark.ml.linalg.DenseVector
import org.apache.spark.rdd.RDD
/**
* Created by dirceu on 25/08/16.
*/
object UberXGBoostModel {
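  /** Trains a distributed XGBoost model on Spark: the training RDD is cached first,
    * external memory is enabled and missing values are represented as Float.NaN. */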
def train(trainLabel: RDD[LabeledPoint],
configMap: Map[String, Any],
round: Int,
nWorkers: Int): XGBoostModel = {
val trainData = trainLabel.cache
    XGBoost.trainWithRDD(trainData, configMap, round, nWorkers,
      useExternalMemory = true, missing = Float.NaN)
}
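  /** Scores labeled test data partition by partition: each partition is turned into a
    * DMatrix and every original label is paired with the booster's prediction. */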
def labelPredict(testSet: RDD[XGBLabeledPoint],
useExternalCache: Boolean,
booster: XGBoostModel): RDD[(Float, Float)] = {
val broadcastBooster = testSet.sparkContext.broadcast(booster)
testSet.mapPartitions { testData =>
val (toPredict, toLabel) = testData.duplicate
val dMatrix = new DMatrix(toPredict)
val prediction = broadcastBooster.value.booster.predict(dMatrix).flatten.toIterator
toLabel.map(_.label).zip(prediction)
}
}
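  /** Scores raw feature vectors through the broadcast booster; each prediction row
    * is reduced to its first two components. */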
def labelPredict(testSet: RDD[DenseVector],
booster: XGBoostModel): RDD[(Float, Float)] = {
val broadcastBooster = testSet.sparkContext.broadcast(booster)
    val cached = testSet.cache
    broadcastBooster.value.predict(cached, missingValue = Float.NaN)
      .map(value => (value(0), value(1)))
}
}
| eleflow/uberdata | iuberdata_core/src/main/scala/com/cloudera/sparkts/models/UberXGBoostModel.scala | Scala | apache-2.0 | 2,561 |
/*
* Copyright 2017 rdbc contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.rdbc.examples.playscala
import java.time.Instant
import play.api.libs.json.{Json, Writes}
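/** Play JSON support: the implicit Writes lets Record values be serialized with Json.toJson. */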
object Record {
implicit val writes: Writes[Record] = Json.writes[Record]
}
final case class Record(integer: Option[Int],
timestamp: Option[Instant],
varchar: Option[String])
| rdbc-io/rdbc-examples | play-scala/app/io/rdbc/examples/playscala/Record.scala | Scala | apache-2.0 | 932 |