Dataset columns:
  code        string   (lengths 5 to 1M)
  repo_name   string   (lengths 5 to 109)
  path        string   (lengths 6 to 208)
  language    string   (1 class)
  license     string   (15 classes)
  size        int64    (5 to 1M)
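The stringlengths / stringclasses / int64 annotations above are the column summaries produced by a Hugging Face-style dataset viewer, so each record below presumably carries one Scala source file plus its provenance metadata. As a minimal sketch (assuming the data is hosted as a Hugging Face dataset; the identifier "user/scala-source-files" is a placeholder, not the real dataset name), the rows could be inspected like this:

    # Minimal sketch, assuming a Hugging Face-hosted dataset with the columns listed above.
    # "user/scala-source-files" is a placeholder identifier, not the actual dataset name.
    from datasets import load_dataset

    ds = load_dataset("user/scala-source-files", split="train")

    for row in ds.select(range(3)):
        # Each row holds one Scala file ("code") plus its provenance metadata.
        print(row["repo_name"], row["path"], row["license"], row["size"])
        print(row["code"][:200])  # first 200 characters of the file contents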
package com.phosphene.kafkastorm.kafka

import java.util.Properties

import kafka.server.{KafkaServerStartable, KafkaConfig}
import kafka.utils.Logging

/**
 * Runs an in-memory, "embedded" instance of a Kafka broker, which listens at `127.0.0.1:9092`.
 *
 * Requires a running ZooKeeper instance to connect to. By default, it expects a ZooKeeper instance running at
 * `127.0.0.1:2181`. You can specify a different ZooKeeper instance by setting the `zookeeper.connect` parameter in the
 * broker's configuration.
 *
 * @param config Broker configuration settings.
 */
class KafkaEmbedded(config: Properties = new Properties) extends Logging {

  private val defaultZkConnect = "127.0.0.1:2181"

  private val effectiveConfig = {
    val c = new Properties
    c.load(this.getClass.getResourceAsStream("/broker-defaults.properties"))
    c.putAll(config)
    c
  }

  private val kafkaConfig = new KafkaConfig(effectiveConfig)
  private val kafka = new KafkaServerStartable(kafkaConfig)

  /**
   * This broker's `metadata.broker.list` value. Example: `127.0.0.1:9092`.
   *
   * You can use this to tell Kafka producers and consumers how to connect to this instance.
   */
  val brokerList = kafka.serverConfig.hostName + ":" + kafka.serverConfig.port

  /**
   * The ZooKeeper connection string aka `zookeeper.connect`.
   */
  val zookeeperConnect = {
    val zkConnectLookup = Option(effectiveConfig.getProperty("zookeeper.connect"))
    zkConnectLookup match {
      case Some(zkConnect) => zkConnect
      case _ =>
        warn(s"zookeeper.connect is not configured -- falling back to default setting $defaultZkConnect")
        defaultZkConnect
    }
  }

  /**
   * Start the broker.
   */
  def start() {
    debug(s"Starting embedded Kafka broker at $brokerList (using ZooKeeper server at $zookeeperConnect) ...")
    kafka.startup()
    debug("Embedded Kafka broker startup completed")
  }

  /**
   * Stop the broker.
   */
  def stop() {
    debug("Shutting down embedded Kafka broker...")
    kafka.shutdown()
    debug("Embedded Kafka broker shutdown completed")
  }

}
phosphene/kafka-storm-test-demo
src/main/scala/com/phosphene/kafkastorm/kafka/KafkaEmbedded.scala
Scala
apache-2.0
2,087
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.streaming import java.sql.Date import org.apache.commons.lang3.StringUtils import org.apache.spark.sql.catalyst.plans.logical.{EventTimeTimeout, ProcessingTimeTimeout} import org.apache.spark.sql.execution.streaming.GroupStateImpl._ import org.apache.spark.sql.streaming.{GroupState, GroupStateTimeout} import org.apache.spark.unsafe.types.CalendarInterval /** * Internal implementation of the [[GroupState]] interface. Methods are not thread-safe. * * @param optionalValue Optional value of the state * @param batchProcessingTimeMs Processing time of current batch, used to calculate timestamp * for processing time timeouts * @param timeoutConf Type of timeout configured. Based on this, different operations will * be supported. * @param hasTimedOut Whether the key for which this state wrapped is being created is * getting timed out or not. */ private[sql] class GroupStateImpl[S] private( optionalValue: Option[S], batchProcessingTimeMs: Long, eventTimeWatermarkMs: Long, timeoutConf: GroupStateTimeout, override val hasTimedOut: Boolean) extends GroupState[S] { private var value: S = optionalValue.getOrElse(null.asInstanceOf[S]) private var defined: Boolean = optionalValue.isDefined private var updated: Boolean = false // whether value has been updated (but not removed) private var removed: Boolean = false // whether value has been removed private var timeoutTimestamp: Long = NO_TIMESTAMP // ========= Public API ========= override def exists: Boolean = defined override def get: S = { if (defined) { value } else { throw new NoSuchElementException("State is either not defined or has already been removed") } } override def getOption: Option[S] = { if (defined) { Some(value) } else { None } } override def update(newValue: S): Unit = { if (newValue == null) { throw new IllegalArgumentException("'null' is not a valid state value") } value = newValue defined = true updated = true removed = false } override def remove(): Unit = { defined = false updated = false removed = true } override def setTimeoutDuration(durationMs: Long): Unit = { if (timeoutConf != ProcessingTimeTimeout) { throw new UnsupportedOperationException( "Cannot set timeout duration without enabling processing time timeout in " + "map/flatMapGroupsWithState") } if (durationMs <= 0) { throw new IllegalArgumentException("Timeout duration must be positive") } timeoutTimestamp = durationMs + batchProcessingTimeMs } override def setTimeoutDuration(duration: String): Unit = { setTimeoutDuration(parseDuration(duration)) } @throws[IllegalArgumentException]("if 'timestampMs' is not positive") @throws[IllegalStateException]("when state is either not initialized, or already removed") @throws[UnsupportedOperationException]( "if 'timeout' has not 
been enabled in [map|flatMap]GroupsWithState in a streaming query") override def setTimeoutTimestamp(timestampMs: Long): Unit = { checkTimeoutTimestampAllowed() if (timestampMs <= 0) { throw new IllegalArgumentException("Timeout timestamp must be positive") } if (eventTimeWatermarkMs != NO_TIMESTAMP && timestampMs < eventTimeWatermarkMs) { throw new IllegalArgumentException( s"Timeout timestamp ($timestampMs) cannot be earlier than the " + s"current watermark ($eventTimeWatermarkMs)") } timeoutTimestamp = timestampMs } @throws[IllegalArgumentException]("if 'additionalDuration' is invalid") @throws[IllegalStateException]("when state is either not initialized, or already removed") @throws[UnsupportedOperationException]( "if 'timeout' has not been enabled in [map|flatMap]GroupsWithState in a streaming query") override def setTimeoutTimestamp(timestampMs: Long, additionalDuration: String): Unit = { checkTimeoutTimestampAllowed() setTimeoutTimestamp(parseDuration(additionalDuration) + timestampMs) } @throws[IllegalStateException]("when state is either not initialized, or already removed") @throws[UnsupportedOperationException]( "if 'timeout' has not been enabled in [map|flatMap]GroupsWithState in a streaming query") override def setTimeoutTimestamp(timestamp: Date): Unit = { checkTimeoutTimestampAllowed() setTimeoutTimestamp(timestamp.getTime) } @throws[IllegalArgumentException]("if 'additionalDuration' is invalid") @throws[IllegalStateException]("when state is either not initialized, or already removed") @throws[UnsupportedOperationException]( "if 'timeout' has not been enabled in [map|flatMap]GroupsWithState in a streaming query") override def setTimeoutTimestamp(timestamp: Date, additionalDuration: String): Unit = { checkTimeoutTimestampAllowed() setTimeoutTimestamp(timestamp.getTime + parseDuration(additionalDuration)) } override def toString: String = { s"GroupState(${getOption.map(_.toString).getOrElse("<undefined>")})" } // ========= Internal API ========= /** Whether the state has been marked for removing */ def hasRemoved: Boolean = removed /** Whether the state has been updated */ def hasUpdated: Boolean = updated /** Return timeout timestamp or `TIMEOUT_TIMESTAMP_NOT_SET` if not set */ def getTimeoutTimestamp: Long = timeoutTimestamp private def parseDuration(duration: String): Long = { if (StringUtils.isBlank(duration)) { throw new IllegalArgumentException( "Provided duration is null or blank.") } val intervalString = if (duration.startsWith("interval")) { duration } else { "interval " + duration } val cal = CalendarInterval.fromString(intervalString) if (cal == null) { throw new IllegalArgumentException( s"Provided duration ($duration) is not valid.") } if (cal.milliseconds < 0 || cal.months < 0) { throw new IllegalArgumentException(s"Provided duration ($duration) is not positive") } val millisPerMonth = CalendarInterval.MICROS_PER_DAY / 1000 * 31 cal.milliseconds + cal.months * millisPerMonth } private def checkTimeoutTimestampAllowed(): Unit = { if (timeoutConf != EventTimeTimeout) { throw new UnsupportedOperationException( "Cannot set timeout timestamp without enabling event time timeout in " + "map/flatMapGroupsWithState") } } } private[sql] object GroupStateImpl { // Value used represent the lack of valid timestamp as a long val NO_TIMESTAMP = -1L def createForStreaming[S]( optionalValue: Option[S], batchProcessingTimeMs: Long, eventTimeWatermarkMs: Long, timeoutConf: GroupStateTimeout, hasTimedOut: Boolean): GroupStateImpl[S] = { new GroupStateImpl[S]( optionalValue, 
batchProcessingTimeMs, eventTimeWatermarkMs, timeoutConf, hasTimedOut) } def createForBatch(timeoutConf: GroupStateTimeout): GroupStateImpl[Any] = { new GroupStateImpl[Any]( optionalValue = None, batchProcessingTimeMs = NO_TIMESTAMP, eventTimeWatermarkMs = NO_TIMESTAMP, timeoutConf, hasTimedOut = false) } }
mike0sv/spark
sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/GroupStateImpl.scala
Scala
apache-2.0
8,126
package com.fijimf

import org.joda.time.{Months, LocalDate}

case class AdjustableRateMortgage
(
  originalBalance: Double,
  originalTerm: Int,
  origDate: LocalDate,
  firstPayDate: LocalDate,
  currentBalance: Double,
  currentTerm: Int,
  currentRate: Double,
  index: String,
  margin: Double,
  nextRateReset: LocalDate,
  rateResetFreq: Int,
  periodicCapFloor: Double,
  lifetimeCap: Double,
  lifetimeFloor: Double
) extends Asset[AdjustableRateMortgage, ArmContext] {

  def amortize(z: AccrualPeriod, ctx: ArmContext): (AdjustableRateMortgage, Cashflow) = {
    val payment = levelPayment(currentRate / 12.0, currentTerm) * currentBalance
    val interest = currentRate * (1.0 / 12.0) * currentBalance
    val principal = Math.min(currentBalance, payment - interest)
    val nextAccrual: AccrualPeriod = z.copy(period = z.period + 1, startDate = z.endDate, endDate = z.endDate.plusMonths(1))
    val c = Cashflow(nextAccrual, principal, interest, 0)
    val newArm: AdjustableRateMortgage = copy(currentBalance = currentBalance - principal, currentTerm = currentTerm - 1)
    if (c.accrualPeriod.startDate.isEqual(nextRateReset) || c.accrualPeriod.startDate.isAfter(nextRateReset)) {
      (resetRate(ctx, newArm), c)
    } else {
      (newArm, c)
    }
  }

  def resetRate(ctx: ArmContext, arm: AdjustableRateMortgage): AdjustableRateMortgage = {
    val indexValue: Double = ctx.indexValue(index, nextRateReset)
    val chg: Double = ((indexValue + margin / 100.0) / 100.0) - currentRate
    val newRate = currentRate + Math.min(Math.max(-periodicCapFloor/100, chg), periodicCapFloor/100)
    arm.copy(
      currentRate = Math.max(Math.min(newRate, lifetimeCap/100), lifetimeFloor/100),
      nextRateReset = nextRateReset.plusMonths(12 / rateResetFreq))
  }

  def amortize(ctx: ArmContext): Stream[(AdjustableRateMortgage, Cashflow)] = {
    Stream.iterate(amortize(AccrualPeriod(0, origDate, origDate, 0), ctx))((tuple: (AdjustableRateMortgage, Cashflow)) => {
      tuple._1.amortize(tuple._2.accrualPeriod, ctx)
    })
  }

  override def cashflows(ctx: ArmContext): Stream[(AdjustableRateMortgage, Cashflow)] = {
    amortize(ctx)
  }
}
fijimf/msfcf
msfcf-core/src/main/scala/com/fijimf/AdjustableRateMortgage.scala
Scala
mit
2,206
abstract class GenericParamInput[T] {
  val x: T
  def foo {
    val y = x
    /*start*/
    println(y)
    /*end*/
  }
}
/*
abstract class GenericParamInput[T] {
  val x: T

  def foo {
    val y = x
    testMethodName(y)
  }

  def testMethodName(y: T): Unit = {
    println(y)
  }
}
*/
whorbowicz/intellij-scala
testdata/extractMethod/input/GenericParamInput.scala
Scala
apache-2.0
285
package edu.cornell.cdm89.scalaspec.pde

import akka.actor.{Actor, ActorLogging, ActorRef}
import breeze.linalg.DenseVector

import edu.cornell.cdm89.scalaspec.domain.GllElement.{Coords, InitialData}
import edu.cornell.cdm89.scalaspec.domain.Subdomain
import edu.cornell.cdm89.scalaspec.ode.ElementState
import edu.cornell.cdm89.scalaspec.spectral.GllBasis

class TrianglePulseInitialData(subdomain: ActorRef, center: Double,
    halfWidth: Double, height: Double) extends Actor with ActorLogging {
  // TODO: Refactor into base class
  def receive = {
    case 'ProvideId =>
      subdomain ! 'GetLocalElements
    case Subdomain.ElementsList(elements) =>
      elements foreach { _ ! 'GetCoords }
      context.become(idProvider(elements))
  }

  def idProvider(elements: Seq[ActorRef]): Receive = {
    // TODO: Count responses
    case Coords(xs) =>
      //log.info("ID received coords")
      val t0 = 0.0
      val width = xs(xs.length-1) - xs(0)
      val basis = GllBasis(xs.length-1)
      val psi0 = xs map { x =>
        if ((x < center-halfWidth) || (x > center+halfWidth)) 0.0
        else if (x < center) (height/halfWidth) * (halfWidth - (center - x))
        else (height/halfWidth) * (halfWidth - (x - center))
      }
      val pi0 = DenseVector.zeros[Double](xs.length)
      val phi0 = basis.differentiate(psi0) :* (2.0/width)
      sender ! InitialData(ElementState(t0, xs, Vector(psi0, pi0, phi0)))
  }
}
cdmuhlb/DGenerate
src/main/scala/edu/cornell/cdm89/scalaspec/pde/TrianglePulseInitialData.scala
Scala
mit
1,428
/* * Copyright 2014 Frugal Mechanic (http://frugalmechanic.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fm.xml import fm.common.Implicits._ import javax.xml.stream.XMLStreamWriter import scala.collection.mutable.ArrayBuffer object CommentingXMLStreamWriter { private sealed trait Command { def apply(writer: XMLStreamWriter): Unit } private sealed trait StartElement extends Command { def localName: String } private sealed trait EmptyElement extends StartElement private sealed trait Attribute extends Command { def localName: String; def value: String } private sealed trait Text extends Command { def data: String } private case class Namespace(prefix: String, namespaceURI: String) extends Command { def apply(writer: XMLStreamWriter): Unit = writer.writeNamespace(prefix, namespaceURI) } private object StartElement { def apply(localName: String): StartElement1 = StartElement1(localName) def apply(namespaceURI: String, localName: String): StartElement2 = StartElement2(namespaceURI, localName) def apply(prefix: String, localName: String, namespaceURI: String): StartElement3 = StartElement3(prefix, localName, namespaceURI) } private object EmptyElement { def apply(localName: String): EmptyElement1 = EmptyElement1(localName) def apply(namespaceURI: String, localName: String): EmptyElement2 = EmptyElement2(namespaceURI, localName) def apply(prefix: String, localName: String, namespaceURI: String): EmptyElement3 = EmptyElement3(prefix, localName, namespaceURI) } private object Attribute { def apply(localName: String, value: String): Attribute2 = Attribute2(localName, value) def apply(namespaceURI: String, localName: String, value: String): Attribute3 = Attribute3(namespaceURI, localName, value) def apply(prefix: String, namespaceURI: String, localName: String, value: String): Attribute4 = Attribute4(prefix, namespaceURI, localName, value) } private object Chars { def apply(text: Array[Char], start: Int, length: Int): Chars = Chars(new String(text, start, length)) } private case class Chars(data: String) extends Text { def apply(writer: XMLStreamWriter): Unit = writer.writeCharacters(data) } private case class CData(data: String) extends Text { def apply(writer: XMLStreamWriter): Unit = writer.writeCData(data) } private case class StartElement1(localName: String) extends StartElement { def apply(writer: XMLStreamWriter): Unit = writer.writeStartElement(localName) } private case class StartElement2(namespaceURI: String, localName: String) extends StartElement { def apply(writer: XMLStreamWriter): Unit = writer.writeStartElement(namespaceURI, localName) } private case class StartElement3(prefix: String, localName: String, namespaceURI: String) extends StartElement { def apply(writer: XMLStreamWriter): Unit = writer.writeStartElement(prefix, localName, namespaceURI) } private case class EmptyElement1(localName: String) extends EmptyElement { def apply(writer: XMLStreamWriter): Unit = writer.writeEmptyElement(localName) } private case class EmptyElement2(namespaceURI: String, localName: String) extends 
EmptyElement { def apply(writer: XMLStreamWriter): Unit = writer.writeEmptyElement(namespaceURI, localName) } private case class EmptyElement3(prefix: String, localName: String, namespaceURI: String) extends EmptyElement { def apply(writer: XMLStreamWriter): Unit = writer.writeEmptyElement(prefix, localName, namespaceURI) } private case class Attribute2(localName: String, value: String) extends Attribute { def apply(writer: XMLStreamWriter): Unit = writer.writeAttribute(localName, value) } private case class Attribute3(namespaceURI: String, localName: String, value: String) extends Attribute { def apply(writer: XMLStreamWriter): Unit = writer.writeAttribute(namespaceURI, localName, value) } private case class Attribute4(prefix: String, namespaceURI: String, localName: String, value: String) extends Attribute { def apply(writer: XMLStreamWriter): Unit = writer.writeAttribute(prefix, namespaceURI, localName, value) } private case object EndElement extends Command { def apply(writer: XMLStreamWriter): Unit = writer.writeEndElement() } } final case class CommentingXMLStreamWriter(protected val self: IndentingXMLStreamWriter, comments: XMLCommentProvider) extends XMLStreamWriterProxy { import CommentingXMLStreamWriter._ // We buffer at most 1 complete element (start tag, attributes, chars/cdata, end tag) private[this] val buffer: ArrayBuffer[Command] = new ArrayBuffer() private[this] val localNames: ArrayBuffer[String] = new ArrayBuffer() private def push(localName: String): Unit = localNames += localName private def pop(): Unit = localNames.remove(localNames.size - 1) protected def stackKey(): String = localNames.mkString(".") /** * This is called before the start element is written */ protected def beforeStartElement(localName: String): Unit = { flushBuffer() push(localName) } /** * This is called after the closing element tag is written */ protected def afterEndElement(): Unit = { flushBuffer() pop() } protected def flushBuffer(): Unit = { if (buffer.size == 0) return // TODO: add stricter checking. 
Should be StartElement, optional Attributes, optional Text, and optional EndElement //val localName: String = buffer.collect{ case start: StartElement => start.localName }.head val localNamePath: String = stackKey() val attributes: Map[String, String] = buffer.collect { case attr: Attribute => (attr.localName, attr.value) }.toMap val value: Option[String] = buffer.collect{ case txt: Text => txt.data }.mkString("").toBlankOption // Comments before the start element comments.leadingComment(localNamePath, attributes, value).foreach{ comment: String => self.writeIndentedComment(comment.requireLeading(" ").requireTrailing(" ")) } buffer.foreach{ command: Command => command(self) } // Comments after the end element (or the start element if there are nested elements under this one) comments.trailingComment(localNamePath, attributes, value).foreach{ comment: String => self.writeCharacters(" ") self.writeComment(comment.requireLeading(" ").requireTrailing(" ")) } buffer.clear() } // // These are all the XMLStreamWriter overrides that hook into handleStartElement/handleEndElement: // override def writeEmptyElement(localName: String): Unit = { beforeStartElement(localName) buffer += EmptyElement(localName) afterEndElement() } override def writeEmptyElement(prefix: String, localName: String, namespaceURI: String): Unit = { beforeStartElement(localName) buffer += EmptyElement(prefix, localName, namespaceURI) afterEndElement() } override def writeEmptyElement(namespaceURI: String, localName: String): Unit = { beforeStartElement(localName) buffer += EmptyElement(namespaceURI, localName) afterEndElement() } override def writeStartElement(prefix: String, localName: String, namespaceURI: String): Unit = { beforeStartElement(localName) buffer += StartElement(prefix, localName, namespaceURI) } override def writeStartElement(namespaceURI: String, localName: String): Unit = { beforeStartElement(localName) buffer += StartElement(namespaceURI, localName) } override def writeStartElement(localName: String): Unit = { beforeStartElement(localName) buffer += StartElement(localName) } override def writeEndElement(): Unit = { buffer += EndElement afterEndElement() } override def writeAttribute(namespaceURI: String, localName: String, value: String): Unit = { buffer += Attribute(namespaceURI, localName, value) } override def writeAttribute(prefix: String, namespaceURI: String, localName: String, value: String): Unit = { buffer += Attribute(prefix, namespaceURI, localName, value) } override def writeAttribute(localName: String, value: String): Unit = { buffer += Attribute(localName, value) } override def writeCData(data: String): Unit = { buffer += CData(data) } override def writeCharacters(text: Array[Char], start: Int, length: Int): Unit = { buffer += Chars(text, start, length) } override def writeCharacters(text: String): Unit = { buffer += Chars(text) } override def writeEndDocument(): Unit = { flushBuffer() self.writeEndDocument() } override def setPrefix(prefix: String, uri: String): Unit = { //println(s"setPrefix($prefix, $uri)") self.setPrefix(prefix, uri) } override def writeDTD(dtd: String): Unit = self.writeDTD(dtd) override def writeDefaultNamespace(namespaceURI: String): Unit = { //println(s"writeDefaultNamespace($namespaceURI)") self.writeDefaultNamespace(namespaceURI) } override def writeEntityRef(name: String): Unit = { //println(s"writeEntityRef($name)") self.writeEntityRef(name) } override def writeNamespace(prefix: String, namespaceURI: String): Unit = { buffer += Namespace(prefix, namespaceURI) } 
override def writeProcessingInstruction(target: String, data: String): Unit = { //println(s"writeProcessingInstruction($target, $data)") self.writeProcessingInstruction(target, data) } override def writeProcessingInstruction(target: String): Unit = { //println(s"writeProcessingInstruction($target)") self.writeProcessingInstruction(target) } }
frugalmechanic/fm-xml
src/main/scala/fm/xml/CommentingXMLStreamWriter.scala
Scala
apache-2.0
10,204
package security

import models.User
import reactivemongo.api.indexes.Index
import repository.{MongoProfileRepository, MongoTokenRepository}
import securesocial.core.services.UserService

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

object MongoUserService extends UserService[User] with MongoProfileRepository with MongoTokenRepository {

  override def indexes(): Future[Seq[Index]] = {
    val profileIndexes = super[MongoProfileRepository].indexes()
    val tokenIndexes = super[MongoTokenRepository].indexes()
    Future.sequence(Seq(profileIndexes, tokenIndexes)).map(_.flatten)
  }
}
timothygordon32/reactive-todolist
app/security/MongoUserService.scala
Scala
mit
635
//: ---------------------------------------------------------------------------- //: Copyright (C) 2015 Verizon. All Rights Reserved. //: //: Licensed under the Apache License, Version 2.0 (the "License"); //: you may not use this file except in compliance with the License. //: You may obtain a copy of the License at //: //: http://www.apache.org/licenses/LICENSE-2.0 //: //: Unless required by applicable law or agreed to in writing, software //: distributed under the License is distributed on an "AS IS" BASIS, //: WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //: See the License for the specific language governing permissions and //: limitations under the License. //: //: ---------------------------------------------------------------------------- package funnel import java.lang.management.GarbageCollectorMXBean import java.lang.management.ManagementFactory import java.util.concurrent.{ExecutorService, ScheduledExecutorService} import collection.JavaConversions._ import scala.concurrent.duration._ import scalaz.concurrent.Strategy import scalaz.stream._ /** Functions for adding various JVM metrics to a `Monitoring` instance. */ object JVM { /** * Add various JVM metrics to a `Monitoring` instance. */ def instrument(I: Instruments)( implicit ES: ExecutorService = Monitoring.defaultPool, TS: ScheduledExecutorService = Monitoring.schedulingPool, t: Duration = 30 seconds): Unit = { val mxBean = ManagementFactory.getMemoryMXBean val gcs = ManagementFactory.getGarbageCollectorMXBeans.toList val pools = ManagementFactory.getMemoryPoolMXBeans.toList def threadCount(s: Thread.State): Int = Thread.getAllStackTraces.keySet.toList.map( _.getState).filter(_ == s).length val ST = Strategy.Executor(ES) import I._ gcs.foreach { gc => val name = gc.getName.replace(' ', '-') val numCollections = numericGauge(s"jvm/gc/$name", 0, Units.Count) val collectionTime = numericGauge(s"jvm/gc/$name/time", 0, Units.Milliseconds) time.awakeEvery(t)(ST,TS).map { _ => numCollections.set(gc.getCollectionCount.toDouble) collectionTime.set(gc.getCollectionTime.toDouble) }.run.runAsync(_ => ()) } def TC(state: Thread.State) = numericGauge(s"jvm/threads/${state.toString.toLowerCase}", 0d) def MB(lbl: String, desc: String): Gauge[Periodic[Stats], Double] = Gauge.scale(1/1e6)(numericGauge(lbl, 0.0, Units.Megabytes, desc)) val newThreads = TC(Thread.State.NEW) val runnableThreads = TC(Thread.State.RUNNABLE) val blockedThreads = TC(Thread.State.BLOCKED) val waitingThreads = TC(Thread.State.WAITING) val timedWaitingThreads = TC(Thread.State.TIMED_WAITING) val terminatedThreads = TC(Thread.State.TERMINATED) val totalInit = MB("jvm/memory/total/init", "The amount of memory that the JVM initially requests from the operating system for memory management.") val totalUsed = MB("jvm/memory/total/used", "The amount of used memory.") val totalMax = MB("jvm/memory/total/max", "The maximum amount of memory that can be used for memory management.") val totalCommitted = MB("jvm/memory/total/committed", "The amount of memory that is committed for the JVM to use.") val heapInit = MB("jvm/memory/heap/init", "The amount of heap memory that the JVM initially requests from the operating system.") val heapUsed = MB("jvm/memory/heap/used", "The amount of used heap memory.") val heapUsage = numericGauge("jvm/memory/heap/usage", 0.0, Units.Ratio, "Ratio of heap memory in use.") val heapMax = MB("jvm/memory/heap/max", "The maximum amount of heap memory that can be used for memory management.") val heapCommitted = 
MB("jvm/memory/heap/committed", "The amount of heap memory that is committed for the JVM to use.") val nonheapInit = MB("jvm/memory/nonheap/init", "The amount of nonheap memory that the JVM initially requests from the operating system for memory management.") val nonheapUsed = MB("jvm/memory/nonheap/used", "The amount of used nonheap memory.") val nonheapUsage = numericGauge("jvm/memory/nonheap/usage", 0.0, Units.Ratio, "Ratio of nonheap memory in use.") val nonheapMax = MB("jvm/memory/nonheap/max", "The maximum amount of nonheap memory that can be used for memory management.") val nonheapCommitted = MB("jvm/memory/nonheap/committed", "The amount of nonheap memory that is committed for the JVM to use.") time.awakeEvery(t)(ST,TS).map { _ => import mxBean.{getHeapMemoryUsage => heap, getNonHeapMemoryUsage => nonheap} totalInit.set(heap.getInit + nonheap.getInit) totalUsed.set(heap.getUsed + nonheap.getUsed) totalMax.set(heap.getMax + nonheap.getMax) totalCommitted.set(heap.getCommitted + nonheap.getCommitted) heapInit.set(heap.getInit) heapUsed.set(heap.getUsed) heapUsage.set(heap.getUsed.toDouble / heap.getMax) heapMax.set(heap.getMax) heapCommitted.set(heap.getCommitted) nonheapInit.set(nonheap.getInit) nonheapUsed.set(nonheap.getUsed) nonheapUsage.set(nonheap.getUsed.toDouble / nonheap.getMax) nonheapMax.set(nonheap.getMax) nonheapCommitted.set(nonheap.getCommitted) newThreads.set(threadCount(Thread.State.NEW)) runnableThreads.set(threadCount(Thread.State.RUNNABLE)) blockedThreads.set(threadCount(Thread.State.BLOCKED)) waitingThreads.set(threadCount(Thread.State.WAITING)) timedWaitingThreads.set(threadCount(Thread.State.TIMED_WAITING)) terminatedThreads.set(threadCount(Thread.State.TERMINATED)) }.run.runAsync(_ => ()) } }
neigor/funnel
core/src/main/scala/JVM.scala
Scala
apache-2.0
6,061
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.execution.datasources.parquet import java.io.File import scala.reflect.ClassTag import scala.reflect.runtime.universe.TypeTag import org.apache.spark.sql.test.SQLTestUtils import org.apache.spark.sql.{DataFrame, SaveMode, SQLContext} /** * A helper trait that provides convenient facilities for Parquet testing. * * NOTE: Considering classes `Tuple1` ... `Tuple22` all extend `Product`, it would be more * convenient to use tuples rather than special case classes when writing test cases/suites. * Especially, `Tuple1.apply` can be used to easily wrap a single type/value. */ private[sql] trait ParquetTest extends SQLTestUtils { protected def _sqlContext: SQLContext /** * Writes `data` to a Parquet file, which is then passed to `f` and will be deleted after `f` * returns. */ protected def withParquetFile[T <: Product: ClassTag: TypeTag] (data: Seq[T]) (f: String => Unit): Unit = { withTempPath { file => _sqlContext.createDataFrame(data).write.parquet(file.getCanonicalPath) f(file.getCanonicalPath) } } /** * Writes `data` to a Parquet file and reads it back as a [[DataFrame]], * which is then passed to `f`. The Parquet file will be deleted after `f` returns. */ protected def withParquetDataFrame[T <: Product: ClassTag: TypeTag] (data: Seq[T]) (f: DataFrame => Unit): Unit = { withParquetFile(data)(path => f(_sqlContext.read.parquet(path))) } /** * Writes `data` to a Parquet file, reads it back as a [[DataFrame]] and registers it as a * temporary table named `tableName`, then call `f`. The temporary table together with the * Parquet file will be dropped/deleted after `f` returns. */ protected def withParquetTable[T <: Product: ClassTag: TypeTag] (data: Seq[T], tableName: String) (f: => Unit): Unit = { withParquetDataFrame(data) { df => _sqlContext.registerDataFrameAsTable(df, tableName) withTempTable(tableName)(f) } } protected def makeParquetFile[T <: Product: ClassTag: TypeTag]( data: Seq[T], path: File): Unit = { _sqlContext.createDataFrame(data).write.mode(SaveMode.Overwrite).parquet(path.getCanonicalPath) } protected def makeParquetFile[T <: Product: ClassTag: TypeTag]( df: DataFrame, path: File): Unit = { df.write.mode(SaveMode.Overwrite).parquet(path.getCanonicalPath) } protected def makePartitionDir( basePath: File, defaultPartitionName: String, partitionCols: (String, Any)*): File = { val partNames = partitionCols.map { case (k, v) => val valueString = if (v == null || v == "") defaultPartitionName else v.toString s"$k=$valueString" } val partDir = partNames.foldLeft(basePath) { (parent, child) => new File(parent, child) } assert(partDir.mkdirs(), s"Couldn't create directory $partDir") partDir } }
ArvinDevel/onlineAggregationOnSparkV2
sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetTest.scala
Scala
apache-2.0
3,715
package test

object Foo {
  def foo = 1
}

object Bar {
  def foo = 2
}

object Test123 {
  import Foo.foo

  object Inner {
    import Bar._ //!!
    println(foo)
  }
}
yusuke2255/dotty
test/x/names.scala
Scala
bsd-3-clause
173
package com.kakao.shaded.jackson.module.scala.deser import com.kakao.shaded.jackson.core.{JsonParser, JsonToken} import com.kakao.shaded.jackson.databind._ import com.kakao.shaded.jackson.databind.deser._ import com.kakao.shaded.jackson.databind.deser.std.StdDeserializer import com.kakao.shaded.jackson.databind.jsontype.TypeDeserializer import com.kakao.shaded.jackson.module.scala.JacksonModule import com.kakao.shaded.jackson.module.scala.deser.EitherDeserializer.ElementDeserializerConfig private class EitherDeserializer(javaType: JavaType, config: DeserializationConfig, leftDeserializerConfig: ElementDeserializerConfig, rightDeserializerConfig: ElementDeserializerConfig) extends StdDeserializer[Either[AnyRef, AnyRef]](classOf[Either[AnyRef, AnyRef]]) with ContextualDeserializer { override def createContextual(ctxt: DeserializationContext, property: BeanProperty): JsonDeserializer[Either[AnyRef, AnyRef]] = { def deserializerConfigFor(param: Int, inType: JavaType, property: BeanProperty): ElementDeserializerConfig = { val containedType = javaType.containedType(param) val paramDeserializer = Option( ctxt.findContextualValueDeserializer(containedType, property) ) val typeDeserializer = Option(property).map(p => BeanDeserializerFactory.instance.findPropertyTypeDeserializer(ctxt.getConfig, containedType, p.getMember) ) ElementDeserializerConfig( paramDeserializer, typeDeserializer ) } javaType.containedTypeCount match { case 2 => val leftDeserializerConfig = deserializerConfigFor(0, javaType, property) val rightDeserializerConfig = deserializerConfigFor(1, javaType, property) new EitherDeserializer(javaType, config, leftDeserializerConfig, rightDeserializerConfig) case _ => this } } private def deserializeValue(`type`: JsonToken, config: ElementDeserializerConfig, jp: JsonParser, ctxt: DeserializationContext) = (config, `type`) match { case (_, JsonToken.VALUE_NULL) => null case (ElementDeserializerConfig(Some(ed), Some(td)), _) => ed.deserializeWithType(jp, ctxt, td) case (ElementDeserializerConfig(Some(ed), _), _) => ed.deserialize(jp, ctxt) case (_, _) => throw ctxt.mappingException(javaType.getRawClass) } private def deserializeEither(jp: JsonParser, ctxt: DeserializationContext): Either[AnyRef, AnyRef] = { jp.nextToken() val key = jp.getCurrentName val `type` = jp.nextToken() val result = key match { case ("l") => Left(deserializeValue(`type`, leftDeserializerConfig, jp, ctxt)) case ("r") => Right(deserializeValue(`type`, rightDeserializerConfig, jp, ctxt)) case _ => throw ctxt.mappingException(javaType.getRawClass) } // consume END_OBJECT jp.nextToken() result } override def deserialize(jp: JsonParser, ctxt: DeserializationContext): Either[AnyRef, AnyRef] = deserializeEither(jp, ctxt) override def deserializeWithType(jp: JsonParser, ctxt: DeserializationContext, typeDeserializer: TypeDeserializer): Either[AnyRef, AnyRef] = deserializeEither(jp, ctxt) } private object EitherDeserializer { case class ElementDeserializerConfig(deserializer: Option[JsonDeserializer[AnyRef]], typeDeseriazlier: Option[TypeDeserializer]) object ElementDeserializerConfig { val empty = ElementDeserializerConfig(None, None) } } private object EitherDeserializerResolver extends Deserializers.Base { private val EITHER = classOf[Either[_, _]] override def findBeanDeserializer(`type`: JavaType, config: DeserializationConfig, beanDesc: BeanDescription) = { val rawClass = `type`.getRawClass if (!EITHER.isAssignableFrom(rawClass)) null else new EitherDeserializer( `type`, config, ElementDeserializerConfig.empty, 
ElementDeserializerConfig.empty ) } } trait EitherDeserializerModule extends JacksonModule { this += EitherDeserializerResolver }
kakao/mango
mango-shaded/src/main/scala/com/kakao/shaded/jackson/module/scala/deser/EitherDeserializer.scala
Scala
apache-2.0
4,019
package com.github.gdefacci.briscola.game import scalaz.{-\\/, \\/, \\/-} import com.github.gdefacci.briscola.player._ object GamePlayersValidator { def withValidPlayersAndTeams[T](gamePlayers:GamePlayers, playerById:PlayerId => Option[Player])(f:(Set[Player], Option[Teams]) => T):BriscolaError \\/ T = { val players = GamePlayers.getPlayers(gamePlayers) for ( _ <- checkPlayersNumber(players); players <- checkAllPlayersExists(playerById, players); teams <- checkValidTeams(gamePlayers); _ <- checkPlayersBelongOnlyOneTeam(teams) ) yield { f(players, teams) } } private def checkPlayersBelongOnlyOneTeam(optTeams:Option[Teams]):BriscolaError \\/ Unit = { lazy val success = \\/-(()) optTeams.map { teams => val allPlayers = teams.teams.flatMap(_.players) val playerTeams = allPlayers.map( pl => pl -> teams.teams.filter( t => t.players.contains(pl)) ) playerTeams.find( p => p._2.size > 1) match { case None => success case Some((player, teams)) => -\\/(PlayerCanHaveOnlyOneTeam(player, teams.map( t =>TeamInfo(t.name)))) } } getOrElse success } private def checkValidTeams(gamePlayers:GamePlayers):BriscolaError \\/ Option[Teams] = { val teams = GamePlayers.teams(gamePlayers) teams match { case r @ Some(teams) => if (teams.teams.size > GameState.MAX_TEAMS_NUMBER) -\\/(TooManyTeams(teams, GameState.MAX_TEAMS_NUMBER)) else if (teams.teams.size < GameState.MIN_TEAMS_NUMBER) -\\/(TooFewTeams(teams, GameState.MIN_TEAMS_NUMBER)) else { val playersNumber = teams.teams.head.players.size if (!teams.teams.forall( t => t.players.size == playersNumber )) -\\/(TeamsMustHaveSameNumberOfPlayers(teams)) else if (playersNumber > GameState.TEAM_MAX_PLAYERS_NUMBER) -\\/(TooManyPlayersPerTeam(teams, GameState.TEAM_MAX_PLAYERS_NUMBER)) else if (playersNumber < GameState.TEAM_MIN_PLAYERS_NUMBER) -\\/(TooFewPlayersPerTeam(teams, GameState.TEAM_MIN_PLAYERS_NUMBER)) else \\/-(r) } case _=> \\/-(None) } } private def checkPlayersNumber(players:Set[PlayerId]):BriscolaError \\/ Unit = { val playersNumber = players.size if (playersNumber > GameState.MAX_PLAYERS) -\\/(TooManyPlayers(players, GameState.MAX_PLAYERS)) else if (playersNumber < GameState.MIN_PLAYERS) -\\/(TooFewPlayers(players, GameState.MIN_PLAYERS)) else \\/-(()) } private def checkAllPlayersExists(playerById:PlayerId => Option[Player], players:Set[PlayerId]):PlayersDoNotExist \\/ Set[Player] = { players.foldLeft[PlayersDoNotExist \\/ Set[Player]](\\/-(Set.empty)) { (acc, i) => acc match { case err @ -\\/(PlayersDoNotExist(nonExistingPlayers)) => { playerById(i) match { case Some(p) => err case None => -\\/(PlayersDoNotExist(nonExistingPlayers + i)) } } case \\/-(players) => playerById(i) match { case Some(p) => \\/-(players + p) case None => -\\/(PlayersDoNotExist(Set(i))) } } } } }
gdefacci/briscola
ddd-briscola/src/main/scala/com/github/gdefacci/briscola/game/GamePlayersValidator.scala
Scala
bsd-3-clause
3,116
package com.benkolera.Rt

import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTimeZone,DateTime}
import scalaz._

package object Formatter {

  def fieldListToString( l: List[String] ) = {
    l.mkString(",\n ")
  }

  private val dtf = DateTimeFormat.forPattern("yyyy-MM-dd HH:mm:ss")

  def fieldDateTimeToString(tz:DateTimeZone )(dt: DateTime) = {
    dtf.print( dt.withZone( tz ) )
  }

  def fieldOptDateTimeToString( tz:DateTimeZone )( dt: Option[DateTime] ) = {
    dt.map( fieldDateTimeToString(tz) _ ).getOrElse( "Not set" )
  }

  def fieldsToContentString( ts: List[(String,String)] ) = {
    def tupleToString( t:(String,String) ) = {
      val valueEscaped = t._2.replace( "\n" , "\n " )
      s"${t._1}: $valueEscaped\n"
    }
    ts.foldLeft( Cord.empty )( _ ++ tupleToString(_) ).toString
  }

}
benkolera/scala-rt
src/main/scala/Rt/Formatter/package.scala
Scala
mit
834
/*
 * Copyright 2011-2022 GatlingCorp (https://gatling.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.gatling.core.check.css

import io.gatling.commons.validation._
import io.gatling.core.check._

import jodd.lagarto.dom.NodeSelector

private object CssExtractors {

  def find[X: NodeConverter](
      query: String,
      nodeAttribute: Option[String],
      occurrence: Int,
      selectors: CssSelectors
  ): FindCriterionExtractor[NodeSelector, (String, Option[String]), X] =
    new FindCriterionExtractor[NodeSelector, (String, Option[String]), X](
      "css",
      (query, nodeAttribute),
      occurrence,
      selectors.extractAll(_, (query, nodeAttribute)).lift(occurrence).success
    )

  def findAll[X: NodeConverter](
      query: String,
      nodeAttribute: Option[String],
      selectors: CssSelectors
  ): FindAllCriterionExtractor[NodeSelector, (String, Option[String]), X] =
    new FindAllCriterionExtractor[NodeSelector, (String, Option[String]), X](
      "css",
      (query, nodeAttribute),
      selectors.extractAll(_, (query, nodeAttribute)).liftSeqOption.success
    )

  def count(query: String, nodeAttribute: Option[String], selectors: CssSelectors): CountCriterionExtractor[NodeSelector, (String, Option[String])] =
    new CountCriterionExtractor[NodeSelector, (String, Option[String])](
      "css",
      (query, nodeAttribute),
      prepared => Some(selectors.extractAll[String](prepared, (query, nodeAttribute)).size).success
    )
}
gatling/gatling
gatling-core/src/main/scala/io/gatling/core/check/css/CssExtractors.scala
Scala
apache-2.0
2,006
/*
 * Copyright (c) 2014-2015 by its authors. Some rights reserved.
 * See the project homepage at: http://www.monifu.org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package monifu.reactive.internals.operators

import monifu.reactive.Observable
import monifu.reactive.exceptions.DummyException
import scala.concurrent.duration.Duration.Zero

object FilterSuite extends BaseOperatorSuite {
  def count(sourceCount: Int) = {
    sourceCount
  }

  def sum(sourceCount: Int): Long =
    sourceCount.toLong * (sourceCount + 1)

  def createObservable(sourceCount: Int) = {
    require(sourceCount > 0, "sourceCount should be strictly positive")
    Some {
      val o = if (sourceCount == 1)
        Observable.unit(2L).filter(_ % 2 == 0)
      else
        Observable.range(1, sourceCount * 2 + 1, 1).filter(_ % 2 == 0)

      Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
    }
  }

  def observableInError(sourceCount: Int, ex: Throwable) = {
    require(sourceCount > 0, "sourceCount should be strictly positive")
    Some {
      val ex = DummyException("dummy")
      val o = if (sourceCount == 1)
        createObservableEndingInError(Observable.unit(2L), ex)
          .filter(_ % 2 == 0)
      else
        createObservableEndingInError(Observable.range(1, sourceCount * 2 + 1, 1), ex)
          .filter(_ % 2 == 0)

      Sample(o, count(sourceCount), sum(sourceCount), Zero, Zero)
    }
  }

  def brokenUserCodeObservable(sourceCount: Int, ex: Throwable) = {
    require(sourceCount > 0, "sourceCount should be strictly positive")
    Some {
      val o = if (sourceCount == 1)
        Observable.unit(1L).filter(_ => throw ex)
      else
        Observable.range(1, sourceCount * 2 + 1, 1).filter { x =>
          if (x == sourceCount * 2) throw ex else x % 2 == 0
        }

      Sample(o, count(sourceCount-1), sum(sourceCount-1), Zero, Zero)
    }
  }
}
sergius/monifu
monifu/shared/src/test/scala/monifu/reactive/internals/operators/FilterSuite.scala
Scala
apache-2.0
2,435
package org.lanyard.dist.disc

import org.lanyard.dist.Distribution
import org.lanyard.random.RNG

case class Geometric( prob: Double ) extends Distribution[Int] {

  override def apply( value: Int ): Double = 0.0

  def mean: Double = 1 / prob

  def variance: Double = ( 1 - prob ) / (prob * prob)

  def random( source: RNG ): (Int, RNG) = ???

}
perian/Lanyard
src/main/scala/org/lanyard/dist/disc/Geometric.scala
Scala
gpl-2.0
350
package net.kolotyluk.akka.example

import akka.typed.{ActorRef, Behavior}
import akka.typed.scaladsl.Actor

import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global

import grizzled.slf4j.Logger
import net.kolotyluk.akka.example.Guardian.Spawn

object Brat extends Runnable {

  trait Message extends Main.Message
  final case class Count(count: Int) extends Message

  val logger = Logger[this.type]

  logger.info("Brat: initializing")

  val behavior: Behavior[Message] = brat

  def brat: Behavior[Message] = Actor.immutable[Message] { (actorCell, command) ⇒
    command match {
      case message@Count(count) ⇒
        val random = math.random()
        logger.info(random)
        if (random > 0.9) 10 / 0
        if (random < 0.1)
          Actor.stopped
        else {
          val cancelable = actorCell.schedule(1 second, actorCell.self, Count(0))
          Actor.same
        }
      case Main.Start() ⇒
        //println(s"Brat starting with $actorCell")
        //logger.info(s"brat received $message")
        val cancelable = actorCell.schedule(1 second, actorCell.self, Count(0))
        Actor.same
    }
  }

  override def run = {
    Main.system ! Spawn(brat.asInstanceOf[Behavior[Main.Message]], "brat")
    // val random = math.random()
    // logger.info(random)
    // if (random > 0.9) 10 / 0
  }
}
kolotyluk/akka-laboratory
src/main/scala/net/kolotyluk/akka/example/Brat.scala
Scala
apache-2.0
1,412
/** RLTools is a library for reinforcement learning methods.

    Copyright (C) 2013 Petteri Mehtala ([email protected])

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package core

/**
 * Defines the interface for the policies.
 */
trait Policy {

  // Defines the method to generate next action for given task and current values
  def next[S,A](task: RLGenericTask[S, A], currentValues: StateActionValues[S,A]): A

}
mehtala/RLTools
src/main/scala/core/Policy.scala
Scala
gpl-3.0
1,049
/**
 * Copyright (C) 2015 DANS - Data Archiving and Networked Services ([email protected])
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package nl.knaw.dans.easy.stage

import java.io.File
import java.nio.charset.StandardCharsets

import org.apache.commons.io.FileUtils._
import org.scalatest.matchers.{ MatchResult, Matcher }

/** See also <a href="http://www.scalatest.org/user_guide/using_matchers#usingCustomMatchers">CustomMatchers</a> */
trait CustomMatchers {
  class ContentMatcher(content: String) extends Matcher[File] {
    def apply(left: File): MatchResult = {
      def trimLines(s: String): String = s.split("\n").map(_.trim).mkString("\n")
      MatchResult(
        trimLines(readFileToString(left, StandardCharsets.UTF_8)).contains(trimLines(content)),
        s"$left did not contain: $content",
        s"$left contains $content"
      )
    }
  }

  /** usage example: new File(...) should containTrimmed("...") */
  def containTrimmed(content: String) = new ContentMatcher(content)
}

object CustomMatchers extends CustomMatchers
jo-pol/easy-stage-dataset
command/src/test/scala/nl.knaw.dans.easy.stage.command/CustomMatchers.scala
Scala
apache-2.0
1,567
package com.sksamuel.scapegoat.io import scala.xml.Unparsed import com.sksamuel.scapegoat.{Feedback, Levels} /** * @author Stephen Samuel */ object HtmlReportWriter extends ReportWriter { override protected def fileName: String = "scapegoat.html" private val css = """ | body { | font-family: 'Ubuntu', sans-serif; | padding: 0 15px; | } | | h1 { | color: #515151; | font-weight: 700; | } | | h3 { | color: #8a8a8a; | font-weight: 400; | } | | .warning { | background :#F1F3F2; | border-bottom-left-radius: 6px; | border-bottom-right-radius: 6px; | margin-bottom: 3px; | padding: 12px; | } | | .title { | color: #616161; | font-size: 16px; | } | | .source { | float: right; | font-style: italic; | color: #868686; | } | | .snippet { | padding-top: 8px; | color: #0C0C0C; | font-weight: 300; | font-size: 12px; | } | | .inspection { | padding-left: 10px; | font-style:italic; | color: #969696; | font-size: 12px; | } | | """.stripMargin private def header = <head> <title>Scapegoat Inspection Reporter</title>{ Unparsed( "<link href=\\"https://maxcdn.bootstrapcdn.com/bootstrap/3.2.0/css/bootstrap.min.css\\" rel=\\"stylesheet\\">" ) }{ Unparsed( """<link href='https://fonts.googleapis.com/css?family=Ubuntu:300,400,500,700,300italic,400italic,500italic,700italic' rel='stylesheet' type='text/css'>""" ) }{ Unparsed { """<link href='https://fonts.googleapis.com/css?family=Open+Sans:300italic,400italic,600italic,700italic,800italic,400,300,600,700,800' rel='stylesheet' type='text/css'>""" } }<style> {css} </style> </head> private def body(reporter: Feedback) = <body> <h1>Scapegoat Inspections</h1> <h3> Errors {reporter.warnings(Levels.Error).size.toString} Warnings {reporter.warnings(Levels.Warning).size.toString} Infos {reporter.warnings(Levels.Info).size.toString} </h3>{warnings(reporter)} </body> private def warnings(reporter: Feedback) = { reporter.warningsWithMinimalLevel.map { warning => val source = warning.sourceFileNormalized + ":" + warning.line <div class="warning"> <div class="source"> {source} </div> <div class="title"> { warning.level match { case Levels.Info => <span class="label label-info">Info</span> case Levels.Warning => <span class="label label-warning">Warning</span> case Levels.Error => <span class="label label-danger">Error</span> } }&nbsp;{warning.text}&nbsp; <span class="inspection"> {warning.inspection} </span> </div> <div> {warning.explanation} </div>{ warning.snippet match { case None => case Some(snippet) => <div class="snippet"> {snippet} </div> } } </div> } } private def toHTML(reporter: Feedback) = <html> {header}{body(reporter)} </html> override protected def generate(feedback: Feedback): String = toHTML(feedback).toString() }
sksamuel/scalac-scapegoat-plugin
src/main/scala/com/sksamuel/scapegoat/io/HtmlReportWriter.scala
Scala
apache-2.0
3,731
package chess

import org.specs2.mutable.Specification

class StatsTest extends Specification {

  def realMean(elts: Seq[Float]): Float = elts.sum / elts.size

  def realVar(elts: Seq[Float]): Float = {
    val mean = realMean(elts).toDouble
    (elts map { x => Math.pow(x - mean, 2) } sum).toFloat / (elts.size - 1)
  }

  def beApprox(comp: Float) = (f: Float) => {
    if (comp.isNaN) f.isNaN must beTrue
    else comp must beCloseTo(f +/- 0.001f * comp)
  }

  def beLike(comp: Stats) = (s: Stats) => {
    s.samples must_== comp.samples
    s.mean must beApprox(comp.mean)
    (s.variance, comp.variance) match {
      case (Some(sv), Some(cv)) => sv must beApprox(cv)
      case (sv, cv) => sv must_== cv
    }
  }

  "empty stats" should {
    "have good defaults" in {
      Stats.empty.variance must_== None
      Stats.empty.mean must_== 0f
      Stats.empty.samples must_== 0
    }
    "make Stats" in {
      "with good stats" in {
        Stats(5).samples must_== 1
        Stats(5).variance must_== None
        Stats(5).mean must_== 5f
      }
    }
  }

  "large values" should {
    // Tight data w/ large mean. Shuffled for Stats.
    val base = (1 to 100) ++ (1 to 100) ++ (1 to 200)
    val data = base map { _ + 1e5f }
    val shuffledData = base.sortWith(_ % 8 > _ % 8) map { _ + 1e5f }

    val statsN = Stats.empty record shuffledData

    "match actuals" in {
      statsN.mean must beApprox(realMean(data))
      statsN.variance.get must beApprox(realVar(data))
      statsN.samples must_== 400
    }
  }
}
ornicar/scalachess
src/test/scala/StatsTest.scala
Scala
mit
1,603
/** * Copyright (C) 2014 Orbeon, Inc. * * This program is free software; you can redistribute it and/or modify it under the terms of the * GNU Lesser General Public License as published by the Free Software Foundation; either version * 2.1 of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Lesser General Public License for more details. * * The full text of the license is available at http://www.gnu.org/copyleft/lesser.html */ package org.orbeon.oxf.fr.embedding import java.io.Writer import java.{util ⇒ ju} import javax.servlet.http.{HttpServletRequest, HttpServletResponse} import org.apache.commons.io.IOUtils import org.apache.http.client.CookieStore import org.apache.http.impl.client.BasicCookieStore import org.orbeon.oxf.common.OXFException import org.orbeon.oxf.fr.embedding.servlet.ServletEmbeddingContextWithResponse import org.orbeon.oxf.http.Headers._ import org.orbeon.oxf.http._ import org.orbeon.oxf.util.NetUtils._ import org.orbeon.oxf.util.ScalaUtils._ import org.orbeon.oxf.xml.XMLUtils import org.slf4j.LoggerFactory import scala.collection.JavaConverters._ import scala.collection.immutable object APISupport { import Private._ val Logger = LoggerFactory.getLogger("org.orbeon.embedding") val AllModes = List(New, Edit, View) val AllModesByName = AllModes map (a ⇒ a.name → a) toMap def proxyPage( baseURL : String, path : String, headers : immutable.Seq[(String, String)] = Nil, params : immutable.Seq[(String, String)] = Nil)( implicit ctx: EmbeddingContextWithResponse ): Unit = { val url = formRunnerURL(baseURL, path, embeddable = true) callService(RequestDetails(None, url, headers, params)) match { case content: StreamedContent ⇒ useAndClose(content)(writeResponseBody) case Redirect(_, _) ⇒ throw new UnsupportedOperationException } } def proxyServletResources( req : HttpServletRequest, res : HttpServletResponse, namespace : String, resourcePath: String ): Unit = withSettings(req, res.getWriter) { settings ⇒ implicit val ctx = new ServletEmbeddingContextWithResponse( req, Right(res), namespace, settings.orbeonPrefix, settings.httpClient ) val url = formRunnerURL(settings.formRunnerURL, resourcePath, embeddable = false) val contentFromRequest = req.getMethod == "POST" option StreamedContent( req.getInputStream, Option(req.getContentType), Some(req.getContentLength.toLong) filter (_ >= 0L), None ) proxyResource( RequestDetails( content = contentFromRequest, url = url, headers = proxyCapitalizeAndCombineHeaders(requestHeaders(req).to[List], request = true).to[List], params = Nil ) ) } def proxyResource(requestDetails: RequestDetails)(implicit ctx: EmbeddingContextWithResponse): Unit = { Logger.debug("proxying resource {}", requestDetails.url) val res = connectURL(requestDetails) ctx.setStatusCode(res.statusCode) res.content.contentType foreach (ctx.setHeader(Headers.ContentType, _)) proxyCapitalizeAndCombineHeaders(res.headers, request = false) foreach (ctx.setHeader _).tupled useAndClose(res.content)(writeResponseBody) } def formRunnerPath(app: String, form: String, mode: String, documentId: Option[String], query: Option[String]) = appendQueryString(s"/fr/$app/$form/$mode${documentId map ("/" +) getOrElse ""}", query getOrElse "") def formRunnerHomePath(query: Option[String]) = appendQueryString("/fr/", query getOrElse "") def formRunnerURL(baseURL: String, path: String, embeddable: 
Boolean) = appendQueryString(dropTrailingSlash(baseURL) + path, if(embeddable) "orbeon-embeddable=true" else "") def requestHeaders(req: HttpServletRequest) = for { name ← req.getHeaderNames.asInstanceOf[ju.Enumeration[String]].asScala values = req.getHeaders(name).asInstanceOf[ju.Enumeration[String]].asScala.toList } yield name → values // Match on headers in a case-insensitive way, but the header we sent follows the capitalization of the // header specified in the init parameter. def headersToForward(clientHeaders: List[(String, List[String])], configuredHeaders: Map[String, String]) = for { (name, value) ← proxyAndCombineRequestHeaders(clientHeaders) originalName ← configuredHeaders.get(name.toLowerCase) } yield originalName → value // Call the Orbeon service at the other end def callService(requestDetails: RequestDetails)(implicit ctx: EmbeddingContext): StreamedContentOrRedirect = { Logger.debug("proxying page {}", requestDetails.url) val cx = connectURL(requestDetails) if (isRedirectCode(cx.statusCode)) Redirect(cx.headers("Location").head, exitPortal = true) else cx.content } def writeResponseBody(content: Content)(implicit ctx: EmbeddingContextWithResponse): Unit = content.contentType map getContentTypeMediaType match { case Some(mediatype) if XMLUtils.isTextOrJSONContentType(mediatype) || XMLUtils.isXMLMediatype(mediatype) ⇒ // Text/JSON/XML content type: rewrite response content val encoding = content.contentType flatMap (t ⇒ Option(getContentTypeCharset(t))) getOrElse "utf-8" val contentAsString = useAndClose(content.inputStream)(IOUtils.toString(_, encoding)) val encodeForXML = XMLUtils.isXMLMediatype(mediatype) def decodeURL(encoded: String) = { val decodedURL = ctx.decodeURL(encoded) if (encodeForXML) XMLUtils.escapeXMLMinimal(decodedURL) else decodedURL } decodeWSRPContent( contentAsString, ctx.namespace, decodeURL, ctx.writer ) case _ ⇒ // All other types: just output useAndClose(content.inputStream)(IOUtils.copy(_, ctx.outputStream)) } def scopeSettings[T](req: HttpServletRequest, settings: EmbeddingSettings)(body: ⇒ T): T = { req.setAttribute(SettingsKey, settings) try body finally req.removeAttribute(SettingsKey) } def withSettings[T](req: HttpServletRequest, writer: ⇒ Writer)(body: EmbeddingSettings ⇒ T): Unit = Option(req.getAttribute(SettingsKey).asInstanceOf[EmbeddingSettings]) match { case Some(settings) ⇒ body(settings) case None ⇒ val msg = "ERROR: Orbeon Forms embedding filter is not configured." Logger.error(msg) writer.write(msg) } def nextNamespace(req: HttpServletRequest) = { val newValue = Option(req.getAttribute(LastNamespaceIndexKey).asInstanceOf[Integer]) match { case Some(value) ⇒ value + 1 case None ⇒ 0 } req.setAttribute(LastNamespaceIndexKey, newValue) NamespacePrefix + newValue } val NamespacePrefix = "o" private object Private { val SettingsKey = "orbeon.form-runner.filter-settings" val RemoteSessionIdKey = "orbeon.form-runner.remote-session-id" val LastNamespaceIndexKey = "orbeon.form-runner.last-namespace-index" // POST when we get RequestDetails for: // // - actions requests // - resources requests: Ajax requests, form posts, and uploads // // GET otherwise for: // // - render requests // - resources: typically image, CSS, JavaScript, etc. 
def connectURL(requestDetails: RequestDetails)(implicit ctx: EmbeddingContext) = ctx.httpClient.connect( url = recombineQuery(requestDetails.url, requestDetails.params), credentials = None, cookieStore = getOrCreateCookieStore, method = if (requestDetails.content.isEmpty) "GET" else "POST", headers = requestDetails.headersMapWithContentType + (Headers.OrbeonClient → List("portlet")), content = requestDetails.content ) // Parse a string containing WSRP encodings and encode the URLs and namespaces def decodeWSRPContent(content: String, ns: String, decodeURL: String ⇒ String, writer: Writer): Unit = { val stringLength = content.length var currentIndex = 0 var index = 0 import org.orbeon.oxf.externalcontext.WSRPURLRewriter.{decodeURL ⇒ _, _} while ({index = content.indexOf(BaseTag, currentIndex); index} != -1) { // Write up to the current mark writer.write(content, currentIndex, index - currentIndex) // Check if escaping is requested if (index + BaseTagLength * 2 <= stringLength && content.substring(index + BaseTagLength, index + BaseTagLength * 2) == BaseTag) { // Write escaped tag, update index and keep looking writer.write(BaseTag) currentIndex = index + BaseTagLength * 2 } else if (index < stringLength - BaseTagLength && content.charAt(index + BaseTagLength) == '?') { // URL encoding // Find the matching end mark val endIndex = content.indexOf(EndTag, index) if (endIndex == -1) throw new OXFException("Missing end tag for WSRP encoded URL.") val encodedURL = content.substring(index + StartTagLength, endIndex) currentIndex = endIndex + EndTagLength writer.write(decodeURL(encodedURL)) } else if (index < stringLength - BaseTagLength && content.charAt(index + BaseTagLength) == '_') { // Namespace encoding writer.write(ns) currentIndex = index + PrefixTagLength } else throw new OXFException("Invalid WSRP rewrite tagging.") } // Write remainder of string if (currentIndex < stringLength) writer.write(content, currentIndex, content.length - currentIndex) } def getOrCreateCookieStore(implicit ctx: EmbeddingContext) = Option(ctx.getSessionAttribute(RemoteSessionIdKey).asInstanceOf[CookieStore]) getOrElse { val newCookieStore = new BasicCookieStore ctx.setSessionAttribute(RemoteSessionIdKey, newCookieStore) newCookieStore } } }
ajw625/orbeon-forms
src/main/scala/org/orbeon/oxf/fr/embedding/APISupport.scala
Scala
lgpl-2.1
11,616
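A minimal standalone sketch of the Form Runner path construction used in the APISupport.scala file above; appendQueryString here is a simplified stand-in for the NetUtils helper, and the app/form/document values are invented for illustration.

object FormRunnerUrlSketch {

  // Simplified stand-in for the appendQueryString helper imported from NetUtils above
  private def appendQueryString(path: String, query: String): String =
    if (query.isEmpty) path else path + (if (path.contains("?")) "&" else "?") + query

  // Same shape as APISupport.formRunnerPath
  def formRunnerPath(app: String, form: String, mode: String, documentId: Option[String], query: Option[String]): String =
    appendQueryString(s"/fr/$app/$form/$mode${documentId map ("/" + _) getOrElse ""}", query getOrElse "")

  def main(args: Array[String]): Unit =
    // Prints: /fr/orbeon/bookshelf/edit/42?orbeon-embeddable=true
    println(formRunnerPath("orbeon", "bookshelf", "edit", Some("42"), Some("orbeon-embeddable=true")))
}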
package model

import org.specs2.mock.Mockito
import play.api.test.PlaySpecification

class ResponseParsingSpec extends PlaySpecification with Mockito {

  "ComposerResponse" should {

    val composerContentId = "a1b2c3d4"

    "extract composer content ID from CREATED response messages" in {
      val response = ComposerResponse.getComposerIdFromResponse(s"OK:${composerContentId};http://someurlorother.com;CREATED")
      response must equalTo(Some(composerContentId))
    }

    "extract composer content ID from MODIFIED response messages" in {
      val response = ComposerResponse.getComposerIdFromResponse(s"OK:${composerContentId};http://someurlorother.com;MODIFIED")
      response must equalTo(Some(composerContentId))
    }

    "deal with empty response messages" in {
      val response = ComposerResponse.getComposerIdFromResponse("")
      response must equalTo(None)
    }

    "deal with junk response messages" in {
      val response = ComposerResponse.getComposerIdFromResponse("some junk")
      response must equalTo(None)
    }
  }

  "R2Response" should {

    val r2ContentId = "456269969"

    "extract R2 content ID from CREATED response messages" in {
      val response = R2Response.getR2IdFromResponse(s"OK:${r2ContentId};http://someurlorother.com;CREATED")
      response must equalTo(Some(r2ContentId))
    }

    "extract R2 content ID from MODIFIED response messages" in {
      val response = R2Response.getR2IdFromResponse(s"OK:${r2ContentId};http://someurlorother.com;MODIFIED")
      response must equalTo(Some(r2ContentId))
    }

    "deal with empty response messages" in {
      val response = R2Response.getR2IdFromResponse("")
      response must equalTo(None)
    }

    "deal with junk response messages" in {
      val response = R2Response.getR2IdFromResponse("some junk")
      response must equalTo(None)
    }
  }
}
guardian/flex-content-migrator
test/model/ResponseParsingSpec.scala
Scala
mit
1,940
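The ComposerResponse and R2Response implementations under test are not part of this file, so the sketch below is only an assumption about what an extractor for the "OK:<id>;<url>;CREATED|MODIFIED" messages exercised above might look like.

object ResponseIdSketch {

  // Matches a whole response message of the form "OK:<id>;<url>;CREATED" or "...;MODIFIED"
  private val OkResponse = """OK:([^;]+);[^;]*;(CREATED|MODIFIED)""".r

  def getIdFromResponse(response: String): Option[String] = response match {
    case OkResponse(id, _) => Some(id)
    case _                 => None
  }
}

// ResponseIdSketch.getIdFromResponse("OK:a1b2c3d4;http://someurlorother.com;CREATED")  == Some("a1b2c3d4")
// ResponseIdSketch.getIdFromResponse("some junk")                                      == None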
package com.krux.hyperion.activity

import com.krux.hyperion.action.SnsAlarm
import com.krux.hyperion.adt.{ HInt, HDuration }
import com.krux.hyperion.precondition.Precondition
import com.krux.hyperion.resource.{ ResourceObject, Resource }

case class ActivityFields[A <: ResourceObject](
  runsOn: Resource[A],
  dependsOn: Seq[PipelineActivity[_]] = Seq.empty,
  preconditions: Seq[Precondition] = Seq.empty,
  onFailAlarms: Seq[SnsAlarm] = Seq.empty,
  onSuccessAlarms: Seq[SnsAlarm] = Seq.empty,
  onLateActionAlarms: Seq[SnsAlarm] = Seq.empty,
  maximumRetries: Option[HInt] = None,
  attemptTimeout: Option[HDuration] = None,
  lateAfterTimeout: Option[HDuration] = None,
  retryDelay: Option[HDuration] = None,
  failureAndRerunMode: Option[FailureAndRerunMode] = None,
  maxActiveInstances: Option[HInt] = None
)
sethyates/hyperion
core/src/main/scala/com/krux/hyperion/activity/ActivityFields.scala
Scala
apache-2.0
821
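A hypothetical sketch of how an activity type might thread the ActivityFields above through copy-based builder methods; MyActivity and its method names are invented for illustration and are not part of the hyperion source tree.

package com.krux.hyperion.activity

import com.krux.hyperion.action.SnsAlarm
import com.krux.hyperion.adt.HInt
import com.krux.hyperion.resource.ResourceObject

case class MyActivity[A <: ResourceObject](activityFields: ActivityFields[A]) {

  // Builder-style updates copy the shared field bundle instead of mutating it
  def withMaximumRetries(retries: HInt): MyActivity[A] =
    copy(activityFields = activityFields.copy(maximumRetries = Option(retries)))

  def whenFailed(alarms: SnsAlarm*): MyActivity[A] =
    copy(activityFields = activityFields.copy(onFailAlarms = activityFields.onFailAlarms ++ alarms))
}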
package com.sksamuel.elastic4s.bulk

import com.sksamuel.elastic4s.indexes.IndexApi

import scala.language.implicitConversions

trait BulkApi {
  this: IndexApi =>

  def bulk(requests: Iterable[BulkCompatibleRequest]): BulkRequest = BulkRequest(requests.toSeq)
  def bulk(requests: BulkCompatibleRequest*): BulkRequest = bulk(requests)
}
Tecsisa/elastic4s
elastic4s-core/src/main/scala/com/sksamuel/elastic4s/bulk/BulkApi.scala
Scala
apache-2.0
348
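A small usage sketch of the two bulk overloads above, assuming (as the unqualified references in the trait suggest) that BulkRequest and BulkCompatibleRequest live in the same com.sksamuel.elastic4s.bulk package; req1, req2 and reqs are placeholders for index requests built elsewhere with the index DSL.

import com.sksamuel.elastic4s.bulk.{BulkApi, BulkCompatibleRequest, BulkRequest}
import com.sksamuel.elastic4s.indexes.IndexApi

object BulkSketch extends BulkApi with IndexApi {

  // Varargs overload: handy when the requests are known statically
  def pair(req1: BulkCompatibleRequest, req2: BulkCompatibleRequest): BulkRequest =
    bulk(req1, req2)

  // Iterable overload: handy when the requests are computed
  def batch(reqs: Iterable[BulkCompatibleRequest]): BulkRequest =
    bulk(reqs)
}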
/*********************************************************************** * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.utils.stats import org.junit.runner.RunWith import org.specs2.mutable.Specification import org.specs2.runner.JUnitRunner @RunWith(classOf[JUnitRunner]) class CountStatTest extends Specification with StatTestHelper { def newStat(observe: Boolean = true): CountStat = { val stat = Stat(sft, s"Count()") if (observe) { features.foreach { stat.observe } } stat.asInstanceOf[CountStat] } "CountStat" should { "be empty initiallly" >> { val stat = newStat(observe = false) stat.counter mustEqual 0L stat.isEmpty must beTrue } "observe correct values" >> { val stat = newStat() stat.counter mustEqual 100L } "unobserve correct values" >> { val stat = newStat() stat.counter mustEqual 100L features.take(10).foreach(stat.unobserve) stat.counter mustEqual 90L } "serialize to json" >> { val stat = newStat() stat.toJson must beEqualTo("""{ "count": 100 }""").ignoreSpace } "serialize empty to json" >> { val stat = newStat(observe = false) stat.toJson must beEqualTo("""{ "count": 0 }""").ignoreSpace } "serialize and deserialize" >> { val stat = newStat() val packed = StatSerializer(sft).serialize(stat) val unpacked = StatSerializer(sft).deserialize(packed) unpacked.toJson mustEqual stat.toJson } "serialize and deserialize empty stat" >> { val stat = newStat(observe = false) val packed = StatSerializer(sft).serialize(stat) val unpacked = StatSerializer(sft).deserialize(packed) unpacked.toJson mustEqual stat.toJson } "deserialize as immutable value" >> { val stat = newStat() val packed = StatSerializer(sft).serialize(stat) val unpacked = StatSerializer(sft).deserialize(packed, immutable = true) unpacked.toJson mustEqual stat.toJson unpacked.clear must throwAn[Exception] unpacked.+=(stat) must throwAn[Exception] unpacked.observe(features.head) must throwAn[Exception] unpacked.unobserve(features.head) must throwAn[Exception] } "combine two states" >> { val stat = newStat() val stat2 = newStat(observe = false) features2.foreach { stat2.observe } stat2.counter mustEqual 100L stat += stat2 stat.counter mustEqual 200L stat2.counter mustEqual 100L } "clear" >> { val stat = newStat() stat.isEmpty must beFalse stat.clear() stat.counter mustEqual 0L stat.isEmpty must beTrue } } }
elahrvivaz/geomesa
geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/stats/CountStatTest.scala
Scala
apache-2.0
3,059
/* * Copyright 2001-2009 Artima, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.scalatest import events._ import Suite.formatterForSuiteAborted import Suite.formatterForSuiteCompleted import Suite.formatterForSuiteStarting /* java -Dorg.scalatest.BigSuite.size=5 -Dorg.scalatest.SuiteCompletedStatusReporter.max=100 -classpath scalatest-1.0-CLICKDEMO.jar:/usr/artima/scala/lib/scala-library.jar org.scalatest.tools.Runner -c4 -p "scalatest-1.0-CLICKDEMO-tests.jar" -oNCXEHLO -r org.scalatest.SuiteCompletedStatusReporter -s org.scalatest.BigSuite -s org.scalatest.BigSuite -s org.scalatest.BigSuite -s org.scalatest.BigSuite -s org.scalatest.BigSuite BigSuite.size determines how many suites will be in each BigSuite tree. I haven't taken time to figure out the function, but it looks like this: size => number of suites in the tree 1 => 2 2 => 5 3 => 16 4 => 65 5 => 326 6 => 1957 7 => 13700 Each -s org.scalatest.BigSuite will create one BigSuite instance using the size specified by the property. By saying -r org.scalatest.SuiteCompletedStatusReporter, you get a custom reporter that prints out a duration note to the standard output for every <configurable number> of SuiteCompleted events it receives. It defaults to 10, and can be set via the -Dorg.scalatest.SuiteCompletedStatusReporter.max=100 setting. 
So the knobs we can turn are: -cN N is the number of threads in the thread pool -Dorg.scalatest.BigSuite.size=M, M determines the number of suites in the tree via some mysterious function -s org.scalatest.BigSuite..., repeating this gets you more instances of these trees sized by M -Dorg.scalatest.SuiteCompletedStatusReporter.max=X, where X is the number of SuiteCompleted events between duration notes */ class BigSuite(nestedSuiteCount: Option[Int]) extends Suite { thisSuite => //def this() = this(None) override def nestedSuites: collection.immutable.IndexedSeq[Suite] = { def makeList(remaining: Int, soFar: List[Suite], nestedCount: Int): List[Suite] = { if (remaining == 0) soFar else makeList(remaining - 1, (new BigSuite(Some(nestedCount - 1)) :: soFar), nestedCount) } val nsList = nestedSuiteCount match { case None => val sizeString = System.getProperty("org.scalatest.BigSuite.size", "0") val size = try { sizeString.toInt } catch { case e: NumberFormatException => 0 } makeList(size, Nil, size) case Some(n) => if (n == 0) List() else { makeList(n, Nil, n) } } Vector.empty ++ nsList } def testNumber1() { val someFailures = System.getProperty("org.scalatest.BigSuite.someFailures", "") nestedSuiteCount match { case Some(0) if someFailures == "true" => assert(1 + 1 === 3) case _ => assert(1 + 1 === 2) } } def testNumber2() { assert(1 + 1 === 2) } def testNumber3() { assert(1 + 1 === 2) } def testNumber4() { assert(1 + 1 === 2) } def testNumber5() { assert(1 + 1 === 2) } def testNumber6() { assert(1 + 1 === 2) } def testNumber7() { assert(1 + 1 === 2) } def testNumber8() { assert(1 + 1 === 2) } def testNumber9() { assert(1 + 1 === 2) } def testNumber10() { assert(1 + 1 === 2) } def testNumber11() { assert(1 + 1 === 2) } def testNumber12() { assert(1 + 1 === 2) } def testNumber13() { assert(1 + 1 === 2) } def testNumber14() { assert(1 + 1 === 2) } def testNumber15() { assert(1 + 1 === 2) } def testNumber16() { assert(1 + 1 === 2) } def testNumber17() { assert(1 + 1 === 2) } def testNumber18() { assert(1 + 1 === 2) } def testNumber19() { assert(1 + 1 === 2) } def testNumber20() { assert(1 + 1 === 2) } def testNumber21() { assert(1 + 1 === 2) } def testNumber22() { assert(1 + 1 === 2) } def testNumber23() { assert(1 + 1 === 2) } def testNumber24() { assert(1 + 1 === 2) } def testNumber25() { assert(1 + 1 === 2) } def testNumber26() { assert(1 + 1 === 2) } def testNumber27() { assert(1 + 1 === 2) } def testNumber28() { assert(1 + 1 === 2) } def testNumber29() { assert(1 + 1 === 2) } def testNumber30() { assert(1 + 1 === 2) } def testNumber31() { assert(1 + 1 === 2) } def testNumber32() { assert(1 + 1 === 2) } def testNumber33() { assert(1 + 1 === 2) } def testNumber34() { assert(1 + 1 === 2) } def testNumber35() { assert(1 + 1 === 2) } def testNumber36() { assert(1 + 1 === 2) } def testNumber37() { assert(1 + 1 === 2) } def testNumber38() { assert(1 + 1 === 2) } def testNumber39() { assert(1 + 1 === 2) } def testNumber40() { assert(1 + 1 === 2) } def testNumber41() { assert(1 + 1 === 2) } def testNumber42() { assert(1 + 1 === 2) } def testNumber43() { assert(1 + 1 === 2) } def testNumber44() { assert(1 + 1 === 2) } def testNumber45() { assert(1 + 1 === 2) } def testNumber46() { assert(1 + 1 === 2) } def testNumber47() { assert(1 + 1 === 2) } def testNumber48() { assert(1 + 1 === 2) } def testNumber49() { assert(1 + 1 === 2) } def testNumber50() { assert(1 + 1 === 2) } def testNumber51() { assert(1 + 1 === 2) } def testNumber52() { assert(1 + 1 === 2) } def testNumber53() { assert(1 + 1 === 2) } 
def testNumber54() { assert(1 + 1 === 2) } def testNumber55() { assert(1 + 1 === 2) } def testNumber56() { assert(1 + 1 === 2) } def testNumber57() { assert(1 + 1 === 2) } def testNumber58() { assert(1 + 1 === 2) } def testNumber59() { assert(1 + 1 === 2) } def testNumber60() { assert(1 + 1 === 2) } def testNumber61() { assert(1 + 1 === 2) } def testNumber62() { assert(1 + 1 === 2) } def testNumber63() { assert(1 + 1 === 2) } def testNumber64() { assert(1 + 1 === 2) } def testNumber65() { assert(1 + 1 === 2) } def testNumber66() { assert(1 + 1 === 2) } def testNumber67() { assert(1 + 1 === 2) } def testNumber68() { assert(1 + 1 === 2) } def testNumber69() { assert(1 + 1 === 2) } def testNumber70() { assert(1 + 1 === 2) } def testNumber71() { assert(1 + 1 === 2) } def testNumber72() { assert(1 + 1 === 2) } def testNumber73() { assert(1 + 1 === 2) } def testNumber74() { assert(1 + 1 === 2) } def testNumber75() { assert(1 + 1 === 2) } def testNumber76() { assert(1 + 1 === 2) } def testNumber77() { assert(1 + 1 === 2) } def testNumber78() { assert(1 + 1 === 2) } def testNumber79() { assert(1 + 1 === 2) } def testNumber80() { assert(1 + 1 === 2) } def testNumber81() { assert(1 + 1 === 2) } def testNumber82() { assert(1 + 1 === 2) } def testNumber83() { assert(1 + 1 === 2) } def testNumber84() { assert(1 + 1 === 2) } def testNumber85() { assert(1 + 1 === 2) } def testNumber86() { assert(1 + 1 === 2) } def testNumber87() { assert(1 + 1 === 2) } def testNumber88() { assert(1 + 1 === 2) } def testNumber89() { assert(1 + 1 === 2) } def testNumber90() { assert(1 + 1 === 2) } def testNumber91() { assert(1 + 1 === 2) } def testNumber92() { assert(1 + 1 === 2) } def testNumber93() { assert(1 + 1 === 2) } def testNumber94() { assert(1 + 1 === 2) } def testNumber95() { assert(1 + 1 === 2) } def testNumber96() { assert(1 + 1 === 2) } def testNumber97() { assert(1 + 1 === 2) } def testNumber98() { assert(1 + 1 === 2) } def testNumber99() { assert(1 + 1 === 2) } def testNumber100() { assert(1 + 1 === 2) } }
hubertp/scalatest
src/test/scala/org/scalatest/BigSuite.scala
Scala
apache-2.0
8,527
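The size-to-suite-count table in the BigSuite comment above ("I haven't taken time to figure out the function") appears to follow a simple recurrence: a BigSuite of size n nests n BigSuites of size n - 1 plus itself, so f(0) = 1 and f(n) = n * f(n - 1) + 1. A small sketch that reproduces the listed values:

def suitesInTree(size: Int): BigInt =
  if (size == 0) BigInt(1) else suitesInTree(size - 1) * size + 1

// (1 to 7).map(suitesInTree)  ==>  Vector(2, 5, 16, 65, 326, 1957, 13700)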
package io.finch

import java.util.UUID

class ParamSpec extends FinchSpec {

  behavior of "param*"

  def withParam(k: String)(v: String): Input = Input.get("/", k -> v)

  checkAll("Param[String]", EndpointLaws[String](paramOption("x"))(withParam("x")).evaluating)
  checkAll("Param[Int]", EndpointLaws[Int](paramOption("x"))(withParam("x")).evaluating)
  checkAll("Param[Long]", EndpointLaws[Long](paramOption("x"))(withParam("x")).evaluating)
  checkAll("Param[Boolean]", EndpointLaws[Boolean](paramOption("x"))(withParam("x")).evaluating)
  checkAll("Param[Float]", EndpointLaws[Float](paramOption("x"))(withParam("x")).evaluating)
  checkAll("Param[Double]", EndpointLaws[Double](paramOption("x"))(withParam("x")).evaluating)
  checkAll("Param[UUID]", EndpointLaws[UUID](paramOption("x"))(withParam("x")).evaluating)
}
ilya-murzinov/finch
core/src/test/scala/io/finch/ParamSpec.scala
Scala
apache-2.0
826
/* * Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com> */ package scalaguide.ws.scalaws import akka.Done import akka.actor.ActorSystem import akka.stream.ActorMaterializer import play.api.{Environment, Mode} import play.api.inject.guice.GuiceApplicationBuilder import play.api.libs.ws.ahc._ import play.api.test._ import java.io._ import org.junit.runner.RunWith import org.specs2.runner.JUnitRunner import org.specs2.specification.AfterAll //#dependency import javax.inject.Inject import scala.concurrent.Future import scala.concurrent.duration._ import play.api.mvc._ import play.api.libs.ws._ import play.api.http.HttpEntity import akka.actor.ActorSystem import akka.stream.ActorMaterializer import akka.stream.scaladsl._ import akka.util.ByteString import scala.concurrent.ExecutionContext class Application @Inject() (ws: WSClient) extends Controller { } //#dependency // #scalaws-person case class Person(name: String, age: Int) // #scalaws-person /** * NOTE: the format here is because we cannot define a fake application in a new WithServer at once, as we run into a * JVM implementation issue. */ @RunWith(classOf[JUnitRunner]) class ScalaWSSpec extends PlaySpecification with Results with AfterAll { // #scalaws-context-injected // Configure with a custom execution context from akka.dispatchers.lookup() class MyExecutionContext(ec: ExecutionContext) class PersonService @Inject()(ec: MyExecutionContext) { // ... } // #scalaws-context-injected val url = s"http://localhost:$testServerPort/" val system = ActorSystem() implicit val materializer = ActorMaterializer()(system) def afterAll(): Unit = system.terminate() def withSimpleServer[T](block: WSClient => T): T = withServer { case _ => Action(Ok) }(block) def withServer[T](routes: (String, String) => Handler)(block: WSClient => T): T = { val app = GuiceApplicationBuilder().configure("play.http.filters" -> "play.api.http.NoHttpFilters").appRoutes(a => { case (method, path) => routes(method, path) }).build() running(TestServer(testServerPort, app))(block(app.injector.instanceOf[WSClient])) } def writeFile(file: File, content: String) = { file.getParentFile.mkdirs() val out = new FileWriter(file) try { out.write(content) } finally { out.close() } } /** * A source that produces a "large" result. * * In this case, 9 chunks, each containing abcdefghij repeated 100 times. 
*/ val largeSource: Source[ByteString, _] = { val source = Source.single(ByteString("abcdefghij" * 100)) (1 to 9).foldLeft(source){(acc, _) => (acc ++ source)} } "WS" should { import scala.concurrent.ExecutionContext.Implicits.global "allow making a request" in withSimpleServer { ws => //#simple-holder val request: WSRequest = ws.url(url) //#simple-holder //#complex-holder val complexRequest: WSRequest = request.withHeaders("Accept" -> "application/json") .withRequestTimeout(10000.millis) .withQueryString("search" -> "play") //#complex-holder //#holder-get val futureResponse: Future[WSResponse] = complexRequest.get() //#holder-get await(futureResponse).status must_== 200 } "allow making an authenticated request" in withSimpleServer { ws => val user = "user" val password = "password" val response = //#auth-request ws.url(url).withAuth(user, password, WSAuthScheme.BASIC).get() //#auth-request await(response).status must_== 200 } "allow following redirects" in withSimpleServer { ws => val response = //#redirects ws.url(url).withFollowRedirects(true).get() //#redirects await(response).status must_== 200 } "allow setting a query string" in withSimpleServer { ws => val response = //#query-string ws.url(url).withQueryString("paramKey" -> "paramValue").get() //#query-string await(response).status must_== 200 } "allow setting headers" in withSimpleServer { ws => val response = //#headers ws.url(url).withHeaders("headerKey" -> "headerValue").get() //#headers await(response).status must_== 200 } "allow setting the content type" in withSimpleServer { ws => val xmlString = "<foo></foo>" val response = //#content-type ws.url(url).withHeaders("Content-Type" -> "application/xml").post(xmlString) //#content-type await(response).status must_== 200 } "allow setting the virtual host" in withSimpleServer { ws => val response = //#virtual-host ws.url(url).withVirtualHost("192.168.1.1").get() //#virtual-host await(response).status must_== 200 } "allow setting the request timeout" in withSimpleServer { ws => val response = //#request-timeout ws.url(url).withRequestTimeout(5000.millis).get() //#request-timeout await(response).status must_== 200 } "when posting data" should { "post with form url encoded body" in withServer { case ("POST", "/") => Action(BodyParsers.parse.formUrlEncoded)(r => Ok(r.body("key").head)) case other => Action { NotFound } } { ws => val response = //#url-encoded ws.url(url).post(Map("key" -> Seq("value"))) //#url-encoded await(response).body must_== "value" } "post with multipart/form encoded body" in withServer { case("POST", "/") => Action(BodyParsers.parse.multipartFormData)(r => Ok(r.body.asFormUrlEncoded("key").head)) case other => Action { NotFound } } { ws => import play.api.mvc.MultipartFormData._ val response = //#multipart-encoded ws.url(url).post(Source.single(DataPart("key", "value"))) //#multipart-encoded await(response).body must_== "value" } "post with multipart/form encoded body from a file" in withServer { case("POST", "/") => Action(BodyParsers.parse.multipartFormData){r => val file = r.body.file("hello").head Ok(scala.io.Source.fromFile(file.ref).mkString) } case other => Action { NotFound } } { ws => val tmpFile = new File("/tmp/picture/tmpformuploaded") writeFile(tmpFile, "world") import play.api.mvc.MultipartFormData._ val response = //#multipart-encoded2 ws.url(url).post(Source(FilePart("hello", "hello.txt", Option("text/plain"), FileIO.fromPath(tmpFile.toPath)) :: DataPart("key", "value") :: List())) //#multipart-encoded2 await(response).body must_== "world" } "post 
with JSON body" in withServer { case ("POST", "/") => Action(BodyParsers.parse.json)(r => Ok(r.body)) case other => Action { NotFound } } { ws => // #scalaws-post-json import play.api.libs.json._ val data = Json.obj( "key1" -> "value1", "key2" -> "value2" ) val futureResponse: Future[WSResponse] = ws.url(url).post(data) // #scalaws-post-json await(futureResponse).json must_== data } "post with XML data" in withServer { case ("POST", "/") => Action(BodyParsers.parse.xml)(r => Ok(r.body)) case other => Action { NotFound } } { ws => // #scalaws-post-xml val data = <person> <name>Steve</name> <age>23</age> </person> val futureResponse: Future[WSResponse] = ws.url(url).post(data) // #scalaws-post-xml await(futureResponse).xml must_== data } } "when processing a response" should { "handle as JSON" in withServer { case ("GET", "/") => Action { import play.api.libs.json._ implicit val personWrites = Json.writes[Person] Ok(Json.obj("person" -> Person("Steve", 23))) } case other => Action { NotFound } } { ws => // #scalaws-process-json val futureResult: Future[String] = ws.url(url).get().map { response => (response.json \ "person" \ "name").as[String] } // #scalaws-process-json await(futureResult) must_== "Steve" } "handle as JSON with an implicit" in withServer { case ("GET", "/") => Action { import play.api.libs.json._ implicit val personWrites = Json.writes[Person] Ok(Json.obj("person" -> Person("Steve", 23))) } case other => Action { NotFound } } { ws => // #scalaws-process-json-with-implicit import play.api.libs.json._ implicit val personReads = Json.reads[Person] val futureResult: Future[JsResult[Person]] = ws.url(url).get().map { response => (response.json \ "person").validate[Person] } // #scalaws-process-json-with-implicit val actual = await(futureResult) actual.asOpt must beSome[Person].which { person => person.age must beEqualTo(23) person.name must beEqualTo("Steve") } } "handle as XML" in withServer { case ("GET", "/") => Action { Ok( """<?xml version="1.0" encoding="utf-8"?> |<wrapper><message status="OK">Hello</message></wrapper> """.stripMargin).as("text/xml") } case other => Action { NotFound } } { ws => // #scalaws-process-xml val futureResult: Future[scala.xml.NodeSeq] = ws.url(url).get().map { response => response.xml \ "message" } // #scalaws-process-xml await(futureResult).text must_== "Hello" } "handle as stream" in withServer { case ("GET", "/") => Action(Ok.chunked(largeSource)) case other => Action { NotFound } } { ws => //#stream-count-bytes // Make the request val futureResponse: Future[StreamedResponse] = ws.url(url).withMethod("GET").stream() val bytesReturned: Future[Long] = futureResponse.flatMap { res => // Count the number of bytes returned res.body.runWith(Sink.fold[Long, ByteString](0L){ (total, bytes) => total + bytes.length }) } //#stream-count-bytes await(bytesReturned) must_== 10000l } "stream to a file" in withServer { case ("GET", "/") => Action(Ok.chunked(largeSource)) case other => Action { NotFound } } { ws => val file = File.createTempFile("stream-to-file-", ".txt") try { //#stream-to-file // Make the request val futureResponse: Future[StreamedResponse] = ws.url(url).withMethod("GET").stream() val downloadedFile: Future[File] = futureResponse.flatMap { res => val outputStream = java.nio.file.Files.newOutputStream(file.toPath) // The sink that writes to the output stream val sink = Sink.foreach[ByteString] { bytes => outputStream.write(bytes.toArray) } // materialize and run the stream res.body.runWith(sink).andThen { case result => // Close the output 
stream whether there was an error or not outputStream.close() // Get the result or rethrow the error result.get }.map(_ => file) } //#stream-to-file await(downloadedFile) must_== file } finally { file.delete() } } "stream to a result" in withServer { case ("GET", "/") => Action(Ok.chunked(largeSource)) case other => Action { NotFound } } { ws => //#stream-to-result def downloadFile = Action.async { // Make the request ws.url(url).withMethod("GET").stream().map { case StreamedResponse(response, body) => // Check that the response was successful if (response.status == 200) { // Get the content type val contentType = response.headers.get("Content-Type").flatMap(_.headOption) .getOrElse("application/octet-stream") // If there's a content length, send that, otherwise return the body chunked response.headers.get("Content-Length") match { case Some(Seq(length)) => Ok.sendEntity(HttpEntity.Streamed(body, Some(length.toLong), Some(contentType))) case _ => Ok.chunked(body).as(contentType) } } else { BadGateway } } } //#stream-to-result val file = File.createTempFile("stream-to-file-", ".txt") await( downloadFile(FakeRequest()) .flatMap(_.body.dataStream.runFold(0l)((t, b) => t + b.length)) ) must_== 10000l file.delete() } "stream when request is a PUT" in withServer { case ("PUT", "/") => Action(Ok.chunked(largeSource)) case other => Action { NotFound } } { ws => //#stream-put val futureResponse: Future[StreamedResponse] = ws.url(url).withMethod("PUT").withBody("some body").stream() //#stream-put val bytesReturned: Future[Long] = futureResponse.flatMap { res => res.body.runWith(Sink.fold[Long, ByteString](0L){ (total, bytes) => total + bytes.length }) } //#stream-count-bytes await(bytesReturned) must_== 10000l } "stream request body" in withServer { case ("PUT", "/") => Action(Ok("")) case other => Action { NotFound } } { ws => def largeImageFromDB: Source[ByteString, _] = largeSource //#scalaws-stream-request val wsResponse: Future[WSResponse] = ws.url(url) .withBody(StreamedBody(largeImageFromDB)).execute("PUT") //#scalaws-stream-request await(wsResponse).status must_== 200 } } "work with for comprehensions" in withServer { case ("GET", "/one") => Action { Ok(s"http://localhost:$testServerPort/two") } case ("GET", "/two") => Action { Ok(s"http://localhost:$testServerPort/three") } case ("GET", "/three") => Action { Ok("finished!") } case other => Action { NotFound } } { ws => val urlOne = s"http://localhost:$testServerPort/one" val exceptionUrl = s"http://localhost:$testServerPort/fallback" // #scalaws-forcomprehension val futureResponse: Future[WSResponse] = for { responseOne <- ws.url(urlOne).get() responseTwo <- ws.url(responseOne.body).get() responseThree <- ws.url(responseTwo.body).get() } yield responseThree futureResponse.recover { case e: Exception => val exceptionData = Map("error" -> Seq(e.getMessage)) ws.url(exceptionUrl).post(exceptionData) } // #scalaws-forcomprehension await(futureResponse).body must_== "finished!" 
} "map to async result" in withSimpleServer { ws => //#async-result def wsAction = Action.async { ws.url(url).get().map { response => Ok(response.body) } } status(wsAction(FakeRequest())) must_== OK //#async-result } "allow simple programmatic configuration" in new WithApplication() { //#simple-ws-custom-client import play.api.libs.ws.ahc._ // usually injected through @Inject()(implicit mat: Materializer) implicit val mat: akka.stream.Materializer = app.materializer val wsClient = AhcWSClient() //#simple-ws-custom-client wsClient.close() ok } "allow programmatic configuration" in new WithApplication() { //#ws-custom-client import com.typesafe.config.ConfigFactory import play.api._ import play.api.libs.ws._ import play.api.libs.ws.ahc._ val configuration = Configuration.reference ++ Configuration(ConfigFactory.parseString( """ |ws.followRedirects = true """.stripMargin)) // If running in Play, environment should be injected val environment = Environment(new File("."), this.getClass.getClassLoader, Mode.Prod) val wsConfig = AhcWSClientConfigFactory.forConfig(configuration.underlying, environment.classLoader) val wsClient: WSClient = AhcWSClient(wsConfig) //#ws-custom-client //#close-client wsClient.close() //#close-client ok } "grant access to the underlying client" in withSimpleServer { ws => //#underlying import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient val client: AsyncHttpClient = ws.underlying //#underlying ok } "use logging" in withSimpleServer { ws => // #curl-logger-filter ws.url(s"http://localhost:$testServerPort") .withRequestFilter(AhcCurlRequestLogger()) .put(Map("key" -> Seq("value"))) // #curl-logger-filter ok } } }
wsargent/playframework
documentation/manual/working/scalaGuide/main/ws/code/ScalaWSSpec.scala
Scala
apache-2.0
17,577
/* * Copyright (c) 2009, Ken Faulkner * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Ken Faulkner nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package azurestorage.DAO import azurestorage.Datatypes._ import org.apache.commons.httpclient._ import org.apache.commons.httpclient.methods._ import org.apache.commons.httpclient.params.HttpMethodParams import org.apache.commons.httpclient.util._ import org.apache.commons.httpclient.util.DateUtil import scala.collection.mutable.ListBuffer import org.apache.commons.httpclient.Header import java.io.BufferedReader import java.io.InputStreamReader import org.apache.commons.codec.binary.Base64._ import org.apache.commons.codec.binary.Base64 import scala.collection.mutable.HashMap import scala.collection.mutable.Map import org.apache.commons.httpclient.methods.ByteArrayRequestEntity import net.lag.configgy.Configgy import net.lag.logging.Logger import scala.xml.XML import java.text.SimpleDateFormat import java.util.Date import org.joda.time.DateTime import org.joda.time.DateTimeZone class AzureStorageContainerDAO { val baseBlobURL = ".blob.core.windows.net" val log = Logger.get private def parseContainerList( xml: String ): List[Container] = { log.info("AzureStorageContainerDAO::parseContainerList start") var l = List[Container]() var xmlNode = XML.loadString( xml ) var containerList = xmlNode \\ "Container" for ( container <- containerList ) { // dont bother with last mod time or etag *yet* var c = new Container() c.name = (container \ "Name").text c.url = ( container \ "Url").text //l += c 2.7 l ::= c } return l } def listContainers( accountName:String, key:String ): ( Status, List[Container] ) = { log.info("AzureStorageContainerDAO::listContainers start") var url = "http://"+accountName+baseBlobURL+"/?comp=list" var result = List[Container]() var client = new HttpClient() var method = new GetMethod( url ) var canonicalResource = "/"+accountName+"/?comp=list" AzureStorageCommon.populateMethod( method, key, accountName, canonicalResource, null ) // setup proxy. 
AzureStorageCommon.setupProxy( client ) var status = new Status() var res = client.executeMethod( method ) if ( res == StatusCodes.LIST_CONTAINERS_SUCCESS ) { status.successful = true } var responseBody = method.getResponseBody() // warning magic magic magic!!! but seems to be a bug where I get crap at the beginning of the response! var subArray = responseBody.drop(3) var subsubArray = subArray.toArray var subArrayAsString = new String( subsubArray ) result = parseContainerList(subArrayAsString) return (status, result ) } def createContainer( accountName:String, key:String, container:String ): Status = { log.info("AzureStorageContainerDAO::createContainer start") var url = "http://"+accountName+baseBlobURL+"/"+container+"?restype=container" var status = new Status() var client = new HttpClient() var method = new PutMethod( url ) var canonicalResource = "/"+accountName+"/" + container + "\nrestype:container" //var canonicalResource = "/"+accountName+"/" + container + "\n" AzureStorageCommon.populateMethod( method, key, accountName, canonicalResource, null ) // setup proxy. AzureStorageCommon.setupProxy( client ) var res = client.executeMethod( method ) log.debug("res is " + res.toString() ) status.code = res var responseBody = method.getResponseBodyAsString() log.debug("response body " + responseBody) if (res == StatusCodes.CREATE_CONTAINER_SUCCESS ) { status.successful = true } return status } def deleteContainer( accountName:String, key:String, container:String ): Status = { log.info("AzureStorageContainerDAO::deleteContainer start") var url = "http://"+accountName+baseBlobURL+"/"+container var status = new Status() var client = new HttpClient() var method = new DeleteMethod( url ) var canonicalResource = "/"+accountName+"/" + container AzureStorageCommon.populateMethod( method, key, accountName, canonicalResource, null ) // setup proxy. AzureStorageCommon.setupProxy( client ) var res = client.executeMethod( method ) status.code = res if (res == StatusCodes.DELETE_CONTAINER_SUCCESS ) { status.successful = true } return status } def setContainerMetadata( accountName:String, key:String, container:String, keyValuePairs: Map[ String, String] ): Status = { log.info("AzureStorageContainerDAO::setContainerMetadata start") var canonicalResource = "/"+accountName+"/"+container+"?comp=metadata" var url = "http://"+accountName+baseBlobURL+"/"+container+"?restype=container&comp=metadata" var status = new Status() var client = new HttpClient() var method = new PutMethod( url ) keyValuePairs += "x-ms-version" -> "2009-09-19" AzureStorageCommon.addMetadataToMethod( method, keyValuePairs ) AzureStorageCommon.populateMethod( method, key, accountName, canonicalResource, null ) // setup proxy. AzureStorageCommon.setupProxy( client ) var res = client.executeMethod( method ) var responseBody = method.getResponseBodyAsString() status.code = res if (res == StatusCodes.SET_CONTAINER_METADATA_SUCCESS) { status.successful = true } return status } def getContainerMetadata( accountName:String, key:String, container:String ): ( Status, Map[String, String]) = { log.info("AzureStorageContainerDAO::getContainerMetadata start") var canonicalResource = "/"+accountName+"/"+container var url = "http://"+accountName+baseBlobURL+canonicalResource var status = new Status() var client = new HttpClient() var method = new GetMethod( url ) AzureStorageCommon.populateMethod( method, key, accountName, canonicalResource, null ) // setup proxy. 
AzureStorageCommon.setupProxy( client ) var res = client.executeMethod( method ) var h = method.getResponseHeaders() var metadata = AzureStorageCommon.extractMetadata( h, "" ) status.code = res if (res == StatusCodes.GET_CONTAINER_METADATA_SUCCESS) { status.successful = true } return ( status, metadata) } def generateDateString( d:Date ): String = { val format = new SimpleDateFormat("yyyy-MM-dd'T'hh:mm:ss") var newDate = format.format( d ) + "Z" return newDate } def generateACLPermission( acl:ContainerACL ): String = { var perm = "" // figure out the scala shorthand for this later. if ( acl.canRead ) { perm += "r" } if ( acl.canWrite ) { perm += "w" } if ( acl.canDelete ) { perm += "d" } return perm } def generateACLXML( ACLList:List[ ContainerACL ] ): String = { var xml = <SignedIdentifiers> { for (acl <- ACLList ) yield <SignedIdentifier> <Id>{acl.uid}</Id> <AccessPolicy> <Start>{ generateDateString( acl.startTime )}</Start> <Expiry>{generateDateString( acl.endTime )}</Expiry> <Permission>{generateACLPermission( acl ) }</Permission> </AccessPolicy> </SignedIdentifier> } </SignedIdentifiers> return xml.toString() } // see if I can make a generic set. def genericSet( method:HttpMethodBase, accountName:String, key:String, container: String, canonicalResourceExtra: String, metaData:HashMap[String, String], data: Array[Byte] ): Status = { log.info("AzureStorageBlobDAO::genericSet start") var status = new Status() // for some reason, the canonicalResourceExtra needs a = for the URL but a : for the canonialResource. // go figure.... var canonicalResource = "/"+accountName+"/"+container var url = "http://"+accountName+baseBlobURL+"/"+container if ( canonicalResourceExtra != "" ) { canonicalResource += "\n"+canonicalResourceExtra.replace("=",":").replace("&","\n") url += "?"+canonicalResourceExtra } var client = new HttpClient() method.setURI( new URI( url ) ) AzureStorageCommon.addMetadataToMethod( method, metaData ) AzureStorageCommon.populateMethod( method, key, accountName, canonicalResource, data ) // setup proxy. 
AzureStorageCommon.setupProxy( client ) var res = client.executeMethod( method ) var responseBody = method.getResponseBodyAsString() log.debug("response body " + responseBody) status.code = res return status } def setContainerACL( accountName:String, key:String, containerName:String, ACLList:List[ ContainerACL ], publicAccess:Boolean ): Status = { log.info("AzureStorageContainerDAO::setContainerACL start") var status = new Status() var metaData = new HashMap[String, String]() var isPublic = "true" if ( !publicAccess ) { isPublic = "false" } var xml = generateACLXML( ACLList ) log.debug("XML is " + xml ) var method = new PutMethod( ) metaData("x-ms-prop-publicaccess") = isPublic var entity = new ByteArrayRequestEntity( xml.getBytes() ) method.setRequestEntity( entity ) status = genericSet( method, accountName, key, containerName, "comp=acl&restype=container",metaData, xml.getBytes() ) if ( status.code == StatusCodes.SET_CONTAINER_ACL_SUCCESS ) { status.successful = true } var responseBody = method.getResponseBodyAsString() log.debug("set container acl response " + responseBody ) return status } def parseACLXML( xmlStr:String ): List[ ContainerACL] = { log.info("parseACLXML start") log.debug("xml string is "+ xmlStr ) var xml = XML.loadString( xmlStr ) log.debug("have xml") var aclList = (xml \\ "SignedIdentifier") var l = List[ContainerACL]() for (acl <- aclList ) { log.debug("in acl loop") var realACL = new ContainerACL() realACL.uid = ( acl \\ "Id").text var startTime = ( acl \\ "Start").text var endTime = ( acl \\ "Expiry").text var realStartTime = new DateTime( startTime ).withZone(DateTimeZone.UTC).toDate() var realEndTime = new DateTime( endTime ).withZone(DateTimeZone.UTC).toDate() realACL.startTime = realStartTime realACL.endTime = realEndTime var perms = ( acl \\ "Permission").text if ( perms.exists( _ == 'r' ) ) { realACL.canRead = true } else { realACL.canRead = false } if ( perms.exists( _ == 'w' ) ) { realACL.canWrite = true } else { realACL.canWrite = false } if ( perms.exists( _ == 'd' ) ) { realACL.canDelete = true } else { realACL.canDelete = false } // l += realACL 2.7 l ::= realACL } return( l ) } def genericGet( method:HttpMethodBase, accountName:String, key:String, container: String ,canonicalResourceExtra: String): Status = { log.info("AzureStorageBlobDAO::genericGet start") var status = new Status() var canonicalResource = "/"+accountName+"/"+container var url = "http://"+accountName+baseBlobURL+"/"+container if ( canonicalResourceExtra != "" ) { canonicalResource += "\n"+canonicalResourceExtra.replace("=",":").replace("&","\n") url += "?"+canonicalResourceExtra } var client = new HttpClient() method.setURI( new URI( url ) ) AzureStorageCommon.addMetadataToMethod( method, new HashMap[String,String]() ) AzureStorageCommon.populateMethod( method, key, accountName, canonicalResource, null) // setup proxy. 
AzureStorageCommon.setupProxy( client ) var res = client.executeMethod( method ) status.code = res return status } def getContainerACL( accountName:String, key:String, container:String ): (Status, List[ ContainerACL ] ) = { log.info("AzureStorageContainerDAO::getContainerACL start") var status = new Status() var method = new GetMethod( ) status = genericGet( method, accountName, key, container,"comp=acl&restype=container" ) if ( status.code == StatusCodes.GET_CONTAINER_ACL_SUCCESS) { status.successful = true } var h = method.getResponseHeaders() var responseBody = method.getResponseBody() var subArray = responseBody.drop(3) var subsubArray = subArray.toArray var rb2 = new String( subsubArray ) //var resp = method.getResponseBody() log.debug("get container acl response " + rb2 ) var l = parseACLXML( rb2 ) return ( status, l) } }
kpfaulkner/azurestorage
src/azurestorage/DAO/AzureStorageContainerDAO.scala
Scala
bsd-3-clause
14,757
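A minimal usage sketch of the container DAO above; the account name and key are placeholders, and running this would issue real HTTP requests against the Azure blob endpoint.

import azurestorage.DAO.AzureStorageContainerDAO

object ContainerDaoSketch {
  def main(args: Array[String]): Unit = {
    val dao = new AzureStorageContainerDAO()

    // Create a container, then list what the account holds
    val createStatus = dao.createContainer("myaccount", "base64AccountKey==", "photos")
    if (createStatus.successful) {
      val (listStatus, containers) = dao.listContainers("myaccount", "base64AccountKey==")
      if (listStatus.successful)
        containers foreach (c => println(c.name + " -> " + c.url))
    }
  }
}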
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.mllib.recommendation import java.io.IOException import java.lang.{Integer => JavaInteger} import scala.collection.mutable import com.clearspring.analytics.stream.cardinality.HyperLogLogPlus import com.github.fommil.netlib.BLAS.{getInstance => blas} import org.apache.hadoop.fs.Path import org.json4s._ import org.json4s.JsonDSL._ import org.json4s.jackson.JsonMethods._ import org.apache.spark.{Logging, SparkContext} import org.apache.spark.annotation.Since import org.apache.spark.api.java.{JavaPairRDD, JavaRDD} import org.apache.spark.mllib.linalg._ import org.apache.spark.mllib.rdd.MLPairRDDFunctions._ import org.apache.spark.mllib.util.{Loader, Saveable} import org.apache.spark.rdd.RDD import org.apache.spark.sql.{Row, SQLContext} import org.apache.spark.storage.StorageLevel /** * Model representing the result of matrix factorization. * * Note: If you create the model directly using constructor, please be aware that fast prediction * requires cached user/product features and their associated partitioners. * * @param rank Rank for the features in this model. * @param userFeatures RDD of tuples where each tuple represents the userId and * the features computed for this user. * @param productFeatures RDD of tuples where each tuple represents the productId * and the features computed for this product. */ @Since("0.8.0") class MatrixFactorizationModel @Since("0.8.0") ( @Since("0.8.0") val rank: Int, @Since("0.8.0") val userFeatures: RDD[(Int, Array[Double])], @Since("0.8.0") val productFeatures: RDD[(Int, Array[Double])]) extends Saveable with Serializable with Logging { require(rank > 0) validateFeatures("User", userFeatures) validateFeatures("Product", productFeatures) /** Validates factors and warns users if there are performance concerns. */ private def validateFeatures(name: String, features: RDD[(Int, Array[Double])]): Unit = { require(features.first()._2.length == rank, s"$name feature dimension does not match the rank $rank.") if (features.partitioner.isEmpty) { logWarning(s"$name factor does not have a partitioner. " + "Prediction on individual records could be slow.") } if (features.getStorageLevel == StorageLevel.NONE) { logWarning(s"$name factor is not cached. Prediction could be slow.") } } /** Predict the rating of one user for one product. */ @Since("0.8.0") def predict(user: Int, product: Int): Double = { val userVector = userFeatures.lookup(user).head val productVector = productFeatures.lookup(product).head blas.ddot(rank, userVector, 1, productVector, 1) } /** * Return approximate numbers of users and products in the given usersProducts tuples. * This method is based on `countApproxDistinct` in class `RDD`. * * @param usersProducts RDD of (user, product) pairs. 
* @return approximate numbers of users and products. */ private[this] def countApproxDistinctUserProduct(usersProducts: RDD[(Int, Int)]): (Long, Long) = { val zeroCounterUser = new HyperLogLogPlus(4, 0) val zeroCounterProduct = new HyperLogLogPlus(4, 0) val aggregated = usersProducts.aggregate((zeroCounterUser, zeroCounterProduct))( (hllTuple: (HyperLogLogPlus, HyperLogLogPlus), v: (Int, Int)) => { hllTuple._1.offer(v._1) hllTuple._2.offer(v._2) hllTuple }, (h1: (HyperLogLogPlus, HyperLogLogPlus), h2: (HyperLogLogPlus, HyperLogLogPlus)) => { h1._1.addAll(h2._1) h1._2.addAll(h2._2) h1 }) (aggregated._1.cardinality(), aggregated._2.cardinality()) } /** * Predict the rating of many users for many products. * The output RDD has an element per each element in the input RDD (including all duplicates) * unless a user or product is missing in the training set. * * @param usersProducts RDD of (user, product) pairs. * @return RDD of Ratings. */ @Since("0.9.0") def predict(usersProducts: RDD[(Int, Int)]): RDD[Rating] = { // Previously the partitions of ratings are only based on the given products. // So if the usersProducts given for prediction contains only few products or // even one product, the generated ratings will be pushed into few or single partition // and can't use high parallelism. // Here we calculate approximate numbers of users and products. Then we decide the // partitions should be based on users or products. val (usersCount, productsCount) = countApproxDistinctUserProduct(usersProducts) if (usersCount < productsCount) { val users = userFeatures.join(usersProducts).map { case (user, (uFeatures, product)) => (product, (user, uFeatures)) } users.join(productFeatures).map { case (product, ((user, uFeatures), pFeatures)) => Rating(user, product, blas.ddot(uFeatures.length, uFeatures, 1, pFeatures, 1)) } } else { val products = productFeatures.join(usersProducts.map(_.swap)).map { case (product, (pFeatures, user)) => (user, (product, pFeatures)) } products.join(userFeatures).map { case (user, ((product, pFeatures), uFeatures)) => Rating(user, product, blas.ddot(uFeatures.length, uFeatures, 1, pFeatures, 1)) } } } /** * Java-friendly version of [[MatrixFactorizationModel.predict]]. */ @Since("1.2.0") def predict(usersProducts: JavaPairRDD[JavaInteger, JavaInteger]): JavaRDD[Rating] = { predict(usersProducts.rdd.asInstanceOf[RDD[(Int, Int)]]).toJavaRDD() } /** * Recommends products to a user. * * @param user the user to recommend products to * @param num how many products to return. The number returned may be less than this. * @return [[Rating]] objects, each of which contains the given user ID, a product ID, and a * "score" in the rating field. Each represents one recommended product, and they are sorted * by score, decreasing. The first returned is the one predicted to be most strongly * recommended to the user. The score is an opaque value that indicates how strongly * recommended the product is. */ @Since("1.1.0") def recommendProducts(user: Int, num: Int): Array[Rating] = MatrixFactorizationModel.recommend(userFeatures.lookup(user).head, productFeatures, num) .map(t => Rating(user, t._1, t._2)) /** * Recommends users to a product. That is, this returns users who are most likely to be * interested in a product. * * @param product the product to recommend users to * @param num how many users to return. The number returned may be less than this. * @return [[Rating]] objects, each of which contains a user ID, the given product ID, and a * "score" in the rating field. 
Each represents one recommended user, and they are sorted * by score, decreasing. The first returned is the one predicted to be most strongly * recommended to the product. The score is an opaque value that indicates how strongly * recommended the user is. */ @Since("1.1.0") def recommendUsers(product: Int, num: Int): Array[Rating] = MatrixFactorizationModel.recommend(productFeatures.lookup(product).head, userFeatures, num) .map(t => Rating(t._1, product, t._2)) protected override val formatVersion: String = "1.0" /** * Save this model to the given path. * * This saves: * - human-readable (JSON) model metadata to path/metadata/ * - Parquet formatted data to path/data/ * * The model may be loaded using [[Loader.load]]. * * @param sc Spark context used to save model data. * @param path Path specifying the directory in which to save this model. * If the directory already exists, this method throws an exception. */ @Since("1.3.0") override def save(sc: SparkContext, path: String): Unit = { MatrixFactorizationModel.SaveLoadV1_0.save(this, path) } /** * Recommends topK products for all users. * * @param num how many products to return for every user. * @return [(Int, Array[Rating])] objects, where every tuple contains a userID and an array of * rating objects which contains the same userId, recommended productID and a "score" in the * rating field. Semantics of score is same as recommendProducts API */ @Since("1.4.0") def recommendProductsForUsers(num: Int): RDD[(Int, Array[Rating])] = { MatrixFactorizationModel.recommendForAll(rank, userFeatures, productFeatures, num).map { case (user, top) => val ratings = top.map { case (product, rating) => Rating(user, product, rating) } (user, ratings) } } /** * Recommends topK users for all products. * * @param num how many users to return for every product. * @return [(Int, Array[Rating])] objects, where every tuple contains a productID and an array * of rating objects which contains the recommended userId, same productID and a "score" in the * rating field. Semantics of score is same as recommendUsers API */ @Since("1.4.0") def recommendUsersForProducts(num: Int): RDD[(Int, Array[Rating])] = { MatrixFactorizationModel.recommendForAll(rank, productFeatures, userFeatures, num).map { case (product, top) => val ratings = top.map { case (user, rating) => Rating(user, product, rating) } (product, ratings) } } } @Since("1.3.0") object MatrixFactorizationModel extends Loader[MatrixFactorizationModel] { import org.apache.spark.mllib.util.Loader._ /** * Makes recommendations for a single user (or product). */ private def recommend( recommendToFeatures: Array[Double], recommendableFeatures: RDD[(Int, Array[Double])], num: Int): Array[(Int, Double)] = { val scored = recommendableFeatures.map { case (id, features) => (id, blas.ddot(features.length, recommendToFeatures, 1, features, 1)) } scored.top(num)(Ordering.by(_._2)) } /** * Makes recommendations for all users (or products). * @param rank rank * @param srcFeatures src features to receive recommendations * @param dstFeatures dst features used to make recommendations * @param num number of recommendations for each record * @return an RDD of (srcId: Int, recommendations), where recommendations are stored as an array * of (dstId, rating) pairs. 
*/ private def recommendForAll( rank: Int, srcFeatures: RDD[(Int, Array[Double])], dstFeatures: RDD[(Int, Array[Double])], num: Int): RDD[(Int, Array[(Int, Double)])] = { val srcBlocks = blockify(rank, srcFeatures) val dstBlocks = blockify(rank, dstFeatures) val ratings = srcBlocks.cartesian(dstBlocks).flatMap { case ((srcIds, srcFactors), (dstIds, dstFactors)) => val m = srcIds.length val n = dstIds.length val ratings = srcFactors.transpose.multiply(dstFactors) val output = new Array[(Int, (Int, Double))](m * n) var k = 0 ratings.foreachActive { (i, j, r) => output(k) = (srcIds(i), (dstIds(j), r)) k += 1 } output.toSeq } ratings.topByKey(num)(Ordering.by(_._2)) } /** * Blockifies features to use Level-3 BLAS. */ private def blockify( rank: Int, features: RDD[(Int, Array[Double])]): RDD[(Array[Int], DenseMatrix)] = { val blockSize = 4096 // TODO: tune the block size val blockStorage = rank * blockSize features.mapPartitions { iter => iter.grouped(blockSize).map { grouped => val ids = mutable.ArrayBuilder.make[Int] ids.sizeHint(blockSize) val factors = mutable.ArrayBuilder.make[Double] factors.sizeHint(blockStorage) var i = 0 grouped.foreach { case (id, factor) => ids += id factors ++= factor i += 1 } (ids.result(), new DenseMatrix(rank, i, factors.result())) } } } /** * Load a model from the given path. * * The model should have been saved by [[Saveable.save]]. * * @param sc Spark context used for loading model files. * @param path Path specifying the directory to which the model was saved. * @return Model instance */ @Since("1.3.0") override def load(sc: SparkContext, path: String): MatrixFactorizationModel = { val (loadedClassName, formatVersion, _) = loadMetadata(sc, path) val classNameV1_0 = SaveLoadV1_0.thisClassName (loadedClassName, formatVersion) match { case (className, "1.0") if className == classNameV1_0 => SaveLoadV1_0.load(sc, path) case _ => throw new IOException("MatrixFactorizationModel.load did not recognize model with" + s"(class: $loadedClassName, version: $formatVersion). Supported:\\n" + s" ($classNameV1_0, 1.0)") } } private[recommendation] object SaveLoadV1_0 { private val thisFormatVersion = "1.0" private[recommendation] val thisClassName = "org.apache.spark.mllib.recommendation.MatrixFactorizationModel" /** * Saves a [[MatrixFactorizationModel]], where user features are saved under `data/users` and * product features are saved under `data/products`. 
*/ def save(model: MatrixFactorizationModel, path: String): Unit = { val sc = model.userFeatures.sparkContext val sqlContext = new SQLContext(sc) import sqlContext.implicits._ val metadata = compact(render( ("class" -> thisClassName) ~ ("version" -> thisFormatVersion) ~ ("rank" -> model.rank))) sc.parallelize(Seq(metadata), 1).saveAsTextFile(metadataPath(path)) model.userFeatures.toDF("id", "features").write.parquet(userPath(path)) model.productFeatures.toDF("id", "features").write.parquet(productPath(path)) } def load(sc: SparkContext, path: String): MatrixFactorizationModel = { implicit val formats = DefaultFormats val sqlContext = new SQLContext(sc) val (className, formatVersion, metadata) = loadMetadata(sc, path) assert(className == thisClassName) assert(formatVersion == thisFormatVersion) val rank = (metadata \\ "rank").extract[Int] val userFeatures = sqlContext.read.parquet(userPath(path)) .map { case Row(id: Int, features: Seq[_]) => (id, features.asInstanceOf[Seq[Double]].toArray) } val productFeatures = sqlContext.read.parquet(productPath(path)) .map { case Row(id: Int, features: Seq[_]) => (id, features.asInstanceOf[Seq[Double]].toArray) } new MatrixFactorizationModel(rank, userFeatures, productFeatures) } private def userPath(path: String): String = { new Path(dataPath(path), "user").toUri.toString } private def productPath(path: String): String = { new Path(dataPath(path), "product").toUri.toString } } }
pronix/spark
mllib/src/main/scala/org/apache/spark/mllib/recommendation/MatrixFactorizationModel.scala
Scala
apache-2.0
15,683
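A short usage sketch of the MatrixFactorizationModel API shown above. The training call (ALS.train), the local SparkContext, the rating triples and the save path are illustrative assumptions; only predict, recommendProducts, save and load come from the class itself.

import org.apache.spark.SparkContext
import org.apache.spark.mllib.recommendation.{ALS, MatrixFactorizationModel, Rating}

object MatrixFactorizationModelExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local[*]", "mf-example") // illustrative local context
    // Hypothetical training data: (user, product, rating) triples.
    val ratings = sc.parallelize(Seq(Rating(1, 10, 5.0), Rating(1, 20, 1.0), Rating(2, 10, 4.0)))
    val model = ALS.train(ratings, /* rank = */ 8, /* iterations = */ 10)

    // Batch prediction and top-N recommendation, as defined in the class above.
    val predictions = model.predict(sc.parallelize(Seq((1, 20), (2, 20))))
    val topForUser1 = model.recommendProducts(1, 2)

    // Round-trip through the SaveLoadV1_0 format.
    model.save(sc, "/tmp/mf-model") // fails if the directory already exists
    val reloaded = MatrixFactorizationModel.load(sc, "/tmp/mf-model")
    sc.stop()
  }
}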
package ohnosequences // Here we define our DSL case object expr { sealed trait AnyExpr { type Repr } sealed trait BoolExpr extends AnyExpr { type Repr = Boolean } case class BoolVar(sym: Symbol) extends BoolExpr case class Not[E <: BoolExpr](val inside: E) extends BoolExpr case class And[L <: BoolExpr, R <: BoolExpr](l: L, r: R) extends BoolExpr case class Or[L <: BoolExpr, R <: BoolExpr](l: L, r: R) extends BoolExpr sealed trait IntExpr extends AnyExpr { type Repr = Int } case class IntConst(value: Int) extends IntExpr case class IntVar(sym: Symbol) extends IntExpr // This can be much more general, but I leave close to how it was in the original example: // case class Ite[C <: BoolExpr, E <: AnyExpr](cond: C, thenExpr: E, elseExpr: E) extends AnyExpr { type Repr = E#Repr } case class IfThenElse[C <: BoolExpr, T <: IntExpr, E <: IntExpr](cond: C, thenExpr: T, elseExpr: E) extends IntExpr // Just an alias for expression with the same representation type SameAs[E <: AnyExpr] = AnyExpr { type Repr <: E#Repr } }
laughedelic/rephrase
src/main/scala/gadt.scala
Scala
agpl-3.0
1,060
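A sketch of how the DSL above can be used to build a typed expression; the variable names and the helper method below are purely illustrative.

import ohnosequences.expr._

object ExprExample extends App {
  // (p && !q) ? 1 : x  -- built from the constructors above; Repr is tracked in the types.
  val cond: And[BoolVar, Not[BoolVar]] = And(BoolVar(Symbol("p")), Not(BoolVar(Symbol("q"))))
  val e: IntExpr = IfThenElse(cond, IntConst(1), IntVar(Symbol("x")))

  // SameAs lets us require "any expression with the same representation type":
  def sameAsInt(other: SameAs[IntConst]): Unit = ()
  sameAsInt(IntVar(Symbol("y")))  // both have Repr = Int
}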
package com.outr.arango.api import com.outr.arango.api.model._ import io.youi.client.HttpClient import io.youi.http.HttpMethod import io.youi.net._ import io.circe.Json import scala.concurrent.{ExecutionContext, Future} object AdminExecute { def post(client: HttpClient, body: Json)(implicit ec: ExecutionContext): Future[Json] = client .method(HttpMethod.Post) .path(path"/_admin/execute", append = true) .restful[Json, Json](body) }
outr/arangodb-scala
api/src/main/scala/com/outr/arango/api/AdminExecute.scala
Scala
mit
458
package proofpeer.metis.util import scalaz._ trait PartialOrder[A] { def tryCompare(x: A, y: A): Option[Ordering] }
proofpeer/proofpeer-metis
shared/src/main/scala/proofpeer/metis/util/PartialOrder.scala
Scala
mit
120
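A sketch of one possible instance of the trait above: sets partially ordered by inclusion. Returning None for incomparable values is what distinguishes this from a total scalaz Order; the instance itself is illustrative.

import proofpeer.metis.util.PartialOrder
import scalaz.Ordering

object SubsetPartialOrder extends PartialOrder[Set[Int]] {
  def tryCompare(x: Set[Int], y: Set[Int]): Option[Ordering] =
    if (x == y) Some(Ordering.EQ)
    else if (x.subsetOf(y)) Some(Ordering.LT)
    else if (y.subsetOf(x)) Some(Ordering.GT)
    else None
}

// SubsetPartialOrder.tryCompare(Set(1), Set(1, 2))    == Some(Ordering.LT)
// SubsetPartialOrder.tryCompare(Set(1, 3), Set(2, 4)) == None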
package akka.contrib.mailbox /** * Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com> */ import java.util.concurrent.atomic.AtomicInteger import akka.actor.{ActorRef, ActorSystem} import akka.dispatch.{Envelope, MailboxType, MessageQueue, UnboundedMailbox} import akka.event.Logging import com.typesafe.config.Config /** * Logs the mailbox size when exceeding the configured limit. It logs at most once per second * when the messages are enqueued or dequeued. * * Configuration: * <pre> * akka.actor.default-mailbox { * mailbox-type = akka.contrib.mailbox.LoggingMailboxType * size-limit = 20 * } * </pre> */ class LoggingMailboxType(settings: ActorSystem.Settings, config: Config) extends MailboxType { override def create(owner: Option[ActorRef], system: Option[ActorSystem]) = (owner, system) match { case (Some(o), Some(s)) => val sizeLimit = config.getInt("size-limit") val mailbox = new LoggingMailbox(o, s, sizeLimit) mailbox case _ => throw new IllegalArgumentException("no mailbox owner or system given") } } class LoggingMailbox(owner: ActorRef, system: ActorSystem, sizeLimit: Int) extends UnboundedMailbox.MessageQueue { private val interval = 1000000000L // 1 s, in nanoseconds private lazy val log = Logging(system, classOf[LoggingMailbox]) private val path = owner.path.toString @volatile private var logTime: Long = System.nanoTime() private val queueSize = new AtomicInteger private val dequeueCount = new AtomicInteger override def dequeue(): Envelope = { val x = super.dequeue() if (x ne null) { val size = queueSize.decrementAndGet() dequeueCount.incrementAndGet() logSize(size) } x } override def enqueue(receiver: ActorRef, handle: Envelope): Unit = { super.enqueue(receiver, handle) val size = queueSize.incrementAndGet() logSize(size) } def logSize(size: Int): Unit = if (size >= sizeLimit) { val now = System.nanoTime() if (now - logTime > interval) { val msgPerSecond = dequeueCount.get.toDouble / ((now - logTime).toDouble / 1000000000L) logTime = now dequeueCount.set(0) log.info("Mailbox size for [{}] is [{}], processing [{}] msg/s", path, size, f"$msgPerSecond%2.2f") } } override def numberOfMessages: Int = queueSize.get override def cleanUp(owner: ActorRef, deadLetters: MessageQueue): Unit = { super.cleanUp(owner, deadLetters) } }
simao/riepete
src/main/scala/akka/contrib/mailbox/LoggingMailbox.scala
Scala
mit
2,474
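A wiring sketch based on the configuration keys documented in the mailbox's own scaladoc; the actor-system name and the size limit of 20 are illustrative.

import akka.actor.ActorSystem
import com.typesafe.config.ConfigFactory

object LoggingMailboxExample extends App {
  val config = ConfigFactory.parseString(
    """
      |akka.actor.default-mailbox {
      |  mailbox-type = "akka.contrib.mailbox.LoggingMailboxType"
      |  size-limit = 20
      |}
    """.stripMargin).withFallback(ConfigFactory.load())

  val system = ActorSystem("logging-mailbox-demo", config)
  // Any actor created now uses LoggingMailbox; once its queue reaches 20 messages,
  // the mailbox logs the size and throughput at most once per second.
}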
// Copyright (C) 2015 IBM Corp. All Rights Reserved. // See the LICENCE.txt file distributed with this work for additional // information regarding copyright ownership. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.ibm.watson.developer_cloud.visual_recognition.v1 import java.io.File import com.ibm.watson.developer_cloud.service.{VCAPConfigFactory, ConfigFactory, WatsonService} import com.ibm.watson.developer_cloud.utils._ import com.ibm.watson.developer_cloud.visual_recognition.v1.model.{VisualRecognitionImages, LabelSet, VisualRecognitionProtocol} import spray.httpx.SprayJsonSupport._ import spray.http._ import spray.client.pipelining._ import spray.json._ import VisualRecognitionProtocol._ import scala.concurrent.Future /** * The Visual Recognition service analyzes images, enabling you to understand their content without * any accompanying descriptive text. * * @version v1 * @see <a * href="http://www.ibm.com/smarterplanet/us/en/ibmwatson/developercloud/visual-recognition.html"> * Visual Recognition</a> */ class VisualRecognition(configFactory: ConfigFactory = new VCAPConfigFactory()) extends WatsonService(configFactory) { /** * Gets the service type for service (used to get correct entry from VCAP_SERVICES properties) * @return */ override def serviceType: String = "visual_recognition" /** * Gets the labels and label groups * @return labels and label groups */ def labelSet : Future[LabelSet] = { val request = Get(config.endpoint + VisualRecognition.labelsPath) send(request).map(unmarshal[LabelSet]) } /** * Classifies the images against the label groups and labels. The response includes a score for a * label if the score meets the minimum threshold of 0.5. If no score meets the threshold for an * image, no labels are returned. * @param image the image file * @param labelSet labels to classify against (optional) * @return the visual recognition image */ def recognize(image: File, labelSet: Option[LabelSet] = None) : Future[VisualRecognitionImages] = { Validation.notNull(image, "Image cannot be null") val bodyPart = BodyPart(image, "imgFile") val list = List(bodyPart) ++ labelSet.map({p => BodyPart(p.toJson.toString, VisualRecognition.labelsToCheck)}).toList val data = MultipartFormData(list) val request = Post(config.endpoint + VisualRecognition.recognizePath, data) send(request).map(unmarshal[VisualRecognitionImages]) } } object VisualRecognition { val labelsPath = "/v1/tag/labels" val recognizePath = "/v1/tag/recognize" val labelsToCheck = "labels_to_check" }
kane77/watson-scala-wrapper
src/main/scala/com/ibm/watson/developer_cloud/visual_recognition/v1/VisualRecognition.scala
Scala
apache-2.0
3,173
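A usage sketch for the service above, assuming credentials are resolvable through the default VCAPConfigFactory (for example from VCAP_SERVICES); the image file name is illustrative.

import java.io.File
import scala.concurrent.ExecutionContext.Implicits.global
import scala.util.{Failure, Success}
import com.ibm.watson.developer_cloud.visual_recognition.v1.VisualRecognition

object VisualRecognitionExample extends App {
  val service = new VisualRecognition()

  // List the label groups and labels the service knows about.
  service.labelSet.foreach(labels => println(s"Available labels: $labels"))

  // Classify a local image; only labels scoring at least 0.5 are returned.
  service.recognize(new File("dog.jpg")).onComplete {
    case Success(images) => println(s"Recognized: $images")
    case Failure(error)  => println(s"Recognition failed: $error")
  }
}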
import org.portablescala.sbtplatformdeps.PlatformDepsPlugin.autoImport._ import org.scalajs.jsdependencies.sbtplugin.JSDependenciesPlugin.autoImport._ import org.scalajs.jsdependencies.sbtplugin.JSModuleID import sbt.Keys.scalaVersion import sbt._ object Dependencies { val versionOfScala = "2.13.8" //update .github/workflows/ci.yml as well val jqueryWrapperVersion = "3.2.0" val scalaJsDomVersion = "2.1.0" val scalaTagsVersion = "0.11.1" val scalaCssVersion = "1.0.0" val servletVersion = "4.0.1" val avsCommonsVersion = "2.5.5" val atmosphereJSVersion = "3.1.3" val atmosphereVersion = "2.7.5" val upickleVersion = "1.5.0" // Tests only val circeVersion = "0.14.1" // Tests only val circeDerivationVersion = "0.13.0-M5" // Tests only val monixVersion = "3.4.0" // Tests only val sttpVersion = "3.5.1" val scalaLoggingVersion = "3.9.4" val jettyVersion = "9.4.45.v20220203" val typesafeConfigVersion = "1.4.2" val flexmarkVersion = "0.62.2" val logbackVersion = "1.2.11" val janinoVersion = "3.1.6" val fontAwesomeVersion = "5.10.1" val svg4everybodyVersion = "2.1.9" val scalatestVersion = "3.2.11" val bootstrap4Version = "4.1.3" val bootstrap4DatepickerVersion = "5.39.0" val momentJsVersion = "2.29.1" val seleniumVersion = "4.1.2" val webDriverManagerVersion = "5.1.0" val scalaJsBenchmarkVersion = "0.10.0" val compilerPlugins = Def.setting(Seq( "com.avsystem.commons" %% "commons-analyzer" % avsCommonsVersion ).map(compilerPlugin)) val commonTestDeps = Def.setting(Seq( "org.scalatest" %%% "scalatest" % scalatestVersion ).map(_ % Test)) val macroDeps = Def.setting(Seq( "org.scala-lang" % "scala-reflect" % scalaVersion.value, "com.avsystem.commons" %% "commons-macros" % avsCommonsVersion, )) val utilsCrossDeps = Def.setting(Seq( "com.avsystem.commons" %%% "commons-core" % avsCommonsVersion, )) val utilsJvmDeps = Def.setting(utilsCrossDeps.value ++ Seq( "com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingVersion )) val utilsSjsDeps = Def.setting(utilsCrossDeps.value ++ Seq( "org.scala-js" %%% "scalajs-dom" % scalaJsDomVersion, )) private val coreCrossDeps = Def.setting(Seq( "com.lihaoyi" %%% "scalatags" % scalaTagsVersion )) val coreJvmDeps = coreCrossDeps val coreSjsDeps = coreCrossDeps private val rpcCrossDeps = Def.setting(Seq( "com.lihaoyi" %%% "upickle" % upickleVersion % Test, "io.circe" %%% "circe-core" % circeVersion % Test, "io.circe" %%% "circe-parser" % circeVersion % Test, )) val rpcJvmDeps = Def.setting(rpcCrossDeps.value ++ Seq( "javax.servlet" % "javax.servlet-api" % servletVersion, "org.atmosphere" % "atmosphere-runtime" % atmosphereVersion )) val rpcSjsDeps = rpcCrossDeps val rpcJsDeps = Def.setting(Seq( "org.webjars" % "atmosphere-javascript" % atmosphereJSVersion / s"$atmosphereJSVersion/atmosphere.js" minified s"$atmosphereJSVersion/atmosphere-min.js" )) private val restCrossDeps = Def.setting(Seq( "com.avsystem.commons" %%% "commons-core" % avsCommonsVersion, "com.softwaremill.sttp.client3" %%% "core" % sttpVersion, "io.monix" %%% "monix" % monixVersion, "io.circe" %%% "circe-core" % circeVersion % Test, "io.circe" %%% "circe-parser" % circeVersion % Test, "io.circe" %%% "circe-derivation" % circeDerivationVersion % Test, )) val restJvmDeps = Def.setting(restCrossDeps.value ++ Seq( "com.softwaremill.sttp.client3" %% "async-http-client-backend-future" % sttpVersion, "javax.servlet" % "javax.servlet-api" % servletVersion, "com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingVersion, "org.eclipse.jetty" % "jetty-server" % jettyVersion % Test, "org.eclipse.jetty" % 
"jetty-servlet" % jettyVersion % Test )) val restSjsDeps = restCrossDeps val restJettyDeps = Def.setting(Seq( "org.eclipse.jetty" % "jetty-client" % jettyVersion )) private val cssCrossDeps = Def.setting(Seq( "com.github.japgolly.scalacss" %%% "core" % scalaCssVersion, )) val cssJvmDeps = cssCrossDeps val cssSjsDeps = Def.setting(cssCrossDeps.value ++ Seq( "com.lihaoyi" %%% "scalatags" % scalaTagsVersion, )) val bootstrap4SjsDeps = Def.setting(Seq( "io.udash" %%% "udash-jquery" % jqueryWrapperVersion, )) private val momentResource = s"$momentJsVersion/moment.js" private val bootstrap4Resource = "js/bootstrap.bundle.js" val bootstrap4JsDeps = Def.setting(Seq[JSModuleID]( "org.webjars" % "bootstrap" % bootstrap4Version / bootstrap4Resource minified "js/bootstrap.bundle.min.js" dependsOn "jquery.js", "org.webjars" % "momentjs" % s"$momentJsVersion" / momentResource minified s"$momentJsVersion/min/moment.min.js", "org.webjars" % "tempusdominus-bootstrap-4" % bootstrap4DatepickerVersion / "js/tempusdominus-bootstrap-4.js" minified "js/tempusdominus-bootstrap-4.min.js" dependsOn(bootstrap4Resource, momentResource) )) val benchmarksSjsDeps = Def.setting(Seq( "com.github.japgolly.scalajs-benchmark" %%% "benchmark" % scalaJsBenchmarkVersion, "io.circe" %%% "circe-core" % circeVersion, "io.circe" %%% "circe-generic" % circeVersion, "io.circe" %%% "circe-parser" % circeVersion, "com.lihaoyi" %%% "upickle" % upickleVersion, )) val backendDeps = Def.setting(Seq( "com.typesafe.scala-logging" %% "scala-logging" % scalaLoggingVersion, "ch.qos.logback" % "logback-classic" % logbackVersion, "org.codehaus.janino" % "janino" % janinoVersion, //conditional processing in logback "org.eclipse.jetty" % "jetty-server" % jettyVersion, "org.eclipse.jetty" % "jetty-rewrite" % jettyVersion, "org.eclipse.jetty.websocket" % "websocket-server" % jettyVersion, "com.typesafe" % "config" % typesafeConfigVersion, "com.vladsch.flexmark" % "flexmark-all" % flexmarkVersion, )) val seleniumDeps: Seq[ModuleID] = Seq( "org.seleniumhq.selenium" % "selenium-java" % seleniumVersion, "io.github.bonigarcia" % "webdrivermanager" % webDriverManagerVersion, ).map(_ % Test) val guideJsDeps = Def.setting(Seq[JSModuleID]( ProvidedJS / "prism.js", )) val guideFrontendDeps = Def.setting(Seq( "org.webjars" % "font-awesome" % fontAwesomeVersion, )) val homepageJsDeps = Def.setting(Seq[JSModuleID]( "org.webjars.npm" % "svg4everybody" % svg4everybodyVersion / s"$svg4everybodyVersion/dist/svg4everybody.js", ProvidedJS / "prism.js", )) }
UdashFramework/udash-core
project/Dependencies.scala
Scala
apache-2.0
6,503
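A build.sbt fragment sketching how a module in the same build might consume these grouped settings; the module name is illustrative, while the `.value` pattern is standard sbt usage for Def.setting values defined in project/Dependencies.scala.

// build.sbt (illustrative module)
lazy val utils = project
  .settings(
    libraryDependencies ++=
      Dependencies.utilsJvmDeps.value ++
      Dependencies.commonTestDeps.value ++
      Dependencies.compilerPlugins.value
  )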
/******************************************************************************* * Copyright (c) 2019. Carl Minden * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. ******************************************************************************/ package com.anathema_roguelike package stats.effects import com.anathema_roguelike.actors.TimeElapsedEvent import com.anathema_roguelike.main.Game import com.anathema_roguelike.stats.Stat import com.google.common.collect.{HashBiMap, Iterables} import com.google.common.eventbus.Subscribe import scala.collection.JavaConverters._ import scala.collection.mutable.ListBuffer import scala.reflect.runtime.universe._ class EffectCollection[T, S <: Stat[_ <: T]](var affected: T) { Game.getInstance.getEventBus.register(this) private val sourcedEffects: HashBiMap[HasEffect[_ <: Effect[_ <: T, _]], Effect[_ <: T, _]] = HashBiMap.create[HasEffect[_ <: Effect[_ <: T, _]], Effect[_ <: T, _]] private var unsourcedEffects: ListBuffer[Effect[_ <: T, _]] = ListBuffer() def getStatBonus[G <: S : TypeTag]: Double = { getEffects.foldLeft(0.0) { (bonus, effect) => bonus + effect.getAdditiveBonus[G] } } def getStatMultiplier[G <: S : TypeTag]: Double = { getEffects.foldLeft(1.0) { (bonus, effect) => bonus * effect.getMultiplier[G] } } @Subscribe def handleSegmentElapsedEvent(event: TimeElapsedEvent): Unit = { elapse(event.elapsedTime) removeExpired() } def getEffects: Iterable[Effect[_ <: T, _]] = sourcedEffects.values.asScala ++ unsourcedEffects def apply(effect: Effect[T, _]): Unit = { if(effect.getSource.isDefined) { sourcedEffects.forcePut(effect.getSource.get, effect) } else { unsourcedEffects += effect } effect.applyTo(affected) } def removeBySource(source: HasEffect[_ <: Effect[_ <: T, _]]): Unit = { val effect: Effect[_ <: T, _] = sourcedEffects.get(source) if (effect != null) { sourcedEffects.remove(effect) effect.remove() } } def elapse(duration: Double): Unit = { for (effect <- getEffects) { effect.getDuration.elapse(duration) } } def removeExpired(): Unit = { sourcedEffects.entrySet.removeIf(entry => { if(entry.getValue.getDuration.isExpired) { entry.getValue.remove() true } else { false } }) unsourcedEffects = unsourcedEffects.filterNot(e => { if(e.getDuration.isExpired) { e.remove() true } else false }) } }
carlminden/anathema-roguelike
src/com/anathema_roguelike/stats/effects/EffectCollection.scala
Scala
gpl-3.0
3,127
/* * Copyright (c) 2014 Erik van Oosten * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package imperial.mixins import com.codahale.metrics.MetricRegistry import imperial.wrappers.codahale.CodaHaleBackedArmoury import org.junit.runner.RunWith import org.mockito.Mockito.verify import org.scalatest.{FlatSpec, OneInstancePerTest} import org.scalatest.junit.JUnitRunner import org.scalatest.mock.MockitoSugar._ import imperial.measures.Counter import imperial.Armoury @RunWith(classOf[JUnitRunner]) class ImperialInstrumentedSpec extends FlatSpec with OneInstancePerTest { "An ImperialInstrumented" should "use the owner class as metric base name" in { val metricOwner = new MetricOwner metricOwner.createCounter() verify(metricOwner.metricRegistry).counter("imperial.mixins.ImperialInstrumentedSpec.MetricOwner.cnt") } private class MetricOwner() extends imperial.mocks.MockitoInstrumented { def createCounter(): Counter = armoury.counter("cnt") } }
thecoda/scala-imperial
src/test/scala/imperial/mixins/ImperialInstrumentedSpec.scala
Scala
apache-2.0
1,494
/* * Copyright 2012 Twitter Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twitter.zipkin.storage import com.twitter.util.FuturePools._ import com.twitter.util.{Closable, Future} import com.twitter.zipkin.adjuster.{ApplyTimestampAndDuration, CorrectForClockSkew, MergeById} import com.twitter.zipkin.common.Span import java.nio.ByteBuffer abstract class SpanStore extends java.io.Closeable { /** * Get the available trace information from the storage system. * * <p/> Traces are sorted in descending in order of the first span's * timestamp, containing up to [[QueryRequest.limit]] traces, nearest to * [[QueryRequest.endTs]], looking back up to [[QueryRequest.lookback]] ms. * * <p/> Spans in trace, and annotations in a span are sorted ascending by * timestamp. First event should be first in the spans list. */ def getTraces(qr: QueryRequest): Future[Seq[List[Span]]] /** * Get the available trace information from the storage system. * Spans in trace are sorted by the first annotation timestamp * in that span. First event should be first in the spans list. * * <p/> Results are sorted in order of the first span's timestamp, and contain * less elements than trace IDs when corresponding traces aren't available. */ def getTracesByIds(traceIds: Seq[Long]): Future[Seq[List[Span]]] /** * Get all the service names for as far back as the ttl allows. * * <p/> Results are sorted lexicographically */ def getAllServiceNames(): Future[Seq[String]] /** * Get all the span names for a particular service, as far back as the ttl allows. * * <p/> Results are sorted lexicographically */ def getSpanNames(service: String): Future[Seq[String]] /** * Store a list of spans, indexing as necessary. * * <p/> Spans may come in sparse, for example apply may be called multiple times * with a span with the same id, containing different annotations. The * implementation should ensure these are merged at query time. */ def apply(spans: Seq[Span]): Future[Unit] /** * Close writes and await possible draining of internal queues. 
*/ override def close() } object SpanStore { /** Allows [[SpanStore]] to be used with a [[com.twitter.finagle.Filter]] */ implicit def toScalaFunc(s: SpanStore): (Seq[Span] => Future[Unit]) = { return (spans: Seq[Span]) => s.apply(spans) } implicit def toTwitterCloseable(c: java.io.Closeable): Closable = { Closable.make(t => unboundedPool.apply(() => c.close())) } } class InMemorySpanStore extends SpanStore with CollectAnnotationQueries { import scala.collection.mutable val spans: mutable.ArrayBuffer[Span] = new mutable.ArrayBuffer[Span] private[this] def call[T](f: => T): Future[T] = synchronized(Future(f)) private[this] def spansForService(name: String): Iterator[Span] = spans.reverseIterator.filter(_.serviceNames.contains(name)) override def close() = {} override def apply(newSpans: Seq[Span]): Future[Unit] = call { spans ++= newSpans .map(s => s.copy(annotations = s.annotations.sorted)) .map(ApplyTimestampAndDuration.apply) }.unit override def getTracesByIds(traceIds: Seq[Long]): Future[Seq[List[Span]]] = call { spans.groupBy(_.traceId) .filterKeys(traceIds.contains(_)) .values.filter(!_.isEmpty).toList .map(MergeById) .map(CorrectForClockSkew) .map(ApplyTimestampAndDuration) .sortBy(_.head)(Ordering[Span].reverse) // sort descending by the first span } override def getTraceIdsByName( serviceName: String, spanName: Option[String], endTs: Long, lookback: Long, limit: Int ): Future[Seq[IndexedTraceId]] = call { spansForService(serviceName) .filter(s => spanName.map(_ == s.name).getOrElse(true)) .filter(_.timestamp.exists(t => t >= (endTs - lookback) * 1000 && t <= endTs * 1000)) .take(limit) .map(span => IndexedTraceId(span.traceId, span.timestamp.get)) .toList } override def getTraceIdsByAnnotation( serviceName: String, annotation: String, value: Option[ByteBuffer], endTs: Long, lookback: Long, limit: Int ): Future[Seq[IndexedTraceId]] = call { spansForService(serviceName) .filter(_.timestamp.exists(t => t >= (endTs - lookback) * 1000 && t <= endTs * 1000)) .filter(if (value.isDefined) { _.binaryAnnotations.exists(ba => ba.key == annotation && ba.value == value.get) } else { _.annotations.exists(_.value == annotation) }) .take(limit) .map(span => IndexedTraceId(span.traceId, span.timestamp.get)) .toList } override protected def getTraceIdsByDuration( serviceName: String, spanName: Option[String], minDuration: Long, maxDuration: Option[Long], endTs: Long, lookback: Long, limit: Int ): Future[Seq[IndexedTraceId]] = call { spansForService(serviceName) .filter(s => spanName.map(_ == s.name).getOrElse(true)) .filter(_.timestamp.exists(t => t >= (endTs - lookback) * 1000 && t <= endTs * 1000)) .filter(_.duration.exists(_ >= minDuration)) .filter(_.duration.exists(_ <= maxDuration.getOrElse(Long.MaxValue))) .take(limit) .map(span => IndexedTraceId(span.traceId, span.timestamp.get)) .toList } override def getAllServiceNames(): Future[Seq[String]] = call { spans.flatMap(_.serviceNames).distinct.toList.sorted } override def getSpanNames(_serviceName: String): Future[Seq[String]] = call { val serviceName = _serviceName.toLowerCase // service names are always lowercase! spansForService(serviceName).map(_.name).toList.distinct.sorted } }
rocwzp/zipkin
zipkin-common/src/main/scala/com/twitter/zipkin/storage/SpanStore.scala
Scala
apache-2.0
6,239
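A minimal sketch of the in-memory store and of the SpanStore.toScalaFunc conversion defined above; the empty write is deliberate, so the example makes no assumptions about Span's constructor.

import com.twitter.util.{Await, Future}
import com.twitter.zipkin.common.Span
import com.twitter.zipkin.storage.{InMemorySpanStore, SpanStore}

object InMemorySpanStoreExample {
  def main(args: Array[String]): Unit = {
    val store = new InMemorySpanStore
    // toScalaFunc adapts the store to a plain write function, e.g. for use in a Finagle filter.
    val write: Seq[Span] => Future[Unit] = SpanStore.toScalaFunc(store)

    Await.result(write(Seq.empty))                     // no-op write
    println(Await.result(store.getAllServiceNames()))  // List() for an empty store
  }
}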
package scorex.crypto.authds import scorex.crypto.authds.legacy.treap.Level import scorex.crypto.hash._ trait ProofIterator { private var i = -1 protected def initializeIterator(): Unit = i = -1 val proofSeq: Seq[TwoPartyProofElement] protected def dequeueValue(): ADValue = { i = i + 1 ADValue @@ proofSeq(i).asInstanceOf[ProofValue].e } protected def dequeueKey(): ADKey = { i = i + 1 ADKey @@ proofSeq(i).asInstanceOf[ProofKey].e } protected def dequeueNextLeafKey(): ADKey = { i = i + 1 ADKey @@ proofSeq(i).asInstanceOf[ProofNextLeafKey].e } protected def dequeueRightLabel(): Digest = { i = i + 1 Digest32 @@ proofSeq(i).asInstanceOf[ProofRightLabel].e } protected def dequeueLeftLabel(): Digest = { i = i + 1 Digest32 @@ proofSeq(i).asInstanceOf[ProofLeftLabel].e } protected def dequeueDirection(): Direction = { i = i + 1 proofSeq(i).asInstanceOf[ProofDirection].direction } protected def dequeueLevel(): Level = { i = i + 1 proofSeq(i).asInstanceOf[ProofLevel].e } protected def dequeueBalance(): Balance = { i = i + 1 proofSeq(i).bytes(0) match { case -1 => Balance @@ -1.toByte case 0 => Balance @@ 0.toByte case 1 => Balance @@ 1.toByte } } }
ScorexProject/scrypto
src/main/scala/scorex/crypto/authds/ProofIterator.scala
Scala
cc0-1.0
1,292
package com.codility.challenge._5_PrefixSums /** * Created by obarros on 01/11/2016. */ object PassingCars { def solution(a: Array[Int]): Int = { var countOne = 0 var result = 0 for(i <- a.length -1 to 0 by -1) { if(a(i) == 1) countOne += 1 else result += countOne } if(math.abs(result) > 1000000000) -1 else result } }
Obarros/Codility
src/main/scala-2.11/com/codility/challenge/_5_PrefixSums/PassingCars.scala
Scala
mit
404
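A worked check against the classic Codility example: for Array(0, 1, 0, 1, 1) the eastbound/westbound passing pairs are (0,1), (0,3), (0,4), (2,3) and (2,4), so the expected result is 5. The object name is illustrative.

import com.codility.challenge._5_PrefixSums.PassingCars

object PassingCarsExample extends App {
  assert(PassingCars.solution(Array(0, 1, 0, 1, 1)) == 5)
  assert(PassingCars.solution(Array(0, 0, 0)) == 0) // no eastbound/westbound pairs
  println("PassingCars checks passed")
}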
package com.github.wakfudecrypt.types.data import com.github.wakfudecrypt._ @BinaryDecoder case class StatueIeParam( _0_int32: Int ) object StatueIeParam extends BinaryDataCompanion[StatueIeParam] { override val dataId = 93 }
jac3km4/wakfudecrypt
types/src/main/scala/com/github/wakfudecrypt/types/data/StatueIeParam.scala
Scala
mit
233
package com.socrata.datacoordinator.secondary import scala.collection.JavaConverters._ import java.lang.Runnable import java.util.UUID import java.util.concurrent.{CountDownLatch, Executors, ScheduledExecutorService, TimeUnit, ConcurrentHashMap} import com.socrata.datacoordinator.id.DatasetId import com.socrata.datacoordinator.secondary.messaging._ import com.socrata.datacoordinator.secondary.messaging.eurybates.MessageProducerFromConfig import scala.collection.mutable import scala.concurrent.duration._ import scala.util.Random import com.rojoma.simplearm.v2._ import com.socrata.curator.CuratorFromConfig import com.socrata.datacoordinator.common.{DataSourceFromConfig, SoQLCommon} import com.socrata.datacoordinator.common.DataSourceFromConfig.DSInfo import com.socrata.datacoordinator.secondary.sql.SqlSecondaryStoresConfig import com.socrata.datacoordinator.common.collocation.{CollocationLock, CollocationLockError, CollocationLockTimeout, CuratedCollocationLock} import com.socrata.datacoordinator.truth.universe._ import com.socrata.datacoordinator.util._ import com.socrata.soql.types.{SoQLType, SoQLValue} import com.socrata.thirdparty.metrics.MetricsReporter import com.socrata.thirdparty.typesafeconfig.Propertizer import com.typesafe.config.{Config, ConfigFactory} import org.apache.log4j.PropertyConfigurator import org.joda.time.{DateTime, Seconds} import org.slf4j.LoggerFactory import sun.misc.{Signal, SignalHandler} class SecondaryWatcher[CT, CV](universe: => Managed[SecondaryWatcher.UniverseType[CT, CV]], claimantId: UUID, claimTimeout: FiniteDuration, backoffInterval: FiniteDuration, replayWait: FiniteDuration, maxReplayWait: FiniteDuration, maxRetries: Int, maxReplays: Int, timingReport: TimingReport, messageProducer: MessageProducer, collocationLock: CollocationLock, collocationLockTimeout: FiniteDuration) { val log = LoggerFactory.getLogger(classOf[SecondaryWatcher[_,_]]) private val rand = new Random() // splay the sleep time +/- 5s to prevent watchers from getting in lock step private val nextRuntimeSplay = (rand.nextInt(10000) - 5000).toLong // allow for overriding for easy testing protected def manifest(u: Universe[CT, CV] with SecondaryManifestProvider with PlaybackToSecondaryProvider): SecondaryManifest = u.secondaryManifest protected def replicationMessages(u: Universe[CT, CV] with SecondaryReplicationMessagesProvider): SecondaryReplicationMessages[CT, CV] = u.secondaryReplicationMessages(messageProducer) def run(u: Universe[CT, CV] with Commitable with PlaybackToSecondaryProvider with SecondaryManifestProvider with SecondaryReplicationMessagesProvider with SecondaryMoveJobsProvider, secondary: NamedSecondary[CT, CV]): Boolean = { import u._ val foundWorkToDo = for(job <- manifest(u).claimDatasetNeedingReplication( secondary.storeId, claimantId, claimTimeout)) yield { timingReport( "playback-to-secondary", "tag:job-id" -> UUID.randomUUID(), // add job id tag to enclosing logs "tag:dataset-id" -> job.datasetId, // add dataset id tag to enclosing logs "truthDatasetId" -> job.datasetId.underlying, "secondary" -> secondary.storeId, "endingDataVersion" -> job.endingDataVersion ) { // This dataset _should_ not already be in the working set... if it is, this exits. 
SecondaryWatcherClaimManager.workingOn(secondary.storeId, job.datasetId) log.info(">> Syncing {} into {}", job.datasetId, secondary.storeId) if(job.replayNum > 0) log.info("Replay #{} of {}", job.replayNum, maxReplays) if(job.retryNum > 0) log.info("Retry #{} of {}", job.retryNum, maxRetries) val startingMillis = System.currentTimeMillis() try { playbackToSecondary(secondary, job) manifest(u).updateRetryInfo(job.storeId, job.datasetId, 0, 0) // done with the job, reset the retry counter log.info("<< Sync done for {} into {}", job.datasetId, secondary.storeId) completeSecondaryMoveJobs(u, job) // Essentially, this simulates unclaimDataset in a finally block. That is, make sure we clean up whether // there is exception or not. We don't do it the normal way (finally block) because we // want to trigger event message only when there is no error. unclaimDataset(u, secondary, job, sendMessage = true, startingMillis) } catch { case ex: Exception => try { handlePlaybackErrors(u, secondary, job, ex) } finally { unclaimDataset(u, secondary, job, sendMessage = false, startingMillis) } } } } foundWorkToDo.isDefined } private def handlePlaybackErrors(u: Universe[CT, CV] with Commitable with PlaybackToSecondaryProvider with SecondaryManifestProvider with SecondaryReplicationMessagesProvider, secondary: NamedSecondary[CT, CV], job: SecondaryRecord, error: Exception): Unit = { error match { case bdse@BrokenDatasetSecondaryException(reason, cookie) => log.error("Dataset version declared to be broken while updating dataset {} in secondary {}; marking it as broken", job.datasetId.asInstanceOf[AnyRef], secondary.storeId, bdse) // Prefer cookie handed-back by the exception, but since we write notes in out cookies currently that we don't // want to lose, let's avoid overriding those if we are not given Some other cookie string manifest(u).markSecondaryDatasetBroken(job, cookie.orElse(job.initialCookie)) case rlse@ReplayLaterSecondaryException(reason, cookie) => if (job.replayNum < maxReplays) { val replayAfter = Math.min(replayWait.toSeconds * Math.log(job.replayNum + 2), maxReplayWait.toSeconds) log.info("Replay later requested while updating dataset {} in secondary {}, replaying in {}...", job.datasetId.asInstanceOf[AnyRef], secondary.storeId, replayAfter.toString, rlse) manifest(u).updateReplayInfo(secondary.storeId, job.datasetId, cookie, job.replayNum + 1, replayAfter.toInt) } else { log.error("Ran out of replay attempts while updating dataset {} in secondary {}; marking it as broken", job.datasetId.asInstanceOf[AnyRef], secondary.storeId, rlse) manifest(u).markSecondaryDatasetBroken(job, job.initialCookie) } case ResyncLaterSecondaryException(reason) => log.info("resync later {} {} {} {}", secondary.groupName, secondary.storeId, job.datasetId.toString, reason) manifest(u).updateRetryInfo(job.storeId, job.datasetId, job.retryNum, backoffInterval.toSeconds.toInt) case e: Exception => if (job.retryNum < maxRetries) { val retryBackoff = backoffInterval.toSeconds * Math.pow(2, job.retryNum) log.warn("Unexpected exception while updating dataset {} in secondary {}, retrying in {}...", job.datasetId.asInstanceOf[AnyRef], secondary.storeId, retryBackoff.toString, e) manifest(u).updateRetryInfo(secondary.storeId, job.datasetId, job.retryNum + 1, retryBackoff.toInt) } else { log.error("Unexpected exception while updating dataset {} in secondary {}; marking it as broken", job.datasetId.asInstanceOf[AnyRef], secondary.storeId, e) manifest(u).markSecondaryDatasetBroken(job, job.initialCookie) } } } private def 
unclaimDataset(u: Universe[CT, CV] with Commitable with PlaybackToSecondaryProvider with SecondaryManifestProvider with SecondaryReplicationMessagesProvider, secondary: NamedSecondary[CT, CV], job: SecondaryRecord, sendMessage: Boolean, startingMillis: Long): Unit ={ try { // We need to remove the job from our in memory list before we unclaim it or we have a race condition where // another thread claims it and errors out because it thinks we are still working on it. The main goal of // tracking what jobs we are working on in memory is to avoid continuing to update our claim on a job // that has experienced an unexpected error. A secondary purpose is to catch logic errors if we were to end up // claiming a job twice. Both of those goals are compatible with first removing the job from our in memory // list and then unclaiming it. SecondaryWatcherClaimManager.doneWorkingOn(secondary.storeId, job.datasetId) manifest(u).releaseClaimedDataset(job) u.commit() log.info("finished version: {}", job.endingDataVersion) // logic for sending messages to amq if (sendMessage) { try { replicationMessages(u).send( datasetId = job.datasetId, storeId = job.storeId, endingDataVersion = job.endingDataVersion, startingMillis = startingMillis, endingMillis = System.currentTimeMillis() ) } catch { case e: Exception => log.error("Unexpected exception sending message! Continuing regardless...", e) } } } catch { case e: Exception => log.error("Unexpected exception while releasing claim on dataset {} in secondary {}", job.datasetId.asInstanceOf[AnyRef], secondary.storeId, e) } } private def completeSecondaryMoveJobs(u: Universe[CT, CV] with SecondaryManifestProvider with PlaybackToSecondaryProvider with SecondaryMoveJobsProvider, job: SecondaryRecord): Unit = { val moveJobs = u.secondaryMoveJobs val storeId = job.storeId val datasetId = job.datasetId def forLog(jobs: Seq[SecondaryMoveJob]): String = jobs.map(_.id).toString() if (job.pendingDrop) { // For pending drops when "completing" move jobs we acquire a collocation lock to avoid race conditions // Note: the only currently automated process to mark a dataset for pending drop is here in the secondary // watcher after a dataset has completed replication to a destination store in a move job. 
try { log.info("Attempting to acquire collocation lock for pending drop of dataset.") if (collocationLock.acquire(collocationLockTimeout.toMillis)) { try { log.info("Acquired collocation lock for pending drop of dataset.") val movesToStore = moveJobs.jobsToStore(storeId, datasetId) if (movesToStore.nonEmpty) { log.error("Upon pending drop of dataset {} from store {} there are unexepected jobs moving to the store: {}", datasetId.toString, storeId.toString, forLog(movesToStore)) throw new Exception("Unexpected jobs moving dataset to store when it is pending drop!") } val movesFromStore = moveJobs.jobsFromStore(storeId, datasetId) log.info("Upon pending drop of dataset {} from store {} completed moves for jobs: {}", datasetId.toString, storeId.toString, forLog(movesFromStore)) moveJobs.markJobsFromStoreComplete(storeId, datasetId) } finally { collocationLock.release() } } else { log.error("Failed to acquire collocation lock during pending drop of {}", datasetId) throw CollocationLockTimeout(collocationLockTimeout.toMillis) } } catch { case error: CollocationLockError => log.error("Unexpected error with collocation lock during pending drop of dataset!", error) throw error } } else { val movesToStore = moveJobs.jobsToStore(storeId, datasetId) log.info("Upon replicating dataset {} to store {} started moves for jobs: {}", datasetId.toString, storeId.toString, forLog(movesToStore)) moveJobs.markJobsToStoreComplete(storeId, datasetId) // initiate drop from secondary stores movesToStore.foreach { move => manifest(u).markDatasetForDrop(move.fromStoreId, job.datasetId) } } } private def maybeSleep(storeId: String, nextRunTime: DateTime, finished: CountDownLatch): Boolean = { val remainingTime = nextRunTime.getMillis - System.currentTimeMillis() - nextRuntimeSplay finished.await(remainingTime, TimeUnit.MILLISECONDS) } def bestNextRunTime(storeId: String, target: DateTime, interval: Int): DateTime = { val now = DateTime.now() val remainingTime = (now.getMillis - target.getMillis) / 1000 if(remainingTime > 0) { val diffInIntervals = remainingTime / interval log.warn("{} is behind schedule {}s ({} intervals)", storeId, remainingTime.asInstanceOf[AnyRef], diffInIntervals.asInstanceOf[AnyRef]) val newTarget = target.plus(Seconds.seconds(Math.min(diffInIntervals * interval, Int.MaxValue).toInt)) log.warn("Resetting target time to {}", newTarget) newTarget } else { target } } /** * Cleanup any orphaned jobs created by this watcher exiting uncleanly. Needs to be run * without any workers running (ie. at startup). 
*/ def cleanOrphanedJobs(secondaryConfigInfo: SecondaryConfigInfo): Unit = { // At startup clean up any orphaned jobs which may have been created by this watcher for { u <- universe } { u.secondaryManifest.cleanOrphanedClaimedDatasets(secondaryConfigInfo.storeId, claimantId) } } def mainloop(secondaryConfigInfo: SecondaryConfigInfo, secondary: Secondary[CT, CV], finished: CountDownLatch): Unit = { var lastWrote = new DateTime(0L) var nextRunTime = bestNextRunTime(secondaryConfigInfo.storeId, secondaryConfigInfo.nextRunTime, secondaryConfigInfo.runIntervalSeconds) var done = maybeSleep(secondaryConfigInfo.storeId, nextRunTime, finished) while(!done) { try { for {u <- universe} { import u._ while(run(u, new NamedSecondary(secondaryConfigInfo.storeId, secondary, secondaryConfigInfo.groupName)) && finished.getCount != 0) { // loop until we either have no more work or we are told to exit } nextRunTime = bestNextRunTime(secondaryConfigInfo.storeId, nextRunTime.plus(Seconds.seconds(secondaryConfigInfo.runIntervalSeconds)), secondaryConfigInfo.runIntervalSeconds) // we only actually write at most once every 15 minutes... val now = DateTime.now() if(now.getMillis - lastWrote.getMillis >= 15 * 60 * 1000) { log.info("Writing new next-runtime: {}", nextRunTime) secondaryStoresConfig.updateNextRunTime(secondaryConfigInfo.storeId, nextRunTime) } lastWrote = now } done = maybeSleep(secondaryConfigInfo.storeId, nextRunTime, finished) } catch { case e: Exception => log.error("Unexpected exception while updating claimedAt time for secondary sync jobs claimed by watcherId " + claimantId.toString(), e) // avoid tight spin loop if we have recurring errors, eg. unable to talk to db Thread.sleep(10L * 1000) } } } } object SecondaryWatcherClaimManager { private val log = LoggerFactory.getLogger(classOf[SecondaryWatcherClaimManager]) // in-memory list of datasets that are actively being worked on by _this_ instance private object X // like Unit, but nullable because Java private val workingSet = new ConcurrentHashMap[(String, Long), X.type] def workingOn(storeId: String, datasetId: DatasetId): Unit = { if(workingSet.putIfAbsent((storeId, datasetId.underlying), X) ne null) { log.error("We have already claimed dataset {} for store {} in our working set." + s" An unexpected error has occurred; exiting.", datasetId, storeId) sys.exit(1) } log.debug(s"Added dataset {} for store $storeId to our working set which is now {}.", datasetId, workingSet) } def doneWorkingOn(storeId: String, datasetId: DatasetId): Unit = { workingSet.remove((storeId, datasetId.underlying)) log.debug(s"Removed dataset {} for store $storeId from our working set which is now {}.", datasetId, workingSet) } def andInWorkingSetSQL: String = { val ids = workingSet.keySet.iterator.asScala.map(_._2).to[Vector] if(ids.nonEmpty) { ids.mkString(" AND dataset_system_id IN (", ",", ")") } else { "" } } } class SecondaryWatcherClaimManager(dsInfo: DSInfo, claimantId: UUID, claimTimeout: FiniteDuration) { val log = LoggerFactory.getLogger(classOf[SecondaryWatcherClaimManager]) // A claim on a dataset on the secondary manifest expires after claimTimeout. // // To maintain our claim on a dataset we will update the claimed_at timestamp every // updateInterval = claimTimeout / 4 < claimTimeout, therefore as we update our claims on a shorter interval // than that they timeout, as long as our instance is running we will not lose our claims. 
// // To ensure that we are actually updating the claimed_at timestamp on the interval we intend to, with another thread // we audit when we last updated the claimed_at timestamp every checkUpdateInterval = claimedTimeout / 2. // Since checkUpdateInterval < claimTimeout and > updateInterval, we will catch a failure to update our claims // frequently enough before we lose our claims, and then shutdown and abort work on claims that will be lost. // // This way we can protect ourselves from having multiple threads / processes mistakenly claiming the same // store-dataset pairs. val updateInterval = claimTimeout / 4 val checkUpdateInterval = claimTimeout / 2 // We update this value after each time we successfully update the claimed_at time. // A separate ScheduledExecutor thread checks that this var gets updated frequently enough. var lastUpdate = Long.MinValue // Note: the finished CountDownLatch that gets passed signals that all job processing is (should be) done def scheduleHealthCheck(finished: CountDownLatch): Unit = { SecondaryWatcherScheduledExecutor.schedule( checkFailed = finished.getCount() > 0 && lastUpdate + checkUpdateInterval.toMillis < System.currentTimeMillis(), name = "update claimed_at time", interval = checkUpdateInterval ) } def mainloop(finished: CountDownLatch): Unit = { var done = awaitEither(finished, updateInterval.toMillis) while(!done) { def retryingUpdate(failureTimeoutMillis: Long, timeoutMillis: Long = 100): Unit = { if (System.currentTimeMillis() < failureTimeoutMillis) { try { updateDatasetClaimedAtTime() lastUpdate = System.currentTimeMillis() log.debug("Updated claimed_at time successfully") } catch { case e: Exception => log.warn("Unexpected exception while updating claimedAt time for secondary sync jobs " + "claimed by watcherId " + claimantId.toString() + ". Going to retry...", e) // if we have been told to terminate we can just do that; otherwise retry if (!awaitEither(finished, timeoutMillis)) retryingUpdate(failureTimeoutMillis, 2 * timeoutMillis) } } else { // else: the scheduled executor will deal with this log.warn("Failed to update claimed_at time before timing-out.") } } val failureTimeoutMillis = System.currentTimeMillis() + updateInterval.toMillis retryingUpdate(failureTimeoutMillis) // initial timeout is .5 second with a backoff of 2 * timeout val remainingTimeMillis = math.max(failureTimeoutMillis - System.currentTimeMillis(), 0.toLong) done = awaitEither(finished, remainingTimeMillis) // await for the rest of the updateInterval } } private def awaitEither(finished: CountDownLatch, interval: Long): Boolean = { finished.await(interval, TimeUnit.MILLISECONDS) } private def updateDatasetClaimedAtTime(): Unit = { // TODO: We have a difficult to debug problem here if our connection pool isn't large enough to satisfy // all our workers, since it can block claim updates and result in claims being overwritten. // For now we are working around it by configuring a large pool and relying on worker config to control // concurrency. We could potentially have a separate pool for the claim manager. using(dsInfo.dataSource.getConnection()) { conn => using(conn.prepareStatement( s"""UPDATE secondary_manifest |SET claimed_at = CURRENT_TIMESTAMP |WHERE claimant_id = ? 
AND (dataset_system_id, store_id) IN ( | SELECT dataset_system_id, store_id FROM secondary_manifest | WHERE claimant_id = ?${SecondaryWatcherClaimManager.andInWorkingSetSQL} | ORDER BY dataset_system_id, store_id FOR UPDATE |)""".stripMargin)) { stmt => stmt.setObject(1, claimantId) stmt.setObject(2, claimantId) stmt.executeUpdate() } } } } object SecondaryWatcherScheduledExecutor { val log = LoggerFactory.getLogger(classOf[SecondaryWatcher[_,_]]) private val scheduler: ScheduledExecutorService = Executors.newScheduledThreadPool(1) def shutdown() = SecondaryWatcherScheduledExecutor.scheduler.shutdownNow() def schedule(checkFailed: => Boolean, name: String, interval: FiniteDuration): Unit = { val runnable = new Runnable() { def run(): Unit = { try { Thread.currentThread().setName("SecondaryWatcher scheduled health checker") log.debug("Running scheduled health check of: {}", name) if (checkFailed) { log.error("Failed scheduled health check of: {}; exiting. Increase claim-timeout in config for break/step when debugging.", name) sys.exit(1) } } catch { case e: Exception => log.error(s"Unexpected exception while attempting to run scheduled health check for: $name; exiting.", e) sys.exit(1) } } } try { // If any execution of the task encounters an exception, subsequent executions are suppressed. // Hopefully the try-catch we have in run() will catch this and exit. scheduler.scheduleAtFixedRate(runnable, interval.toMillis, interval.toMillis, TimeUnit.MILLISECONDS) } catch { case e: Exception => log.error(s"Unexpected exception while attempting to schedule health check for: $name; exiting.", e) sys.exit(1) } } } object SecondaryWatcher { type UniverseType[CT, CV] = Universe[CT, CV] with Commitable with PlaybackToSecondaryProvider with SecondaryManifestProvider with SecondaryMoveJobsProvider with SecondaryReplicationMessagesProvider with SecondaryStoresConfigProvider } object SecondaryWatcherApp { def apply(secondaryProvider: Config => Secondary[SoQLType, SoQLValue]) { val rootConfig = ConfigFactory.load() val config = new SecondaryWatcherConfig(rootConfig, "com.socrata.coordinator.secondary-watcher") PropertyConfigurator.configure(Propertizer("log4j", config.log4j)) val log = LoggerFactory.getLogger(classOf[SecondaryWatcher[_,_]]) log.info(s"Starting secondary watcher with watcher claim uuid of ${config.watcherId}") val metricsOptions = config.metrics Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() { def uncaughtException(t: Thread, e: Throwable): Unit = { log.error(s"Uncaught exception in thread ${t.getName}, exiting", e) sys.exit(1) } }) for { dsInfo <- DataSourceFromConfig(config.database) reporter <- MetricsReporter.managed(metricsOptions) curator <- CuratorFromConfig(config.curator) } { val secondaries = config.secondaryConfig.instances.keysIterator.map { instanceName => instanceName -> secondaryProvider(config.secondaryConfig.instances(instanceName).config) }.toMap val executor = Executors.newCachedThreadPool() val collocationLockPath = s"/${config.discovery.name}/${config.collocation.lockPath}" val collocationLock = new CuratedCollocationLock(curator, collocationLockPath) val common = new SoQLCommon( dsInfo.dataSource, dsInfo.copyIn, executor, _ => None, new LoggedTimingReport(log) with StackedTimingReport with MetricsTimingReport with TaggableTimingReport, allowDdlOnPublishedCopies = false, // don't care, Duration.fromNanos(1L), // don't care config.instance, config.tmpdir, Duration.fromNanos(1L), // don't care Duration.fromNanos(1L), // don't care 
//Duration.fromNanos(1L), NullCache ) val messageProducerExecutor = Executors.newCachedThreadPool() val messageProducer = MessageProducerFromConfig(config.watcherId, messageProducerExecutor, config.messageProducerConfig) messageProducer.start() val w = new SecondaryWatcher(common.universe, config.watcherId, config.claimTimeout, config.backoffInterval, config.replayWait, config.maxReplayWait, config.maxRetries, config.maxReplays.getOrElse(Integer.MAX_VALUE), common.timingReport, messageProducer, collocationLock, config.collocation.lockTimeout) val cm = new SecondaryWatcherClaimManager(dsInfo, config.watcherId, config.claimTimeout) val SIGTERM = new Signal("TERM") val SIGINT = new Signal("INT") /** Flags when we want to start shutting down, don't process new work */ val initiateShutdown = new CountDownLatch(1) /** Flags when we have stopped processing work and are ready to actually shutdown */ val completeShutdown = new CountDownLatch(1) val signalHandler = new SignalHandler { val firstSignal = new java.util.concurrent.atomic.AtomicBoolean(true) def handle(signal: Signal): Unit = { log.info("Signalling shutdown") initiateShutdown.countDown() } } var oldSIGTERM: SignalHandler = null var oldSIGINT: SignalHandler = null try { log.info("Hooking SIGTERM and SIGINT") oldSIGTERM = Signal.handle(SIGTERM, signalHandler) oldSIGINT = Signal.handle(SIGINT, signalHandler) val claimTimeManagerThread = new Thread { setName("SecondaryWatcher claim time manager") override def run(): Unit = { cm.scheduleHealthCheck(completeShutdown) cm.mainloop(completeShutdown) } } val workerThreads = using(dsInfo.dataSource.getConnection()) { conn => val cfg = new SqlSecondaryStoresConfig(conn, common.timingReport) secondaries.iterator.flatMap { case (name, secondary) => cfg.lookup(name).map { info => w.cleanOrphanedJobs(info) 1 to config.secondaryConfig.instances(name).numWorkers map { n => new Thread { setName(s"Worker $n for secondary $name") override def run(): Unit = { w.mainloop(info, secondary, initiateShutdown) } } } }.orElse { log.warn("Secondary {} is defined, but there is no record in the secondary config table", name) None } }.toList.flatten } claimTimeManagerThread.start() workerThreads.foreach(_.start()) log.info("Going to sleep...") initiateShutdown.await() log.info("Waiting for worker threads to stop...") workerThreads.foreach(_.join()) // Can't shutdown claim time manager until workers stop or their jobs might be stolen log.info("Shutting down claim time manager...") completeShutdown.countDown() claimTimeManagerThread.join() log.info("Shutting down scheduled health checker...") SecondaryWatcherScheduledExecutor.shutdown() } finally { log.info("Shutting down message producer...") messageProducer.shutdown() messageProducerExecutor.shutdown() log.info("Un-hooking SIGTERM and SIGINT") if(oldSIGTERM != null) Signal.handle(SIGTERM, oldSIGTERM) if(oldSIGTERM != null) Signal.handle(SIGINT, oldSIGINT) } secondaries.values.foreach(_.shutdown()) executor.shutdown() } } }
socrata-platform/data-coordinator
secondarylib/src/main/scala/com/socrata/datacoordinator/secondary/SecondaryWatcher.scala
Scala
apache-2.0
29,533
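A worked example of the two delay formulas used in handlePlaybackErrors above (exponential retry backoff and log-scaled, capped replay wait); the concrete interval values are illustrative, not the service defaults.

import scala.concurrent.duration._

object SecondaryWatcherBackoffExample extends App {
  val backoffInterval = 30.seconds
  val replayWait      = 10.seconds
  val maxReplayWait   = 300.seconds

  // Retry delay doubles with each retry: 30s, 60s, 120s, 240s, ...
  def retryBackoff(retryNum: Int): Double =
    backoffInterval.toSeconds * math.pow(2, retryNum)

  // Replay delay grows logarithmically and is capped: ~6.9s, ~11.0s, ~13.9s, ...
  def replayAfter(replayNum: Int): Double =
    math.min(replayWait.toSeconds * math.log(replayNum + 2), maxReplayWait.toSeconds)

  (0 to 3).foreach(n => println(f"retry #$n -> ${retryBackoff(n)}%.1fs, replay #$n -> ${replayAfter(n)}%.1fs"))
}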
/* * Copyright 2019 ACINQ SAS * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package fr.acinq.eclair import akka.actor.{Actor, FSM} import akka.event.{DiagnosticLoggingAdapter, LoggingAdapter} /** * A version of akka.actor.DiagnosticActorLogging compatible with an FSM * See https://groups.google.com/forum/#!topic/akka-user/0CxR8CImr4Q */ trait FSMDiagnosticActorLogging[S, D] extends FSM[S, D] { import akka.event.Logging._ val diagLog: DiagnosticLoggingAdapter = akka.event.Logging(this) def mdc(currentMessage: Any): MDC = emptyMDC override def log: LoggingAdapter = diagLog override def aroundReceive(receive: Actor.Receive, msg: Any): Unit = try { diagLog.mdc(mdc(msg)) super.aroundReceive(receive, msg) } finally { diagLog.clearMDC() } }
ACINQ/eclair
eclair-core/src/main/scala/fr/acinq/eclair/FSMDiagnosticActorLogging.scala
Scala
apache-2.0
1,301
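A minimal sketch of a hypothetical FSM mixing in the trait above; the states, data and MDC key are illustrative. The point is that mdc() is recomputed for every message, so each log line carries message-specific context.

import akka.event.Logging.MDC
import fr.acinq.eclair.FSMDiagnosticActorLogging

object PingFsm {
  sealed trait State
  case object Idle extends State
  final case class Counters(received: Int)
}

class PingFsm extends FSMDiagnosticActorLogging[PingFsm.State, PingFsm.Counters] {
  import PingFsm._

  override def mdc(currentMessage: Any): MDC =
    Map("currentMessage" -> currentMessage.toString)

  startWith(Idle, Counters(0))

  when(Idle) {
    case Event(msg, counters) =>
      log.info("received {}", msg) // MDC entries from mdc() are attached to this log line
      stay() using counters.copy(received = counters.received + 1)
  }

  initialize()
}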
import scala.reflect.macros.whitebox._ import scala.language.experimental.macros class Macros(val c: Context) { def impl = ??? } object Macros { def impl(c: Context) = ??? } object Test extends App { def foo: Unit = macro Macros.impl }
felixmulder/scala
test/files/neg/macro-bundle-ambiguous.scala
Scala
bsd-3-clause
244
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.planner.plan.optimize.program import org.apache.flink.table.planner.calcite.RelTimeIndicatorConverter import org.apache.flink.util.Preconditions import org.apache.calcite.rel.RelNode /** * A FlinkOptimizeProgram that deals with time. * * @tparam OC OptimizeContext */ class FlinkRelTimeIndicatorProgram[OC <: FlinkOptimizeContext] extends FlinkOptimizeProgram[OC] { override def optimize(input: RelNode, context: OC): RelNode = { val rexBuilder = Preconditions.checkNotNull(context.getRexBuilder) RelTimeIndicatorConverter.convert(input, rexBuilder, context.needFinalTimeIndicatorConversion) } }
tillrohrmann/flink
flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/optimize/program/FlinkRelTimeIndicatorProgram.scala
Scala
apache-2.0
1,459
package controllers.admin import java.io.File import global.Global import ingest.harvest.GazetteerImporter import models.geo.{ Gazetteers, Gazetteer } import play.api.db.slick._ import play.api.mvc.Controller import play.api.Logger import play.api.libs.Files import play.api.libs.json.Json import play.api.libs.concurrent.Execution.Implicits._ object GazetteerAdminController extends BaseUploadController with Secured { def index = adminAction { username => implicit requestWithSession => Ok(views.html.admin.gazetteers()) } def deleteGazetteer(name: String) = adminAction { username => implicit requestWithSession => val gazetteer = Gazetteers.findByName(name) if (gazetteer.isDefined) { Logger.info("Deleting gazetteer: " + name) Gazetteers.delete(gazetteer.get.name) Global.index.deleteGazetter(gazetteer.get.name.toLowerCase) Logger.info("Done.") Status(200) } else { NotFound } } def uploadGazetteerDump = adminAction { username => implicit requestWithSession => val json = requestWithSession.request.body.asJson if (json.isDefined) { val url = (json.get \\ "url").as[String] Logger.info("Importing from " + url + " not implemented yet") // TODO implement! Ok(Json.parse("{ \\"message\\": \\"Not implemented yet.\\" }")) } else { processUpload("rdf", requestWithSession, { filepart => { // Original name of the uploaded file val filename = filepart.filename val gazetteerName = filename.substring(0, filename.indexOf(".")) // Play apparently removes the file after first read... But ingest will // need to read the file twice (once to count the places, second to import // them) so we create a copy here val tempFile = filepart.ref.file val copy = new File(tempFile.getAbsolutePath + "_cp") Files.copyFile(tempFile, copy, true, true) val importer = new GazetteerImporter(Global.index) val future = importer.importDataFileAsync(copy.getAbsolutePath, gazetteerName, Some(filename)) future.onComplete(_ => { Logger.info("Deleting file " + copy.getAbsolutePath) copy.delete() }) Redirect(routes.GazetteerAdminController.index).flashing("success" -> { "Import in progress." }) }}) } } }
pelagios/peripleo
app/controllers/admin/GazetteerAdminController.scala
Scala
gpl-3.0
2,420
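The upload handler above copies Play's temporary file before starting the asynchronous import, because the importer reads the file twice and Play may remove the original, then deletes the copy once the Future completes. A small standalone sketch of that copy-then-cleanup pattern using plain java.nio; the paths and the runImport function are illustrative stand-ins, not the project's own API.

import java.nio.file.{Files, Paths, StandardCopyOption}
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global

object CopyThenImport {
  // runImport stands in for an asynchronous importer such as importDataFileAsync
  def importViaCopy(uploadedPath: String)(runImport: String => Future[Unit]): Future[Unit] = {
    val copy = Paths.get(uploadedPath + "_cp")
    Files.copy(Paths.get(uploadedPath), copy, StandardCopyOption.REPLACE_EXISTING)
    val result = runImport(copy.toString)              // free to read the copy more than once
    result.onComplete(_ => Files.deleteIfExists(copy)) // clean up on success or failure
    result
  }
}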
package io.getquill.context.async.mysql import java.util.concurrent.ConcurrentLinkedQueue import scala.jdk.CollectionConverters._ import scala.concurrent.ExecutionContext.Implicits.global import scala.math.BigDecimal.int2bigDecimal import io.getquill.context.sql.QueryResultTypeSpec class QueryResultTypeMysqlAsyncSpec extends QueryResultTypeSpec { val context = testContext import testContext._ val insertedProducts = new ConcurrentLinkedQueue[Product] override def beforeAll = { await(testContext.run(deleteAll)) val ids = await(testContext.run(liftQuery(productEntries).foreach(e => productInsert(e)))) val inserted = (ids zip productEntries).map { case (id, prod) => prod.copy(id = id) } insertedProducts.addAll(inserted.asJava) () } def products = insertedProducts.asScala.toList "return list" - { "select" in { await(testContext.run(selectAll)) must contain theSameElementsAs (products) } "map" in { await(testContext.run(map)) must contain theSameElementsAs (products.map(_.id)) } "filter" in { await(testContext.run(filter)) must contain theSameElementsAs (products) } "withFilter" in { await(testContext.run(withFilter)) must contain theSameElementsAs (products) } "sortBy" in { await(testContext.run(sortBy)) must contain theSameElementsInOrderAs (products) } "take" in { await(testContext.run(take)) must contain theSameElementsAs (products) } "drop" in { await(testContext.run(drop)) must contain theSameElementsAs (products.drop(1)) } "++" in { await(testContext.run(`++`)) must contain theSameElementsAs (products ++ products) } "unionAll" in { await(testContext.run(unionAll)) must contain theSameElementsAs (products ++ products) } "union" in { await(testContext.run(union)) must contain theSameElementsAs (products) } "join" in { await(testContext.run(join)) must contain theSameElementsAs (products zip products) } "distinct" in { await(testContext.run(distinct)) must contain theSameElementsAs (products.map(_.id).distinct) } } "return single result" - { "min" - { "some" in { await(testContext.run(minExists)) mustEqual Some(products.map(_.sku).min) } "none" in { await(testContext.run(minNonExists)) mustBe None } } "max" - { "some" in { await(testContext.run(maxExists)) mustBe Some(products.map(_.sku).max) } "none" in { await(testContext.run(maxNonExists)) mustBe None } } "avg" - { "some" in { await(testContext.run(avgExists)) mustBe Some(BigDecimal(products.map(_.sku).sum) / products.size) } "none" in { await(testContext.run(avgNonExists)) mustBe None } } "size" in { await(testContext.run(productSize)) mustEqual products.size } "parametrized size" in { await(testContext.run(parametrizedSize(lift(10000)))) mustEqual 0 } "nonEmpty" in { await(testContext.run(nonEmpty)) mustEqual true } "isEmpty" in { await(testContext.run(isEmpty)) mustEqual false } } }
getquill/quill
quill-async-mysql/src/test/scala/io/getquill/context/async/mysql/QueryResultTypeMysqlAsyncSpec.scala
Scala
apache-2.0
3,215
def isSorted[A](as: Array[A], ls: (A, A) => Boolean): Boolean = {
  def check(index: Int, nextIndex: Int): Boolean = {
    if (nextIndex == as.length) true
    else if (!ls(as(index), as(nextIndex))) false
    else check(nextIndex, nextIndex + 1)
  }
  check(0, 1)
}
iUwej/functional-programming-in-scala-solutions
exer2.scala
Scala
unlicense
248
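A self-contained check of the function above; the definition is copied into the demo object so the sketch compiles on its own. Note that the implementation assumes a non-empty array, since check(0, 1) reads as(0) whenever the array has length zero.

object IsSortedDemo extends App {
  // copy of the definition above so this sketch runs standalone
  def isSorted[A](as: Array[A], ls: (A, A) => Boolean): Boolean = {
    def check(index: Int, nextIndex: Int): Boolean =
      if (nextIndex == as.length) true
      else if (!ls(as(index), as(nextIndex))) false
      else check(nextIndex, nextIndex + 1)
    check(0, 1)
  }

  println(isSorted(Array(1, 2, 3, 4), (a: Int, b: Int) => a <= b)) // true
  println(isSorted(Array(3, 1, 2),    (a: Int, b: Int) => a <= b)) // false
  println(isSorted(Array(42),         (a: Int, b: Int) => a <= b)) // true: no adjacent pair to violate the order
}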
/* Copyright (C) 2008-2016 University of Massachusetts Amherst. This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible) http://factorie.cs.umass.edu, http://github.com/factorie Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cc.factorie import java.io._ import org.junit.Test /** * User: apassos * Date: 10/10/12 */ class TestExamples { val emptyArgs = Array[String]() // Returns the name of a new temporary file with the specified def dummyFileWithContents(prefix: String, content: String): String = { val name = java.io.File.createTempFile("FactorieTestFile", prefix).getAbsolutePath val writer = new BufferedWriter(new FileWriter(name)) writer.write(content) writer.close() name } def dummyDirectoryWithFileWithContents(prefix: String, content: String, ext: String = ".txt"): String = { val dir = java.io.File.createTempFile("FactorieTestFile", prefix) new File(dir.getAbsolutePath + "2").mkdirs() val n1 = dir.getAbsolutePath + "2" + java.io.File.separator + "f1" + ext val writer = new BufferedWriter(new FileWriter(n1)) writer.write(content) writer.close() dir.getAbsolutePath+"2" } val dummyNERFile = dummyFileWithContents("train", "A NN C I-PER\\nA NNS D O\\nA NNP C I-LOC") @Test def testChainNER1ML() { cc.factorie.tutorial.ChainNERExample.main(Array(dummyNERFile, dummyNERFile)) } @Test def testDirichletDemo() { cc.factorie.tutorial.DirichletDemo.main(emptyArgs) } val dummyDir1 = dummyDirectoryWithFileWithContents("documentDir1", "I am a file\\n") val dummyDir2 = dummyDirectoryWithFileWithContents("documentDir2", "I am a other file\\n") @Test def testDocumentClassifier1() { cc.factorie.tutorial.DocumentClassifier1.main(Array(dummyDir1, dummyDir2)) } val posFile = dummyFileWithContents("POS", "\\nHello NN\\nWorld NN\\n") @Test def testForwardBackwardPOS() { cc.factorie.tutorial.ForwardBackwardPOS.main(Array("--train", posFile, "--dev", posFile, "--test", posFile)) } @Test def testGaussianDemo() { cc.factorie.tutorial.GaussianDemo.main(emptyArgs) } @Test def testGaussianMixtureDemo() { cc.factorie.tutorial.GaussianMixtureDemo.main(emptyArgs) } @Test def testMultivariateGaussianDemo() { cc.factorie.tutorial.MultivariateGaussianDemo.main(emptyArgs) } @Test def testMultivariateGaussianMixtureDemo() { cc.factorie.tutorial.MultivariateGaussianMixtureDemo.main(emptyArgs) } @Test def testGrid() { cc.factorie.tutorial.Grid.main(emptyArgs) } @Test def testSimpleLDA() { cc.factorie.tutorial.SimpleLDA.main(Array(dummyDir1)) } @Test def testEfficientLDA() { cc.factorie.tutorial.EfficientLDA.main(Array(dummyDir1)) } @Test def testTopicsOverTime() { cc.factorie.tutorial.TopicsOverTime.main(Array(dummyDir1, dummyDir2)) } @Test def testMultinomialDemo() { cc.factorie.tutorial.MultinomialDemo.main(emptyArgs) } @Test def testTutorialVariables() { cc.factorie.tutorial.TutorialVariables.main(emptyArgs) } @Test def testTutorialDomain() { cc.factorie.tutorial.TutorialDomain.main(emptyArgs) } @Test def testTutorialFactors() { cc.factorie.tutorial.TutorialFactors.main(emptyArgs) } @Test def testTutorialFamily() { 
cc.factorie.tutorial.TutorialFamily.main(emptyArgs) } @Test def testTutorialModel() { cc.factorie.tutorial.TutorialModel.main(emptyArgs) } @Test def testTutorialLearning() { cc.factorie.tutorial.TutorialLearning.main(emptyArgs) } @Test def testTutorialParallelismAndHyperparameters() { cc.factorie.tutorial.TutorialParallelismAndHyperparameters.main(emptyArgs) } @Test def testWordSegmenter() { cc.factorie.tutorial.WordSegmenter.main(emptyArgs) } }
strubell/factorie
src/test/scala/cc/factorie/TestExamples.scala
Scala
apache-2.0
4,275
package list import list.P09.pack /** * P10 (*) Run-length encoding of a list. */ object P10 { def encode[A](list: List[A]): List[(Int, A)] = pack(list) map { e => (e.length, e.head) } }
zjt1114/scala99
src/main/scala/list/P10.scala
Scala
apache-2.0
197
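Since pack comes from P09 and its body is not shown here, a standalone sketch with an assumed span-based pack makes the run-length encoding concrete; the pack shown below is an assumption, not necessarily the P09 implementation.

object RunLengthEncodingDemo extends App {
  // an assumed implementation of P09's pack: group consecutive equal elements
  def pack[A](list: List[A]): List[List[A]] = list match {
    case Nil => Nil
    case xs  =>
      val (same, rest) = xs.span(_ == xs.head)
      same :: pack(rest)
  }

  def encode[A](list: List[A]): List[(Int, A)] =
    pack(list).map(run => (run.length, run.head))

  println(encode(List('a', 'a', 'a', 'b', 'c', 'c', 'a')))
  // List((3,a), (1,b), (2,c), (1,a))
}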
package skinny.micro.cookie import javax.servlet.http.{ HttpServletRequest, HttpServletResponse } import skinny.micro.implicits.ServletApiImplicits import scala.collection.mutable /** * Extended cookie object. */ class SweetCookies( private[this] val request: HttpServletRequest, private[this] val response: HttpServletResponse) extends ServletApiImplicits { private[this] lazy val cookies = mutable.HashMap[String, String]() ++ request.cookies def get(key: String): Option[String] = cookies.get(key) def apply(key: String): String = { cookies.get(key) getOrElse (throw new Exception("No cookie could be found for the specified key")) } def update(name: String, value: String)( implicit cookieOptions: CookieOptions = CookieOptions()): Cookie = { cookies += name -> value addCookie(name, value, cookieOptions) } def set(name: String, value: String)( implicit cookieOptions: CookieOptions = CookieOptions()): Cookie = { this.update(name, value)(cookieOptions) } def delete(name: String)(implicit cookieOptions: CookieOptions = CookieOptions()): Unit = { cookies -= name addCookie(name, "", cookieOptions.copy(maxAge = 0)) } def +=(keyValuePair: (String, String))( implicit cookieOptions: CookieOptions = CookieOptions()): Unit = { this.update(keyValuePair._1, keyValuePair._2)(cookieOptions) } def +=(cookie: Cookie): Unit = { this.update(cookie.name, cookie.value)(cookie.options) } def ++=(cookies: Seq[Cookie]): Unit = { cookies.foreach { cookie => this.update(cookie.name, cookie.value)(cookie.options) } } def -=(key: String)(implicit cookieOptions: CookieOptions = CookieOptions()): Unit = { delete(key)(cookieOptions) } private def addCookie(name: String, value: String, options: CookieOptions): Cookie = { val cookie = new Cookie(name, value)(options) response.addCookie(cookie) cookie } }
xerial/skinny-micro
micro/src/main/scala/skinny/micro/cookie/SweetCookies.scala
Scala
bsd-2-clause
1,937
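A hypothetical call site showing the SweetCookies API from servlet code; only maxAge is passed to CookieOptions because that is the one option the class above demonstrably uses, and the constructor-with-named-argument usage is an assumption about CookieOptions.

import javax.servlet.http.{HttpServletRequest, HttpServletResponse}
import skinny.micro.cookie.{CookieOptions, SweetCookies}

object CookieUsageSketch {
  // request and response would be supplied by the servlet container
  def rememberTheme(request: HttpServletRequest, response: HttpServletResponse): Unit = {
    val cookies = new SweetCookies(request, response)

    cookies.set("theme", "dark")(CookieOptions(maxAge = 3600)) // adds a Set-Cookie header
    val current: Option[String] = cookies.get("theme")         // sees the value written above
    cookies.delete("legacy-session")                           // re-sends the cookie with maxAge = 0
  }
}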
/* * Copyright 2016 rdbc contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.rdbc.sapi.exceptions class MissingColumnException(val column: String, maybeCause: Option[Throwable] = None) extends RdbcException( s"Requested column '$column' is not present in the row", maybeCause )
rdbc-io/rdbc
rdbc-api-scala/src/main/scala/io/rdbc/sapi/exceptions/MissingColumnException.scala
Scala
apache-2.0
827
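A small, hypothetical sketch of where an exception like the one above would be raised when a requested column is absent; the Map-based "row" is purely illustrative, real rdbc rows expose typed accessors.

import io.rdbc.sapi.exceptions.MissingColumnException

object RowAccessSketch {
  def requireColumn(row: Map[String, Any], column: String): Any =
    row.getOrElse(column, throw new MissingColumnException(column))
}

// RowAccessSketch.requireColumn(Map("id" -> 1), "name")
// throws: MissingColumnException: Requested column 'name' is not present in the row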
package nz.ubermouse.rsbot.actions import com.epicbot.api.rs3.methods.interactive.Players import com.epicbot.api.rs3.methods.{Walking, Calculations} import com.epicbot.api.rs3.wrappers.Locatable import nz.ubermouse.rsbot.helpers.Wait class MoveTo(location: Locatable) extends Action { override def run: Boolean = { if(Walking.getDestination.equals(location.getLocation)) return false if(Calculations.isOnScreen(location.getLocation.getCentralPoint)) Walking.walk(location.getLocation.randomize(5, 5)) else location.getLocation.clickOnMap() Wait.until()(Players.getLocal.isMoving) } }
UberMouse/RSArchitectureTest
src/nz/ubermouse/rsbot/actions/MoveTo.scala
Scala
mit
626
/** * This file is part of mycollab-web. * * mycollab-web is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * mycollab-web is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with mycollab-web. If not, see <http://www.gnu.org/licenses/>. */ package com.esofthead.mycollab.module.project.view.parameters import com.esofthead.mycollab.vaadin.mvp.ScreenData /** * @author MyCollab Ltd. * @since 5.0.3 */ object FileScreenData { class GotoDashboard extends ScreenData {} }
uniteddiversity/mycollab
mycollab-web/src/main/scala/com.esofthead.mycollab.module.project.view.parameters/FileScreenData.scala
Scala
agpl-3.0
934
/* ************************************************************************************* * Copyright 2013 Normation SAS ************************************************************************************* * * This file is part of Rudder. * * Rudder is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * In accordance with the terms of section 7 (7. Additional Terms.) of * the GNU General Public License version 3, the copyright holders add * the following Additional permissions: * Notwithstanding to the terms of section 5 (5. Conveying Modified Source * Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General * Public License version 3, when you create a Related Module, this * Related Module is not considered as a part of the work and may be * distributed under the license agreement of your choice. * A "Related Module" means a set of sources files including their * documentation that, without modification of the Source Code, enables * supplementary functions or services in addition to those offered by * the Software. * * Rudder is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Rudder. If not, see <http://www.gnu.org/licenses/>. * ************************************************************************************* */ package com.normation.rudder.web.components.popup import bootstrap.liftweb.RudderConfig import net.liftweb.common._ import net.liftweb.http.DispatchSnippet import net.liftweb.util._ import net.liftweb.util.Helpers._ import scala.xml._ import net.liftweb.http._ import net.liftweb.http.js._ import net.liftweb.http.js.JsCmds._ import net.liftweb.http.js.JE._ import com.normation.rudder.domain.parameters._ import com.normation.eventlog.ModificationId import org.joda.time.DateTime import com.normation.rudder.web.model.CurrentUser import com.normation.rudder.web.model.{ WBTextField, FormTracker, WBTextAreaField, WBRadioField } import java.util.regex.Pattern import CreateOrUpdateGlobalParameterPopup._ import com.normation.rudder.domain.workflows.ChangeRequestId class CreateOrUpdateGlobalParameterPopup( parameter : Option[GlobalParameter] , workflowEnabled : Boolean , action : String //one among: create, save or delete , onSuccessCallback : (Either[GlobalParameter,ChangeRequestId]) => JsCmd = { x => Noop } , onFailureCallback : () => JsCmd = { () => Noop } ) extends DispatchSnippet with Loggable { private[this] val roParameterService = RudderConfig.roParameterService private[this] val woParameterService = RudderConfig.woParameterService private[this] val uuidGen = RudderConfig.stringUuidGenerator private[this] val userPropertyService= RudderConfig.userPropertyService private[this] val woChangeRequestRepo = RudderConfig.woChangeRequestRepository private[this] val changeRequestService = RudderConfig.changeRequestService private[this] val workflowService = RudderConfig.workflowService def dispatch = { case "popupContent" => { _ => popupContent } } /* Text variation for * - Global Parameter * - Create, delete, modify (save) */ private def titles = Map( "delete" -> "Delete a Global Parameter" , "save" -> "Update a Global Parameter" , "create" -> 
"Create a Global Parameter" ) private[this] val titleWorkflow = workflowEnabled match { case true => <h4 class="col-lg-12 col-sm-12 col-xs-12 audit-title">Change Request</h4> <hr class="css-fix"/> <div class="text-center alert alert-info"> <span class="glyphicon glyphicon-info-sign"></span> Workflows are enabled, your change has to be validated in a Change request </div> case false => NodeSeq.Empty } private[this] def globalParamDiffFromAction(newParameter : GlobalParameter): Box[ChangeRequestGlobalParameterDiff] = { parameter match { case None => if ((action == "save") || (action == "create")) Full(AddGlobalParameterDiff(newParameter)) else Failure(s"Action ${action} is not possible on a new Global Parameter") case Some(d) => action match { case "delete" => Full(DeleteGlobalParameterDiff(d)) case "save" | "create" => Full(ModifyToGlobalParameterDiff(newParameter)) case _ => Failure(s"Action ${action} is not possible on a existing Global Parameter") } } } private[this] def onSubmit() : JsCmd = { if(formTracker.hasErrors) { onFailure } else { val newParameter = new GlobalParameter( name = ParameterName(parameterName.get), value = parameterValue.get, description = parameterDescription.get, overridable = parameterOverridable ) val savedChangeRequest = { for { diff <- globalParamDiffFromAction(newParameter) cr <- changeRequestService.createChangeRequestFromGlobalParameter( changeRequestName.get , paramReasons.map( _.get ).getOrElse("") , newParameter , parameter , diff , CurrentUser.getActor , paramReasons.map( _.get ) ) wfStarted <- workflowService.startWorkflow(cr.id, CurrentUser.getActor, paramReasons.map(_.get)) } yield { cr.id } } savedChangeRequest match { case Full(cr) => if (workflowEnabled) { // TODO : do more than that closePopup() & onSuccessCallback(Right(cr)) } else closePopup() & onSuccessCallback(Left(newParameter)) case eb:EmptyBox => logger.error("An error occurred while updating the parameter") formTracker.addFormError(error("An error occurred while updating the parameter")) onFailure } } } private[this] def onFailure: JsCmd = { formTracker.addFormError(error("The form contains some errors, please correct them")) updateFormClientSide() } private[this] def closePopup() : JsCmd = { JsRaw("""$('#createGlobalParameterPopup').bsModal('hide');""") } /** * Update the form when something happened */ private[this] def updateFormClientSide() : JsCmd = { SetHtml(htmlId_popupContainer, popupContent()) } private[this] def updateAndDisplayNotifications(formTracker : FormTracker) : NodeSeq = { val notifications = formTracker.formErrors formTracker.cleanErrors if(notifications.isEmpty) { NodeSeq.Empty } else { <div id="notifications" class="alert alert-danger text-center col-lg-12 col-xs-12 col-sm-12" role="alert"><ul class="text-danger">{notifications.map( n => <li>{n}</li>) }</ul> </div> } } ////////////////////////// fields for form //////////////////////// private[this] val patternName = Pattern.compile("[a-zA-Z0-9_]+"); private[this] val parameterName = new WBTextField("Name", parameter.map(_.name.value).getOrElse("")) { override def setFilter = notNull _ :: trim _ :: Nil override def errorClassName = "col-lg-12 errors-container" override def inputField = (parameter match { case Some(entry) => super.inputField % ("disabled" -> "true") case None => super.inputField }) % ("onkeydown" , "return processKey(event , 'createParameterSaveButton')") % ("tabindex","1") override def validations = valMinLen(3, "The name must have at least 3 characters") _ :: valRegex(patternName, "The name can 
contain only letters, digits and underscore") _ :: Nil } // The value may be empty private[this] val parameterValue = new WBTextAreaField("Value", parameter.map(_.value).getOrElse("")) { override def setFilter = trim _ :: Nil override def inputField = ( action match { case "delete" => super.inputField % ("disabled" -> "true") case _ => super.inputField }) % ("style" -> "height:4em") % ("tabindex","2") override def errorClassName = "col-lg-12 errors-container" override def validations = Nil } private[this] val parameterDescription = new WBTextAreaField("Description", parameter.map(_.description).getOrElse("")) { override def setFilter = notNull _ :: trim _ :: Nil override def inputField =( action match { case "delete" => super.inputField % ("disabled" -> "true") case _ => super.inputField }) % ("tabindex","3") override def errorClassName = "col-lg-12 errors-container" override def validations = Nil } private[this] val defaultActionName = Map ( "save" -> "Update" , "create" -> "Create" , "delete" -> "Delete" )(action) private[this] val defaultClassName = Map ( "save" -> "btn-success" , "create" -> "btn-success" , "delete" -> "btn-danger" )(action) private[this] val defaultRequestName = s"${defaultActionName} Global Parameter " + parameter.map(_.name.value).getOrElse("") private[this] val changeRequestName = new WBTextField("Change request title", defaultRequestName) { override def setFilter = notNull _ :: trim _ :: Nil override def errorClassName = "col-lg-12 errors-container" override def inputField = super.inputField % ("onkeydown" , "return processKey(event , 'createDirectiveSaveButton')") % ("tabindex","4") override def validations = valMinLen(3, "The name must have at least 3 characters") _ :: Nil } val parameterOverridable = true private[this] val paramReasons = { import com.normation.rudder.web.services.ReasonBehavior._ userPropertyService.reasonsFieldBehavior match { case Disabled => None case Mandatory => Some(buildReasonField(true, "subContainerReasonField")) case Optionnal => Some(buildReasonField(false, "subContainerReasonField")) } } def buildReasonField(mandatory:Boolean, containerClass:String = "twoCol") = { new WBTextAreaField("Change audit message", "") { override def setFilter = notNull _ :: trim _ :: Nil override def inputField = super.inputField % ("style" -> "height:5em;") % ("tabindex","5") % ("placeholder" -> {userPropertyService.reasonsFieldExplanation}) override def errorClassName = "col-lg-12 errors-container" override def validations() = { if(mandatory){ valMinLen(5, "The reason must have at least 5 characters.") _ :: Nil } else { Nil } } } } private[this] val formTracker = { val fields = parameterName :: parameterValue :: paramReasons.toList ::: { if (workflowEnabled) changeRequestName :: Nil else Nil } new FormTracker(fields) } private[this] var notifications = List.empty[NodeSeq] private[this] def error(msg:String) = Text(msg) def popupContent() = { val (buttonName, classForButton) = workflowEnabled match { case true => ("Open Request", "wideButton btn-primary") case false => (defaultActionName, defaultClassName) } ( "#title *" #> titles(action) & ".name" #> parameterName.toForm_! & ".value" #> parameterValue.toForm_! & ".description *" #> parameterDescription.toForm_! 
& "#titleWorkflow *" #> titleWorkflow & "#changeRequestName" #> { if (workflowEnabled) { changeRequestName.toForm } else Full(NodeSeq.Empty) } & "#delete *" #> { if (action=="delete") { <div class="row"> </div> } else { NodeSeq.Empty } } & ".itemReason *" #> { //if (buttonName=="Delete") paramReasons.map { f => <div> {if (!workflowEnabled) { <h4 class="col-lg-12 col-sm-12 col-xs-12 audit-title">Change Audit Log</h4> }} {f.toForm_!} </div> } } & "#cancel" #> (SHtml.ajaxButton("Cancel", { () => closePopup() }) % ("tabindex","6") % ("class","btn btn-default") ) & "#save" #> (SHtml.ajaxSubmit( buttonName, onSubmit _) % ("id","createParameterSaveButton") % ("tabindex","5") % ("class", s"btn ${classForButton}")) andThen ".notifications *" #> { updateAndDisplayNotifications(formTracker) } ).apply(formXml()) } private[this] def formXml() : NodeSeq = { SHtml.ajaxForm( <div id="paramForm" class="modal-backdrop fade in" style="height: 100%;"></div> <div class="modal-dialog"> <div class="modal-content"> <div class="modal-header"> <div class="close" data-dismiss="modal"> <span aria-hidden="true">&times;</span> <span class="sr-only">Close</span> </div> <h4 class="modal-title" id="title">Here come title</h4> </div> <div class="modal-body"> <div class="notifications">Here comes validation messages</div> <div class="name"/> <div class="value"/> <div class="description"/> <div id="changeRequestZone"> <div id="titleWorkflow"/> <input type="text" id="changeRequestName" /> </div> <div class="itemReason"/> <div id="delete" /> </div> <div class="modal-footer"> <div id="cancel"/> <div id="save"/> </div> </div><!-- /.modal-content --> </div><!-- /.modal-dialog --> ) } } object CreateOrUpdateGlobalParameterPopup { val htmlId_popupContainer = "createGlobalParameterContainer" val htmlId_popup = "createGlobalParameterPopup" }
armeniaca/rudder
rudder-web/src/main/scala/com/normation/rudder/web/components/popup/CreateOrUpdateGlobalParameterPopup.scala
Scala
gpl-3.0
14,182
package fr.inria.spirals.sigma.ttc14.fixml import fr.inria.spirals.sigma.ttc14.fixml.objlang.support.ObjLang import fr.inria.spirals.sigma.ttc14.fixml.objlang.support.ObjLang._objlang._ trait ObjLang2CPP extends BaseObjLangMTT { var cppHeaderSuffix = "h" implicit class ClassCPPUtil(that: Classifier) { def cppHeaderFile = s"${that.name}.${cppHeaderSuffix}" } override def toCode(e: NullLiteral) = "NULL" override def type2Code(e: TypedElement): String = (e.type_.isInstanceOf[Class], e.many) match { case (true, false) => class2Code(e.type_) + "*" case (true, true) => class2Code(e.type_) + "**" case (false, true) => class2Code(e.type_) + "[]" case (false, false) => class2Code(e.type_) } override def toCode(a: ArrayLiteral): String = s"new ${class2Code(a.type_)}*[${a.elements.size}] { ${a.elements map (toCode) mkString (", ")} }" override def class2Code(p: DataType) = "std::string" }
fikovnik/ttc14-fixml-sigma
ttc14-fixml-base/src/fr/inria/spirals/sigma/ttc14/fixml/ObjLang2CPP.scala
Scala
epl-1.0
938
package lila.importer import akka.actor.ActorRef import chess.Color import chess.format.UciMove import lila.game.{ Game, Player, Source, GameRepo, Pov } import lila.hub.actorApi.map.Tell import lila.round.actorApi.round._ import scala.concurrent.duration._ final class Live( roundMap: ActorRef) { def create = { val variant = chess.variant.Standard val g = Game.make( game = chess.Game(variant), whitePlayer = Player.white, blackPlayer = Player.black, mode = chess.Mode.Casual, variant = variant, source = Source.ImportLive, pgnImport = none).start GameRepo insertDenormalized g inject g } def move(id: String, move: String, ip: String) = GameRepo game id flatMap { _ filter (g => g.playable && g.imported) match { case None => fufail("No such playing game: " + id) case Some(game) => UciMove(move) match { case None => move match { case "1-0" => fuccess { roundMap ! Tell(game.id, Resign(game.blackPlayer.id)) } case "0-1" => fuccess { roundMap ! Tell(game.id, Resign(game.whitePlayer.id)) } case "1/2-1/2" => fuccess { roundMap ! Tell(game.id, DrawForce) } case m => fufail("Invalid move: " + m) } case Some(uci) => fuccess { applyMove(Pov(game, game.player.color), uci, ip) } } } } private def applyMove(pov: Pov, move: UciMove, ip: String) { roundMap ! Tell(pov.gameId, HumanPlay( playerId = pov.playerId, ip = ip, orig = move.orig.toString, dest = move.dest.toString, prom = move.promotion map (_.name), blur = false, lag = 0.millis )) } }
terokinnunen/lila
modules/importer/src/main/Live.scala
Scala
mit
1,790
///* // * Copyright 2017 Datamountaineer. // * // * Licensed under the Apache License, Version 2.0 (the "License"); // * you may not use this file except in compliance with the License. // * You may obtain a copy of the License at // * // * http://www.apache.org/licenses/LICENSE-2.0 // * // * Unless required by applicable law or agreed to in writing, software // * distributed under the License is distributed on an "AS IS" BASIS, // * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // * See the License for the specific language governing permissions and // * limitations under the License. // */ // //// If only the Apache Hadoop will get to refresh dependencies! guava 11.0.2 // //package com.datamountaineer.streamreactor.connect.hbase.writers // //import com.datamountaineer.streamreactor.connect.hbase.BytesHelper._ //import com.datamountaineer.streamreactor.connect.hbase.config.{ // ConfigurationBuilder, // HBaseConfig, // HBaseConfigConstants, // HBaseSettings //} //import com.datamountaineer.streamreactor.connect.hbase.{ // FieldsValuesExtractor, // HbaseHelper, // HbaseTableHelper, // StructFieldsRowKeyBuilderBytes //} //import org.apache.hadoop.hbase.HBaseTestingUtility //import org.apache.hadoop.hbase.client.Connection //import org.apache.hadoop.hbase.util.Bytes //import org.apache.kafka.connect.data.{Schema, SchemaBuilder, Struct} //import org.apache.kafka.connect.errors.RetriableException //import org.apache.kafka.connect.sink.SinkRecord //import org.mockito.MockitoSugar //import org.scalatest.BeforeAndAfterAll //import org.scalatest.matchers.should.Matchers //import org.scalatest.wordspec.AnyWordSpec // //import scala.collection.JavaConverters._ // //class HbaseWriterTest // extends AnyWordSpec // with Matchers // with MockitoSugar // with BeforeAndAfterAll { // // var miniCluster: HBaseTestingUtility = _ // // override def beforeAll() { // miniCluster = new HBaseTestingUtility() // miniCluster.startMiniCluster() // } // // override def afterAll() {} // // "HbaseWriter" should { // // "write an Hbase row for each SinkRecord provided using StructFieldsRowKeyBuilderBytes" in { // // val fieldsExtractor = mock[FieldsValuesExtractor] // val rowKeyBuilder = mock[StructFieldsRowKeyBuilderBytes] // val tableName = "someTable" // val topic = "someTopic" // val columnFamily = "somecolumnFamily" // // val QUERY_ALL = // s"INSERT INTO $tableName SELECT * FROM $topic PK firstName" // val props = Map( // HBaseConfigConstants.KCQL_QUERY -> QUERY_ALL, // HBaseConfigConstants.COLUMN_FAMILY -> "somecolumnFamily" // ).asJava // // val config = HBaseConfig(props) // val settings = HBaseSettings(config) // val hbaseConfig = ConfigurationBuilder.buildHBaseConfig(settings) // // val writer = new HbaseWriter(settings, hbaseConfig) // // val schema = SchemaBuilder // .struct() // .name("com.example.Person") // .field("firstName", Schema.STRING_SCHEMA) // .field("age", Schema.INT32_SCHEMA) // .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA) // .build() // // val struct1 = new Struct(schema).put("firstName", "Alex").put("age", 30) // val struct2 = new Struct(schema) // .put("firstName", "Mara") // .put("age", 22) // .put("threshold", 12.4) // // val sinkRecord1 = new SinkRecord(topic, 1, null, null, schema, struct1, 0) // val sinkRecord2 = new SinkRecord(topic, 1, null, null, schema, struct2, 1) // // when(rowKeyBuilder.build(sinkRecord1, null)).thenReturn(10.fromInt()) // when(rowKeyBuilder.build(sinkRecord2, null)).thenReturn(11.fromInt()) // // 
when(fieldsExtractor.get(struct1)).thenReturn( // Seq("firstName" -> "Alex".fromString(), "age" -> 30.fromInt())) // when(fieldsExtractor.get(struct2)).thenReturn( // Seq("firstName" -> "Mara".fromString(), // "age" -> 22.fromInt(), // "threshold" -> 12.4.fromDouble())) // // HbaseHelper.autoclose(HbaseReaderHelper.createConnection) { connection => // implicit val conn: Connection = connection // try { // HbaseTableHelper.createTable(tableName, columnFamily) // writer.write(Seq(sinkRecord1, sinkRecord2)) // // val data = HbaseReaderHelper.getAllRecords(tableName, columnFamily) // // data.size shouldBe 2 // // val row1 = data.filter { r => // Bytes.toString(r.key) == "Alex" // }.head // row1.cells.size shouldBe 2 // // Bytes.toString(row1.cells("firstName")) shouldBe "Alex" // Bytes.toInt(row1.cells("age")) shouldBe 30 // // val row2 = data.filter { r => // Bytes.toString(r.key) == "Mara" // }.head // row2.cells.size shouldBe 3 // // Bytes.toString(row2.cells("firstName")) shouldBe "Mara" // Bytes.toInt(row2.cells("age")) shouldBe 22 // Bytes.toDouble(row2.cells("threshold")) shouldBe 12.4 // // } finally { // HbaseTableHelper.deleteTable(tableName) // } // } // } // // "write an Hbase row for each SinkRecord provided using GenericRowKeyBuilderBytes" in { // // val fieldsExtractor = mock[FieldsValuesExtractor] // val tableName = "someTable" // val topic = "someTopic" // val columnFamily = "somecolumnFamily" // val QUERY_ALL = s"INSERT INTO $tableName SELECT * FROM $topic" // // val props = Map( // HBaseConfigConstants.KCQL_QUERY -> QUERY_ALL, // HBaseConfigConstants.COLUMN_FAMILY -> "somecolumnFamily" // ).asJava // // val config = HBaseConfig(props) // val settings = HBaseSettings(config) // val hbaseConfig = ConfigurationBuilder.buildHBaseConfig(settings) // val writer = new HbaseWriter(settings, hbaseConfig) // // val schema = SchemaBuilder // .struct() // .name("com.example.Person") // .field("firstName", Schema.STRING_SCHEMA) // .field("age", Schema.INT32_SCHEMA) // .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA) // .build() // // val struct1 = new Struct(schema).put("firstName", "Alex").put("age", 30) // val struct2 = new Struct(schema) // .put("firstName", "Mara") // .put("age", 22) // .put("threshold", 12.4) // // val sinkRecord1 = new SinkRecord(topic, 1, null, null, schema, struct1, 0) // val sinkRecord2 = new SinkRecord(topic, 1, null, null, schema, struct2, 1) // // when(fieldsExtractor.get(struct1)).thenReturn( // Seq("firstName" -> "Alex".fromString(), "age" -> 30.fromInt()) // ) // when(fieldsExtractor.get(struct2)).thenReturn( // Seq( // "firstName" -> "Mara".fromString(), // "age" -> 22.fromInt(), // "threshold" -> 12.4.fromDouble() // ) // ) // // HbaseHelper.autoclose(HbaseReaderHelper.createConnection) { connection => // implicit val conn: Connection = connection // try { // HbaseTableHelper.createTable(tableName, columnFamily) // writer.write(Seq(sinkRecord1, sinkRecord2)) // // val data = HbaseReaderHelper.getAllRecords(tableName, columnFamily) // // data.size shouldBe 2 // // val row1 = data.filter { r => // Bytes.toString(r.key).equals(s"$topic|1|0") // }.head // row1.cells.size shouldBe 2 // // Bytes.toString(row1.cells("firstName")) shouldBe "Alex" // Bytes.toInt(row1.cells("age")) shouldBe 30 // // val row2 = data.filter { r => // Bytes.toString(r.key).equals(s"$topic|1|1") // }.head // row2.cells.size shouldBe 3 // // Bytes.toString(row2.cells("firstName")) shouldBe "Mara" // Bytes.toInt(row2.cells("age")) shouldBe 22 // 
Bytes.toDouble(row2.cells("threshold")) shouldBe 12.4 // // } finally { // HbaseTableHelper.deleteTable(tableName) // } // } // } // // "should recover from failure if set to retry" ignore { // // val fieldsExtractor = mock[FieldsValuesExtractor] // val rowKeyBuilder = mock[StructFieldsRowKeyBuilderBytes] // // val tableName = "someTable" // val topic = "someTopic" // val columnFamily = "somecolumnFamily" // val QUERY_ALL = // s"INSERT INTO $tableName SELECT * FROM $topic PK firstName" // // val props = Map( // HBaseConfigConstants.KCQL_QUERY -> QUERY_ALL, // HBaseConfigConstants.COLUMN_FAMILY -> "somecolumnFamily", // HBaseConfigConstants.ERROR_POLICY -> "RETRY" // ).asJava // // val config = HBaseConfig(props) // val settings = HBaseSettings(config) // val hbaseConfig = ConfigurationBuilder.buildHBaseConfig(settings) // val writer = new HbaseWriter(settings, hbaseConfig) // // val schema = SchemaBuilder // .struct() // .name("com.example.Person") // .field("firstName", Schema.STRING_SCHEMA) // .field("age", Schema.INT32_SCHEMA) // .field("threshold", Schema.OPTIONAL_FLOAT64_SCHEMA) // .build() // // val struct1 = new Struct(schema).put("firstName", "Alex").put("age", 30) // val struct2 = new Struct(schema) // .put("firstName", "Mara") // .put("age", 22) // .put("threshold", 12.4) // // val sinkRecord1 = new SinkRecord(topic, 1, null, null, schema, struct1, 0) // val sinkRecord2 = new SinkRecord(topic, 1, null, null, schema, struct2, 1) // // when(rowKeyBuilder.build(sinkRecord1, null)).thenReturn(10.fromInt()) // when(rowKeyBuilder.build(sinkRecord2, null)).thenReturn(11.fromInt()) // // when(fieldsExtractor.get(struct1)).thenReturn( // Seq("firstName" -> "Alex".fromString(), "age" -> 30.fromInt()) // ) // when(fieldsExtractor.get(struct2)).thenReturn( // Seq( // "firstName" -> "Mara".fromString(), // "age" -> 22.fromInt(), // "threshold" -> 12.4.fromDouble() // ) // ) // // HbaseHelper.autoclose(HbaseReaderHelper.createConnection) { connection => // implicit val conn: Connection = connection // try { // //HbaseTableHelper.createTable(tableName, columnFamily) // // //write should now error and retry // // intercept[RetriableException] { // writer.write(Seq(sinkRecord1, sinkRecord2)) // } // // HbaseTableHelper.createTable(tableName, columnFamily) // // //write again, should recover // writer.write(Seq(sinkRecord1, sinkRecord2)) // // val data = HbaseReaderHelper.getAllRecords(tableName, columnFamily) // // data.size shouldBe 2 // // val row1 = data.filter { r => // Bytes.toString(r.key) == "Alex" // }.head // row1.cells.size shouldBe 2 // // Bytes.toString(row1.cells("firstName")) shouldBe "Alex" // Bytes.toInt(row1.cells("age")) shouldBe 30 // // val row2 = data.filter { r => // Bytes.toString(r.key) == "Mara" // }.head // row2.cells.size shouldBe 3 // // Bytes.toString(row2.cells("firstName")) shouldBe "Mara" // Bytes.toInt(row2.cells("age")) shouldBe 22 // Bytes.toDouble(row2.cells("threshold")) shouldBe 12.4 // // } finally { // HbaseTableHelper.deleteTable(tableName) // } // } // } // } //}
datamountaineer/stream-reactor
kafka-connect-hbase/src/test/scala/com/datamountaineer/streamreactor/connect/hbase/writers/HbaseWriterTest.scala
Scala
apache-2.0
11,511
/* * Copyright (C) 2005, The Beangle Software. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.beangle.data.transfer.importer /** * ImportPrepare interface. * * @author chaostone */ trait ImportPrepare { /** * prepare. */ def prepare(importer: Importer): Unit }
beangle/data
transfer/src/main/scala/org/beangle/data/transfer/importer/ImportPrepare.scala
Scala
lgpl-3.0
916
package com.wavesplatform.utils import java.net.{InetAddress, SocketTimeoutException} import monix.eval.Task import monix.execution.ExecutionModel import monix.execution.schedulers.SchedulerService import org.apache.commons.net.ntp.NTPUDPClient import scala.concurrent.duration.DurationInt trait Time { def correctedTime(): Long def getTimestamp(): Long } class NTP(ntpServer: String) extends Time with ScorexLogging with AutoCloseable { private[this] val ExpirationTimeout = 60.seconds private[this] val RetryDelay = 10.seconds private[this] val ResponseTimeout = 10.seconds private[this] implicit val scheduler: SchedulerService = Schedulers.singleThread(name = "time-impl", reporter = log.error("Error in NTP", _), ExecutionModel.AlwaysAsyncExecution) private[this] val client = new NTPUDPClient() client.setDefaultTimeout(ResponseTimeout.toMillis.toInt) @volatile private[this] var ntpTimestamp = System.currentTimeMillis() @volatile private[this] var nanoTime = System.nanoTime() def correctedTime(): Long = { val timestamp = ntpTimestamp val offset = (System.nanoTime() - nanoTime) / 1000000 timestamp + offset } @volatile private[this] var txTime: Long = 0 def getTimestamp(): Long = { txTime = Math.max(correctedTime(), txTime + 1) txTime } private[this] val updateTask: Task[Unit] = { def newOffsetTask: Task[Option[(InetAddress, Long, Long)]] = Task { try { client.open() val beforeRequest = System.nanoTime() val info = client.getTime(InetAddress.getByName(ntpServer)) val message = info.getMessage val ntpTime = message.getTransmitTimeStamp.getTime val serverSpentTime = message.getTransmitTimeStamp.getTime - message.getReceiveTimeStamp.getTime val roundripTime = (System.nanoTime() - beforeRequest) / 1000000 - serverSpentTime val corrected = ntpTime + roundripTime / 2 Some((info.getAddress, corrected, System.nanoTime())) } catch { case _: SocketTimeoutException => None case t: Throwable => log.warn("Problems with NTP: ", t) None } finally { client.close() } } newOffsetTask.flatMap { case None if !scheduler.isShutdown => updateTask.delayExecution(RetryDelay) case Some((server, ntpTimestamp, nanoTime)) if !scheduler.isShutdown => log.trace(s"Adjusting time with ${ntpTimestamp - System.currentTimeMillis()} milliseconds, source: ${server.getHostName}.") this.ntpTimestamp = ntpTimestamp this.nanoTime = nanoTime updateTask.delayExecution(ExpirationTimeout) case _ => Task.unit } } private[this] val taskHandle = updateTask.runAsyncLogErr override def close(): Unit = { log.trace("Shutting down Time") taskHandle.cancel() scheduler.shutdown() } }
wavesplatform/Waves
node/src/main/scala/com/wavesplatform/utils/Time.scala
Scala
mit
2,928
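The core trick in correctedTime above is to pin an NTP-derived wall-clock timestamp to a System.nanoTime() reading and then advance it with the monotonic clock, so later adjustments of the system clock cannot skew the result. A stripped-down sketch of just that arithmetic, with no NTP query and illustrative reference values:

final class PinnedClock(referenceMillis: Long, referenceNanos: Long) {
  // wall-clock "now" derived from the monotonic clock, immune to system clock changes
  def correctedTime(): Long =
    referenceMillis + (System.nanoTime() - referenceNanos) / 1000000
}

object PinnedClockDemo extends App {
  val clock = new PinnedClock(System.currentTimeMillis(), System.nanoTime())
  Thread.sleep(50)
  // drift between the two clocks over 50 ms is normally negligible
  println(clock.correctedTime() - System.currentTimeMillis())
}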
package sclack.tech import java.awt.image.BufferedImage import scala.collection.mutable.HashMap import java.awt.image.BufferedImage /** * The tile manager is a Cache-like pattern that only loads the required * tilesets once, and then they are loaded from a single point in memory when * required, when painting the tiles anywhere. * * @author Simon Symeonidis */ object TileManager { /** * The tilemap to contain the required graphics */ private var tilemap = new HashMap[String,BufferedImage] /** * Get the tileset by giving the required name * @param name is the name of the tileset to use (for now 'dun', and 'fan' * are the only valid choices * @param ix is the index of the tile to fetch and return * @return the tile that you require, given the id * @note Ultimately you'd */ def tile(name: String, ix: Int) : BufferedImage = { name match { /* Dungeon */ case "dun" => return dungeonTileHelper.tile(ix) /* Fantasy */ case "fan" => return fantasyTileHelper.tile(ix) } } /** * Get the width of a particular tileset * * @param name is the name of the tileset we want. You can either specify * "dun" or "fan" for the dungeon or fantasy tilesets, respectively. * @return the width of that particular tileset */ def widthOf(name: String) : Int = { name match { case "dun" => return dungeonTileHelper.width case "fan" => return fantasyTileHelper.width } } /** * Get the height of a particular tileset * * @param name is the name of the tileset we want. You can either specify * "dun" or "fan" for the dungeon or fantasy tilesets, respectively. * @return the height of that particular tileset. */ def heightOf(name: String) : Int = { name match { case "dun" => return dungeonTileHelper.height case "fan" => return fantasyTileHelper.height } } private val dungeonTilesetName = "/16x16-dungeon-tiles-nes-remake.png" private val fantasyTilesetName = "/16x16-fantasy-tileset.png" private val dungeonTilesetRes = getClass.getResource(dungeonTilesetName) private val fantasyTilesetRes = getClass.getResource(fantasyTilesetName) private val dungeonTileHelper = new TileHelper(16,16,2,dungeonTilesetRes) private val fantasyTileHelper = new TileHelper(16,16,0,fantasyTilesetRes) }
psyomn/sclack
src/main/scala/tech/TileManager.scala
Scala
gpl-3.0
2,371
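A hypothetical paint routine showing how the TileManager above would be consumed; the tile index and the 10 x 10 grid are illustrative, while "dun" is one of the two tileset keys the object accepts.

import java.awt.Graphics2D
import sclack.tech.TileManager

object TilePainting {
  def paintDungeonFloor(g: Graphics2D): Unit = {
    val floor = TileManager.tile("dun", 0)   // fetched via the cached dungeon tileset
    val w     = TileManager.widthOf("dun")   // per-tile width reported by the helper
    val h     = TileManager.heightOf("dun")

    for (x <- 0 until 10; y <- 0 until 10)
      g.drawImage(floor, x * w, y * h, null)
  }
}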
// how to thread a monad correctly // (global name refreshment) package creg.example import creg._ object Fresh { import Monad.State import Monad.State.stateMonad import Banana.cata import Compos.getNameStream private[this] // necessary to make inner type λ covariant type State[S] = { type λ[+A] = S => (A, S) } @data def Term = Fix(T => TermT { Var(x = String) Abs(x = String, t = T) App(f = T, y = T) }) @functor def termF[T] = TermT { Var(x = String) Abs(x = String, t = T) App(f = T, y = T) } type Names = Stream[String] type Subst = Map[String, String] type FreshM[+T] = Subst => State[Names]#λ[T] implicit object FreshM extends MonadWithBind { type Map[+T] = FreshM[T] def pure[A](x: A): Map[A] = subst => names => (x, names) def bind[A, B](m: Map[A], f: A => Map[B]): Map[B] = subst => names => { val (x, newNames) = m(subst)(names) f(x)(subst)(newNames) } } def ask: FreshM[Subst] = env => stateMonad pure env def local[A](f: (Subst => Subst))(m: FreshM[A]): FreshM[A] = m compose f def readState: FreshM[Names] = env => State.readState def writeState(names: Names): FreshM[Unit] = env => State writeState names implicit class FreshMonadView[T](x: FreshM.Map[T]) extends Monad.View[FreshM, T](x) def refresh: Term => FreshM[Term] = cata[FreshM[Term]](termF) { case Var(x) => for { env <- ask } yield coerce { Var(env.withDefault(identity[String])(x)) } case Abs(x, s) => for { ys <- readState _ <- writeState(ys.tail) y = ys.head t <- local(_ + (x -> y))(s) } yield coerce { Abs(y, t) } case s => for { t <- termF(s).traverse(FreshM)(x => x) } yield coerce(t) } val omega: Term = coerce { App( Abs("x", App(Var("x"), Var("x"))), Abs("x", App(Var("x"), Var("x")))) } val omg: Term = refresh(omega)(Map.empty)(getNameStream)._1 def run() { println(omg) } }
yfcai/CREG
example/Fresh.scala
Scala
mit
2,056
/* * Copyright 2014–2017 SlamData Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package quasar.sql import slamdata.Predef._ import quasar.fp._ import quasar.RenderTree.ops._ import quasar.specs2.QuasarMatchers._ import quasar.sql.StatementArbitrary._ import quasar.sql.fixpoint._ import scala.Predef.$conforms import matryoshka._ import matryoshka.data.Fix import matryoshka.implicits._ import pathy.Path._ import scalaz._, Scalaz._ class SQLParserSpec extends quasar.Qspec { import SqlQueries._, ExprArbitrary._ def parse(query: String): ParsingError \\/ Fix[Sql] = fixParser.parseExpr(query) "SQLParser" should { "parse query1" in { val r = parse(q1).toOption r should beSome } "parse query2" in { val r = parse(q2).toOption r should beSome } "parse query3" in { val r = parse(q3).toOption r should beSome } "parse query4" in { val r = parse(q4).toOption r should beSome } "parse query5" in { val r = parse(q5).toOption r should beSome } "parse query6" in { val r = parse(q6).toOption r should beSome } "parse query7" in { val r = parse(q7).toOption r should beSome } "parse query8" in { val r = parse(q8).toOption r should beSome } "parse query9" in { val r = parse(q9).toOption r should beSome } "parse query10" in { val r = parse(q10).toOption r should beSome } "parse query11" in { val r = parse(q11).toOption r should beSome } "parse query12" in { val r = parse(q12).toOption r should beSome } "parse query13" in { val r = parse(q13).toOption r should beSome } "parse query14" in { val r = parse(q14).toOption r should beSome } "parse query16" in { val r = parse(q16).toOption r should beSome } "parse query17" in { val r = parse(q17).toOption r should beSome } "parse query18" in { val r = parse(q18).toOption r should beSome } "parse query19" in { val r = parse(q19).toOption r should beSome } "parse query20" in { val r = parse(q20).toOption r should beSome } "parse query21" in { val r = parse(q21).toOption r should beSome } "parse query22" in { val r = parse(q22).toOption r should beSome } "parse basic select" in { parse("select foo from bar") must beRightDisjunction( SelectR( SelectAll, List(Proj(IdentR("foo"), None)), Some(TableRelationAST(file("bar"), None)), None, None, None)) } "parse keywords as identifiers" in { parse("select as as as from from as from where where group by group order by order") should beRightDisjOrDiff( SelectR( SelectAll, List(Proj(IdentR("as"), "as".some)), TableRelationAST(file("from"), "from".some).some, IdentR("where").some, GroupBy(List(IdentR("group")), None).some, OrderBy((ASC: OrderType, IdentR("order")).wrapNel).some)) } "parse ambiguous keyword as identifier" in { parse("""select `false` from zips""") should beRightDisjOrDiff( SelectR( SelectAll, List(Proj(IdentR("false"), None)), TableRelationAST(file("zips"), None).some, None, None, None)) } "parse ambiguous expression as expression" in { parse("""select case from when where then and end""") should beRightDisjOrDiff( SelectR( SelectAll, List(Proj(MatchR(IdentR("from"), List(Case(IdentR("where"), IdentR("and"))), None), None)), 
None, None, None, None)) } "parse partially-disambiguated expression" in { parse("""select `case` from when where then and end""") should beRightDisjOrDiff( SelectR( SelectAll, List(Proj(IdentR("case"), None)), TableRelationAST(file("when"), None).some, BinopR(IdentR("then"), IdentR("end"), And).some, None, None)) } "parse quoted literal" in { parse("""select * from foo where bar = "abc" """).toOption should beSome } "parse quoted literal with escaped quote" in { parse(raw"""select * from foo where bar = "that\\"s it!" """).toOption should beSome } "don’t parse multi-character char literal" in { parse("""select * from foo where bar = 'it!'""").toOption should beNone } "parse literal that’s too big for an Int" in { parse("select * from users where add_date > 1425460451000") should beRightDisjOrDiff( SelectR( SelectAll, List(Proj(SpliceR(None), None)), Some(TableRelationAST(file("users"),None)), Some(BinopR(IdentR("add_date"),IntLiteralR(1425460451000L), Gt)), None,None)) } "parse quoted identifier" in { parse("""select * from `tmp/foo` """).toOption should beSome } "parse quoted identifier with escaped quote" in { parse(raw"select * from `tmp/foo[\\`bar\\`]` ").toOption should beSome } "parse simple query with two variables" in { parse("""SELECT * FROM zips WHERE zips.dt > :start_time AND zips.dt <= :end_time """).toOption should beSome } "parse variable with quoted name" in { parse(""":`start time`""") should beRightDisjOrDiff(VariR("start time")) } "parse variable with quoted name starting with '_'" in { parse(""":`_8`""") should beRightDisjOrDiff(VariR("_8")) } "not parse variable with '_' as the name" in { parse(""":_""") must beLeftDisjunction( GenericParsingError("quotedIdent expected; but found `:'")) } "not parse variable with digit as the name" in { parse(""":8""") must beLeftDisjunction( GenericParsingError("quotedIdent expected; but found `:'")) } "not parse variable with digit at the start of the name" in { parse(""":8_""") must beLeftDisjunction( GenericParsingError("quotedIdent expected; but found `:'")) } "not parse variable with '_' at the start of the name" in { parse(""":_8""") must beLeftDisjunction( GenericParsingError("quotedIdent expected; but found `:'")) } "parse simple query with '_' as relation" in { parse("""SELECT * FROM `_`""").toOption should beSome } "parse simple query with '_' in relation" in { parse("""SELECT * FROM `/foo/bar/_`""").toOption should beSome } "parse simple query with variable as relation" in { parse("""SELECT * FROM :table""").toOption should beSome } "parse true and false literals" in { parse("""SELECT * FROM zips WHERE zips.isNormalized = TRUE AND zips.isFruityFlavored = FALSE""").toOption should beSome } "parse “full-value” insert expression" in { parse("insert into zips values 1, 2, 3") should beRightDisjOrDiff( Distinct(UnionAll( SetLiteralR(List(IntLiteralR(1), IntLiteralR(2), IntLiteralR(3))), SelectR( SelectAll, List(Proj(SpliceR(None), None)), Some(TableRelationAST(file("zips"),None)), None, None, None)).embed).embed) } "parse “keyed” insert expression" in { parse("insert into zips ('a', 'b') values (1, 2), (3, 4)") should beRightDisjOrDiff( Distinct(UnionAll( SetLiteralR(List( MapLiteralR(List(StringLiteralR("a") -> IntLiteralR(1), StringLiteralR("b") -> IntLiteralR(2))), MapLiteralR(List(StringLiteralR("a") -> IntLiteralR(3), StringLiteralR("b") -> IntLiteralR(4))))), SelectR( SelectAll, List(Proj(SpliceR(None), None)), Some(TableRelationAST(file("zips"),None)), None, None, None)).embed).embed) } "parse numeric literals" in { 
parse("select 1, 2.0, 3000000, 2.998e8, -1.602E-19, 1e+6") should beRightDisjunction } "parse date, time, timestamp, and id literals" in { val q = """select * from foo where dt < date("2014-11-16") and tm < time("03:00:00") and ts < timestamp("2014-11-16T03:00:00Z") + interval("PT1H") and `_id` != oid("abc123")""" parse(q) must beRightDisjunction } "parse IS and IS NOT" in { val q = """select * from foo where a IS NULL and b IS NOT NULL and c IS TRUE and d IS NOT FALSE""" parse(q) must beRightDisjunction } "parse is (not) as (!)=" in { val q1 = "select * from zips where pop is 1000 and city is not \\"BOULDER\\"" val q2 = "select * from zips where pop = 1000 and city != \\"BOULDER\\"" parse(q1) must_=== parse(q2) } "parse `in` and `like` with optional `is`" in { val q1 = "select * from zips where pop is in (1000, 2000) and city is like \\"BOU%\\"" val q2 = "select * from zips where pop in (1000, 2000) and city like \\"BOU%\\"" parse(q1) must_=== parse(q2) } "parse `not in` and `not like` with optional `is`" in { val q1 = "select * from zips where pop is not in (1000, 2000) and city is not like \\"BOU%\\"" val q2 = "select * from zips where pop not in (1000, 2000) and city not like \\"BOU%\\"" parse(q1) must_=== parse(q2) } "parse nested joins left to right" in { val q1 = "select * from a cross join b cross join c" val q2 = "select * from (a cross join b) cross join c" parse(q1) must_=== parse(q2) } "parse nested joins with parens" in { val q = "select * from a cross join (b cross join c)" parse(q) must beRightDisjunction( SelectR( SelectAll, List(Proj(SpliceR(None), None)), Some( CrossRelation( TableRelationAST(file("a"), None), CrossRelation( TableRelationAST(file("b"), None), TableRelationAST(file("c"), None)))), None, None, None)) } "parse array constructor and concat op" in { parse("select loc || [ pop ] from zips") must beRightDisjunction( SelectR(SelectAll, List( Proj( BinopR(IdentR("loc"), ArrayLiteralR(List( IdentR("pop"))), Concat), None)), Some(TableRelationAST(file("zips"), None)), None, None, None)) } val expectedSelect = SelectR(SelectAll, List(Proj(IdentR("loc"), None)), Some(TableRelationAST(file("places"), None)), None, None, None ) val selectString = "select loc from places" "parse offset" in { val q = s"$selectString offset 6" parse(q) must beRightDisjunction( Offset(expectedSelect, IntLiteralR(6)).embed ) } "parse limit" should { "normal" in { val q = s"$selectString limit 6" parse(q) must beRightDisjunction( Limit(expectedSelect, IntLiteralR(6)).embed ) } "multiple limits" in { val q = s"$selectString limit 6 limit 3" parse(q) must beRightDisjunction( Limit(Limit(expectedSelect, IntLiteralR(6)).embed, IntLiteralR(3)).embed ) } "should not allow single limit" in { val q = "limit 6" parse(q) must beLeftDisjunction } } "parse limit and offset" should { "limit before" in { val q = s"$selectString limit 6 offset 3" parse(q) must beRightDisjunction( Offset(Limit(expectedSelect, IntLiteralR(6)).embed, IntLiteralR(3)).embed ) } "limit after" in { val q = s"$selectString offset 6 limit 3" parse(q) must beRightDisjunction( Limit(Offset(expectedSelect, IntLiteralR(6)).embed, IntLiteralR(3)).embed ) } } "should refuse a semicolon not at the end" in { val q = "select foo from (select 5 as foo;) where foo = 7" parse(q) must beLeftDisjunction( GenericParsingError("operator ')' expected; but found `;'") ) } "parse basic let" in { parse("""foo := 5; foo""") must beRightDisjunction( LetR(CIName("foo"), IntLiteralR(5), IdentR("foo"))) } "parse basic let with quoted identifier starting with 
'_'" in { parse("""`_8` := 5; `_8`""") must beRightDisjunction( LetR(CIName("_8"), IntLiteralR(5), IdentR("_8"))) } "not parse basic let with '_' as the identifier" in { parse("""_ := 5; _""") must beLeftDisjunction( GenericParsingError("quotedIdent expected; but found `*** error: `!' expected but _ found'")) } "not parse basic let with digit as the identifier" in { parse("""8 := 5; 8""") must beLeftDisjunction( GenericParsingError("keyword 'except' expected; but found `:='")) } "not parse basic let with digit at the start of the identifier" in { parse("""8_ := 5; 8_""") must beLeftDisjunction( GenericParsingError("keyword 'except' expected; but found `*** error: `!' expected but _ found'")) } "not parse basic let with '_' at the start of the identifier" in { parse("""_8 := 5; _8""") must beLeftDisjunction( GenericParsingError("quotedIdent expected; but found `*** error: `!' expected but _ found'")) } "parse nested lets" in { parse("""foo := 5; bar := "hello"; bar + foo""") must beRightDisjunction( LetR( CIName("foo"), IntLiteralR(5), LetR( CIName("bar"), StringLiteralR("hello"), BinopR(IdentR("bar"), IdentR("foo"), Plus)))) } "parse let inside select" in { parse("""select foo from (bar := 12; baz) as quag""") must beRightDisjunction( SelectR( SelectAll, List(Proj(IdentR("foo"), None)), Some(ExprRelationAST( LetR( CIName("bar"), IntLiteralR(12), IdentR("baz")), "quag")), None, None, None)) } "parse select inside body of let" in { parse("""foo := (1,2,3); select * from foo""") must beRightDisjunction( LetR( CIName("foo"), SetLiteralR( List(IntLiteralR(1), IntLiteralR(2), IntLiteralR(3))), SelectR( SelectAll, List(Proj(SpliceR(None), None)), Some(IdentRelationAST("foo", None)), None, None, None))) } "parse select inside body of let" in { parse("""foo := (1,2,3); select foo from bar""") must beRightDisjunction( LetR( CIName("foo"), SetLiteralR( List(IntLiteralR(1), IntLiteralR(2), IntLiteralR(3))), SelectR( SelectAll, // TODO this should be IdentRelationAST not Ident List(Proj(IdentR("foo"), None)), Some(TableRelationAST(file("bar"), None)), None, None, None))) } "parse select inside body of let inside select" in { val innerLet = LetR( CIName("foo"), SetLiteralR( List(IntLiteralR(1), IntLiteralR(2), IntLiteralR(3))), SelectR( SelectAll, List(Proj(SpliceR(None), None)), Some(IdentRelationAST("foo", None)), None, None, None)) parse("""select (foo := (1,2,3); select * from foo) from baz""") must beRightDisjunction( SelectR( SelectAll, List(Proj(innerLet, None)), Some(TableRelationAST(file("baz"), None)), None, None, None)) } "should parse a single-quoted character" in { val q = "'c'" parse(q) must beRightDisjunction(StringLiteralR("c")) } "should parse escaped characters" in { val q = raw"select '\\'', '\\\\', '\\u1234'" parse(q) must beRightDisjunction( SelectR(SelectAll, List( Proj(StringLiteralR("'"), None), Proj(StringLiteralR(raw"\\"), None), Proj(StringLiteralR("ሴ"), None)), None, None, None, None)) } "should parse escaped characters in a string" in { val q = raw""""'\\\\\\u1234"""" parse(q) must beRightDisjunction(StringLiteralR(raw"'\\ሴ")) } "should not parse multiple expressions seperated incorrectly" in { val q = "select foo from bar limit 6 select biz from baz" parse(q) must beLeftDisjunction } "parse function declaration" in { val funcDeclString = "CREATE FUNCTION ARRAY_LENGTH(:foo) BEGIN COUNT(:foo[_]) END" fixParser.parseWithParser(funcDeclString, fixParser.func_def) must beRightDisjunction( 
FunctionDecl(CIName("ARRAY_LENGTH"),List(CIName("foo")),Fix(invokeFunction(CIName("count"),List(Fix(Unop(Fix(vari[Fix[Sql]]("foo")),ShiftArrayValues))))))) } "parse import statement" in { val importString = "import `/foo/bar/baz/`" fixParser.parseWithParser(importString, fixParser.import_) must beRightDisjunction( Import(rootDir </> dir("foo") </> dir("bar") </> dir("baz"))) } "parse module" >> { "typical case" in { val moduleString = """ |CREATE FUNCTION ARRAY_LENGTH(:foo) BEGIN COUNT(:foo[_]) END; |CREATE FUNCTION USER_DATA(:user_id) BEGIN SELECT * FROM `/root/path/data/` WHERE user_id = :user_id END; |import `/other/stuff/in/filesystem/` """.stripMargin fixParser.parseModule(moduleString) must beLike { case \\/-(List(FunctionDecl(_, _, _), FunctionDecl(_, _, _), Import(_))) => ok } } "does not complain about a trailing semicolon" in { val moduleString = "CREATE FUNCTION FOO(:foo) BEGIN :foo END;" fixParser.parseModule(moduleString) must_=== \\/-(List(FunctionDecl(CIName("foo"), List(CIName("foo")), sqlE":foo"))) } } "parse scopedExpr" in { val scopedExprString = """ |CREATE FUNCTION USER_DATA(:user_id) | BEGIN | SELECT * FROM `/foo` WHERE user_id = :user_id | END; |USER_DATA("bob") """.stripMargin val invokeAST: Fix[Sql] = Fix(invokeFunction[Fix[Sql]](CIName("USER_DATA"),List(Fix(stringLiteral[Fix[Sql]]("bob"))))) fixParser.parse(scopedExprString) must beLike { case \\/-(ScopedExpr(`invokeAST`, List(FunctionDecl(_,_,_)))) => ok } } "parse array literal at top level" in { parse("""["X", "Y"]""") must beRightDisjunction( ArrayLiteralR(List(StringLiteralR("X"), StringLiteralR("Y")))) } "parse empty set literal" in { parse("()") must beRightDisjunction( SetLiteralR(Nil)) } "parse parenthesized simple expression (which is syntactically identical to a 1-element set literal)" in { parse("(a)") must beRightDisjunction( IdentR("a")) } "parse 2-element set literal" in { parse("(a, b)") must beRightDisjunction( SetLiteralR(List(IdentR("a"), IdentR("b")))) } "parse deeply nested parens" in { // NB: Just a stress-test that the parser can handle a deeply // left-recursive expression with many unneeded parens, which // happens to be exactly what pprint produces. 
val q = """(select distinct topArr, topObj from `/demo/demo/nested` where ((((((((((((((((((((((((((((((search((((topArr)[:*])[:*])[:*], "^.*$", true)) or (search((((topArr)[:*])[:*]).a, "^.*$", true)))) or (search((((topArr)[:*])[:*]).b, "^.*$", true)))) or (search((((topArr)[:*])[:*]).c, "^.*$", true)))) or (search((((topArr)[:*]).botObj).a, "^.*$", true)))) or (search((((topArr)[:*]).botObj).b, "^.*$", true)))) or (search((((topArr)[:*]).botObj).c, "^.*$", true)))) or (search((((topArr)[:*]).botArr)[:*], "^.*$", true)))) or (search((((topObj).midArr)[:*])[:*], "^.*$", true)))) or (search((((topObj).midArr)[:*]).a, "^.*$", true)))) or (search((((topObj).midArr)[:*]).b, "^.*$", true)))) or (search((((topObj).midArr)[:*]).c, "^.*$", true)))) or (search((((topObj).midObj).botArr)[:*], "^.*$", true)))) or (search((((topObj).midObj).botObj).a, "^.*$", true)))) or (search((((topObj).midObj).botObj).b, "^.*$", true)))) or (search((((topObj).midObj).botObj).c, "^.*$", true))))""" parse(q).map(pprint[Fix[Sql]]) must beRightDisjunction(q) } "should not parse query with a single backslash in an identifier" should { "in table relation" in { parse(raw"select * from `\\bar`") should beLeftDisjunction }.pendingUntilFixed("SD-1536") "in identifier" in { parse(raw"`\\bar`") should beLeftDisjunction }.pendingUntilFixed("SD-1536") } "round-trip to SQL and back" >> prop { node: Fix[Sql] => val parsed = parse(pprint(node)) parsed.fold( _ => println(node.render.shows + "\\n" + pprint(node)), p => if (p ≠ node) println(pprint(p) + "\\n" + (node.render diff p.render).show)) parsed must beRightDisjOrDiff(node) }.set(minTestsOk = 1000) // one cannot test a parser too much "round-trip module" >> prop { module: List[Statement[Fix[Sql]]] => val back = fixParser.parseModule(module.pprint) back must beRightDisjOrDiff(module) } "pprint an import statement should escpae backticks" >> { val `import` = Import[Fix[Sql]](currentDir </> dir("di") </> dir("k`~ireW.5u1+fOh") </> dir("j")) val string = List(`import`).pprint string must_= raw"import `./di/k\\`~ireW.5u1+fOh/j/`" fixParser.parseModule(string) must_=== List(`import`).right } "round-trip through the pretty-printer" >> { def roundTrip(q: String) = { val ast = parse(q) ast should beRightDisjunction ast.map(pprint[Fix[Sql]] _).flatMap(parse) must_=== ast } "quoted variable names" in roundTrip("select * from :`A.results`") "field deref with string literal" in roundTrip("select a.`_id` from z as a") "let binding" in roundTrip("a := 42; SELECT * FROM z") "union all" in roundTrip("""SELECT * FROM (SELECT 1 as v UNION ALL SELECT 2 as v) as o""") } } }
drostron/quasar
frontend/src/test/scala/quasar/sql/SqlParserSpec.scala
Scala
apache-2.0
23,286
package com.yammer.dropwizard.scala.params

object BooleanParam {
  def apply(value: Boolean): BooleanParam = BooleanParam(value.toString)
}

/**
 * Parses "true" and "false" to Boolean values.
 */
case class BooleanParam(s: String) extends AbstractParam[Boolean](s) {
  protected def parse(input: String) = input.toBoolean

  override protected def renderError(input: String, e: Throwable) =
    "Invalid parameter: %s (Must be \"true\" or \"false\".)".format(input)
}
hailcode/dropwizard-scala
src/main/scala/com/yammer/dropwizard/scala/params/BooleanParam.scala
Scala
apache-2.0
470
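A minimal usage sketch for the BooleanParam type above. It assumes the AbstractParam base class (not shown in this file) invokes parse on the wrapped string; the sample values are illustrative.

// Hypothetical usage of BooleanParam as defined above.
val enabled = BooleanParam("true")    // parse("true") yields true
val viaBool = BooleanParam(false)     // companion apply wraps the Boolean as BooleanParam("false")
// A value such as BooleanParam("yes") would be reported through renderError as:
//   Invalid parameter: yes (Must be "true" or "false".)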
/** * Copyright (C) 2010-2012 LShift Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.lshift.diffa.agent.itest import support.TestConstants.{ agentURL, yesterday } import net.lshift.diffa.agent.client.ConfigurationRestClient import net.lshift.diffa.kernel.frontend.EndpointDef import net.lshift.diffa.adapter.changes.ChangeEvent import org.junit.Assert.fail import net.lshift.diffa.client.{RateLimitExceededException, ChangesRestClient} import net.lshift.diffa.kernel.client.ChangesClient import org.junit.{Ignore, Before, Test} import com.hazelcast.util.Clock import org.apache.commons.lang3.RandomStringUtils @Ignore class ChangeEventRateLimitingTest extends IsolatedDomainTest { var clientCreateTime: Long = 0L var changesClient: ChangesClient = _ var event: ChangeEvent = _ val endpoint = RandomStringUtils.randomAlphanumeric(10) val lastUpdated = yesterday val ratePerSecondLimit = 1 @Before def initializeChangesClient { new ConfigurationRestClient(agentURL, isolatedDomain).declareEndpoint(EndpointDef(name = endpoint)) clientCreateTime = Clock.currentTimeMillis() changesClient = new ChangesRestClient(agentURL, isolatedDomain, endpoint) event = ChangeEvent.forChange("id", "aaff00001111", lastUpdated) // Make sure that no previous change events interfere with the acceptance of // the next test. Thread.sleep(1000 / ratePerSecondLimit) } @Test def shouldAcceptFirstEvent { try { changesClient.onChangeEvent(event) } catch { case x: RateLimitExceededException => fail("First event was rate limited, but should not have been") } } @Test def givenDefaultConfigurationAndRateLimitAlreadyReachedWhenSubsequentChangeEventReceivedThenRejectEventSubmission { try { changesClient.onChangeEvent(event) assertFailUntil(clientCreateTime + 1000L) } catch { case x: Exception => fail("Unexpected failure of first change event submission: " + x.toString) } } private def assertFailUntil(sysTimeMillis: Long) { val retryFrequency = 50 // milliseconds while (Clock.currentTimeMillis < sysTimeMillis) { try { changesClient.onChangeEvent(event) // check the time again in case the previous call took a while to execute, // in which case it's not necessarily true that the action should have been // rate limited. if (Clock.currentTimeMillis < sysTimeMillis) { fail("Change Event submission was expected to raise an exception due to violating the rate limit, but succeeded") } } catch { case x: RateLimitExceededException => } Thread.sleep(retryFrequency) } } }
0x6e6562/diffa
agent/src/test/scala/net/lshift/diffa/agent/itest/ChangeEventRateLimitingTest.scala
Scala
apache-2.0
3,234
package org.dsa.iot.rx.core import org.dsa.iot.rx.RxMerger3 /** * Combines three Observables into a single Observable of Tuple3, emitting a new tuple * after all sources emitted the next item. */ class Zip3[T1, T2, T3] extends RxMerger3[T1, T2, T3, (T1, T2, T3)] { protected def compute = source1.in zip source2.in zip source3.in map { case ((i1, i2), i3) => (i1, i2, i3) } } /** * Factory for [[Zip3]] instances. */ object Zip3 { /** * Creates a new Zip3 instance. */ def apply[T1, T2, T3]: Zip3[T1, T2, T3] = new Zip3[T1, T2, T3] }
IOT-DSA/dslink-scala-ignition
src/main/scala/org/dsa/iot/rx/core/Zip3.scala
Scala
apache-2.0
560
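As a hedged illustration of what Zip3.compute does, here is the same zip-of-three pattern written directly against RxScala Observables, bypassing the RxMerger3 wiring (which is not shown in this file); the sample values are arbitrary.

import rx.lang.scala.Observable

val o1 = Observable.just(1, 2, 3)
val o2 = Observable.just("a", "b", "c")
val o3 = Observable.just(1.0, 2.0, 3.0)

// zip pairwise, then flatten the nested tuple, as in Zip3.compute
val zipped: Observable[(Int, String, Double)] =
  o1 zip o2 zip o3 map { case ((i1, i2), i3) => (i1, i2, i3) }

zipped.subscribe(t => println(t))  // prints (1,a,1.0), (2,b,2.0), (3,c,3.0)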
package proofpeer.proofscript.logic private class KernelImpl( val mk_theorem_helper : (Context, Term) => Theorem, val mk_cterm : (Context, Term, Type) => CTerm) extends Kernel { import Term._ import Type._ import Utils._ import KernelUtils._ private def mk_theorem(context : Context, term : Term) : Theorem = { mk_theorem_helper(context, betaEtaLongNormalform(context, term)) } private class ContextImpl(val isMainThread : Boolean, val kind : ContextKind, val depth : Integer, val created : ContextKind.Created, val parentContext : Option[ContextImpl], val constants : Map[Name, Type]) extends Context { def debugPrint(name : String) { var kinds : List[ContextKind] = List() var impl : ContextImpl = this while (impl != null) { kinds = impl.kind :: kinds impl.parentContext match { case None => impl = null case Some(c) => impl = c } } println("Context " + name+":") for (kind <- kinds) { println(" " + kind) } } def kernel : Kernel = KernelImpl.this def namespace = created.namespace def parentNamespaces = created.parentNamespaces def typeOfTerm(term : Term) : Option[Type] = KernelUtils.typeOfTerm(this, Map(), term) private def hasContextScope(name : Name) : Boolean = { name.namespace.isEmpty || (name.namespace == Some(namespace)) } private def contextOfName(name : Name) : ContextImpl = { if (hasContextScope(name)) this else { val namespace = name.namespace.get if (created.ancestorNamespaces.contains(namespace)) contextOfNamespace(namespace).get.asInstanceOf[ContextImpl] else failwith("no such namespace found: " + namespace) } } def typeOfConst(const_name : Name) : Option[Type] = { contextOfName(const_name).constants.get(const_name) } def certify(term : Term) : CTerm = { typeOfTerm(term) match { case None => failwith("term is invalid in this context") case Some(ty) => mk_cterm(this, term, ty) } } private def isComplete : Boolean = { kind match { case ContextKind.Complete => true case _ => false } } private def ensureContextScope(name : Name) { if (!hasContextScope(name)) failwith("name "+name+" is outside of namespace: "+created.namespace) if (!isMainThread && name.namespace.isDefined) failwith("name "+name+" is only definable on the main thread") } private def contains[T](name : Name, map : Map[Name, T]) : Boolean = { if (name.namespace.isDefined) map.contains(name) || map.contains(Name(None, name.name)) else map.contains(name) || map.contains(Name(Some(namespace), name.name)) } private def mkChild(kind : ContextKind, constants : Map[Name, Type]) : ContextImpl = { if (!isMainThread) { new ContextImpl(false, kind, depth + 1, created, Some(this), constants) } else { val c = new ContextImpl(true, kind, depth + 1, created, Some(this), constants) if (KernelImpl.this.setMainThread(c)) c else failwith("cannot create context in main thread of namespace: " + namespace) } } def spawnThread : Context = { if (!isMainThread) this else { new ContextImpl(false, ContextKind.SpawnThread, depth + 1, created, Some(this), constants) } } def isPolyConst(name : Name) : Boolean = { name.name match { case Kernel.equals.name => true case Kernel.forall.name => true case Kernel.exists.name => true case _ => false } } def introduce(const_name : Name, ty : Type) : Context = { if (isComplete) failwith("cannot extend completed context") ensureContextScope(const_name) if (contains(const_name, constants) || isPolyConst(const_name)) failwith("constant name " + const_name + " clashes with other constant in current scope") mkChild( ContextKind.Introduce(const_name, ty), constants + (const_name -> ty)) } def assume(_assumption : CTerm) : 
Theorem = { if (isComplete) failwith("cannot extend completed context") val assumption = doautolift(_assumption) if (assumption.typeOf != Prop) failwith("assumption is not a valid proposition") val context = mkChild(ContextKind.Assume(assumption.term), constants) mk_theorem(context, assumption.term) } def magic(term : Term) : Theorem = { if (typeOfTerm(term) != Some(Prop)) failwith("term is not a valid proposition in this context") mk_theorem(this, term) } def hasAssumptions : Boolean = { var context : Context = this do { context.kind match { case _ : ContextKind.Assume => return true case _ => } context = context.parentContext match { case Some(c) => c case None => null } } while (context != null) false } def define(const_name : Name, tm_ : CTerm) : Theorem = { if (isComplete) failwith("cannot extend completed context") ensureContextScope(const_name) if (contains(const_name, constants) || isPolyConst(const_name)) failwith("constant name "+const_name+" clashes with other constant in current scope") val tm = doautolift(tm_) val ty = tm.typeOf val eq = Comb(Comb(PolyConst(Kernel.equals, ty), Const(const_name)), tm.term) val context = mkChild(ContextKind.Define(const_name, ty, tm.term), constants + (const_name -> ty)) mk_theorem(context, eq) } def choose(const_name : Name, thm : Theorem) : Theorem = { if (isComplete) failwith("cannot extend completed context") checkTheoremContext(thm) ensureContextScope(const_name) if (contains(const_name, constants) || isPolyConst(const_name)) failwith("constant name "+const_name+" clashes with other constant in current scope") val (quantifiers, th) = strip_forall_unique(thm.proposition) val (x, ty, p) = dest_exists(th) match { case None => failwith("choose: theorem is not an (possibly universally quantified) existential") case Some(u) => u } var c : Term = Const(const_name) var cty : Type = ty for ((x, xty) <- quantifiers) { c = Comb(c, Var(x)) } var prop = substVar(p, x, c) for ((x, xty) <- quantifiers.reverse) { cty = Fun(xty, cty) val all = PolyConst(Kernel.forall, xty) prop = Comb(all, Abs(x, xty, prop)) } val context = mkChild(ContextKind.Choose(const_name, cty, prop), constants + (const_name -> cty)) mk_theorem(context, prop) } def instantiate(thm : Theorem, cinsts : List[Option[CTerm]]) : Theorem = { checkTheoremContext(thm) def m(t : Option[CTerm]) : Option[Term] = { t match { case None => None case Some(t) => Some(doautolift(t).term) } } val insts = cinsts.map(m _) mk_theorem(this, KernelUtils.instantiate(this, thm.proposition, insts)) } def checkTheoremContext(thm : Theorem) { if (KernelImpl.this != thm.context.kernel) failwith("theorem belongs to a different kernel") if (thm.context != this) failwith("theorem belongs to a different context") } def lift(thm : Theorem, preserve_structure : Boolean) : Theorem = { if (KernelImpl.this != thm.context.kernel) failwith("theorem belongs to a different kernel") val src_context = thm.context.asInstanceOf[ContextImpl] if (src_context == this) return thm val src_namespace = src_context.namespace if (namespace != src_namespace) { if (created.ancestorNamespaces.contains(src_namespace)) { val prop = completedContext(src_namespace).liftLocally(thm, preserve_structure).proposition mk_theorem(this, prop) } else { failwith("cannot lift theorem from namespace '" + src_context.namespace +"' to namespace '"+namespace+"'") } } else { liftLocally(thm, preserve_structure) } } def lift(term : CTerm, preserve_structure : Boolean) : CTerm = { if (KernelImpl.this != term.context.kernel) failwith("term belongs to a different 
kernel") val src_context = term.context.asInstanceOf[ContextImpl] if (src_context == this) return term val src_namespace = src_context.namespace if (namespace != src_namespace) { if (created.ancestorNamespaces.contains(src_namespace)) { val ct = completedContext(src_namespace).liftLocally(term, preserve_structure) mk_cterm(this, ct.term, ct.typeOf) } else { failwith("cannot lift term from namespace '" + src_context.namespace +"' to namespace '"+namespace+"'") } } else { liftLocally(term, preserve_structure) } } def autolift(term : CTerm) : Option[CTerm] = { val liftedTerm = lift(term, false) if (liftedTerm != term) None else Some(liftedTerm) } private def doautolift(term : CTerm) : CTerm = { val liftedTerm = lift(term, false) if (liftedTerm != term) failwith("cannot automatically lift term into context") else liftedTerm } // Same as lift, but assumes that the theorem context has the same namespace as this context. private def liftLocally(thm : Theorem, preserve_structure : Boolean) : Theorem = { val src_context = thm.context.asInstanceOf[ContextImpl] val common_ancestor = findCommonAncestorContext(this, src_context) val lifted_thm = common_ancestor.liftLocallyUp(thm, preserve_structure) if (common_ancestor.depth == depth) lifted_thm else { if (isComplete) { if (!isQualifiedTerm(lifted_thm.proposition)) failwith("cannot lift theorem containing unqualified constants into completed context of namespace " + namespace) } mk_theorem(this, lifted_thm.proposition) } } // Same as lift, but assumes that the termcontext has the same namespace as this context. private def liftLocally(term : CTerm, preserve_structure : Boolean) : CTerm = { val src_context = term.context.asInstanceOf[ContextImpl] val common_ancestor = findCommonAncestorContext(this, src_context) val lifted_term = common_ancestor.liftLocallyUp(term, preserve_structure) if (common_ancestor.depth == depth) lifted_term else { if (isComplete) { if (!isQualifiedTerm(lifted_term.term)) failwith("cannot lift term containing unqualified constants into completed context of namespace " + namespace) } mk_cterm(this, lifted_term.term, lifted_term.typeOf) } } private def liftLocallyUp(thm : Theorem, preserve_structure : Boolean) : Theorem = { import ContextKind._ var context = thm.context.asInstanceOf[ContextImpl] if (context == this) return thm var prop = thm.proposition if (preserve_structure) { while (context.depth > depth) { context.kind match { case Assume(hyp) => prop = mk_implies(hyp, prop) case Introduce(c, ty) => prop = mk_forall(c, ty, prop) case Define(c, ty, _) => prop = mk_exists(c, ty, prop) case Choose(c, ty, _) => prop = mk_exists(c, ty, prop) case _ => // nothing to do, the context is non-logical } context = context.parentContext.get } } else { var consts : Set[Name] = collectConsts(prop) while (context.depth > depth) { context.kind match { case Assume(hyp) => prop = mk_implies_prenex(hyp, prop) consts = collectConsts(hyp, consts) case Introduce(c, ty) => if (consts.contains(c)) { prop = mk_forall(c, ty, prop) consts = consts - c } case Define(c, ty, _) => if (consts.contains(c)) { prop = mk_exists(c, ty, prop) consts = consts - c } case Choose(c, ty, _) => if (consts.contains(c)) { prop = mk_exists(c, ty, prop) consts = consts - c } case _ => // nothing to do, the context is non-logical } context = context.parentContext.get } } mk_theorem(context, prop) } private def liftLocallyUp(cterm : CTerm, preserve_structure : Boolean) : CTerm = { import ContextKind._ var context = cterm.context.asInstanceOf[ContextImpl] if (context == this) 
return cterm var term = cterm.term var typeOf = cterm.typeOf if (preserve_structure) { while (context.depth > depth) { context.kind match { case Introduce(c, ty) => term = mk_abs(c, ty, term) case Define(c, ty, _) => term = mk_abs(c, ty, term) case Choose(c, ty, _) => term = mk_abs(c, ty, term) case _ => // nothing to do, the context does not introduce any constants } context = context.parentContext.get } } else { var consts : Set[Name] = collectConsts(term) while (context.depth > depth) { context.kind match { case Introduce(c, ty) => if (consts.contains(c)) { term = mk_abs(c, ty, term) typeOf = Type.Fun(ty, typeOf) consts = consts - c } case Define(c, ty, _) => if (consts.contains(c)) { term = mk_abs(c, ty, term) typeOf = Type.Fun(ty, typeOf) consts = consts - c } case Choose(c, ty, _) => if (consts.contains(c)) { term = mk_abs(c, ty, term) typeOf = Type.Fun(ty, typeOf) consts = consts - c } case _ => // nothing to do, the context does not introduce any constants } context = context.parentContext.get } } mk_cterm(context, term, typeOf) } private def getTypeOfTerm(tm : Term) : Type = { typeOfTerm(tm) match { case None => failwith("term is not wellformed in this context") case Some(ty) => ty } } private def equivalent(u : Term, v : Term) : Boolean = { if ((u eq v) || u == v) return true val f = betaEtaNormalform(u) val g = betaEtaNormalform(v) alpha_equivalent(f, g) } private def equivalent(u : CTerm, v : CTerm) : Boolean = { u == v } def reflexive(tm_ : CTerm) : Theorem = { val tm = doautolift(tm_) val a = tm.term val ty = tm.typeOf mk_theorem(this, mk_equals(a, a, ty)) } def normalize(tm_ : CTerm) : Theorem = { val tm = doautolift(tm_) val a = tm.term val ty = tm.typeOf val b = KernelUtils.betaEtaLongNormalform(this, a) mk_theorem(this, mk_equals(a, b, ty)) } def normalize(p : Theorem, q_ : CTerm) : Theorem = { checkTheoremContext(p) val q = doautolift(q_) if (equivalent(p.prop, q)) mk_theorem(this, q.term) else failwith("propositions are not alpha/beta/eta equivalent") } private def _mkFresh(name : IndexedName, fresh : IndexedName => Boolean) : IndexedName = { def isFresh(name : Name) = !isPolyConst(name) && typeOfConst(name).isEmpty && fresh(name.name) var i : Utils.Integer = if (name.index.isDefined) name.index.get else 0 do { val indexedName = IndexedName(name.name, if (i == 0) None else Some(i)) if (isFresh(Name(None, indexedName)) && isFresh(Name(Some(namespace), indexedName))) return indexedName i = i + 1 } while (true) failwith("mkFresh: internal error") } def mkFresh(name : IndexedName) : IndexedName = { _mkFresh(name, c => true) } def mkFreshs(names : Vector[IndexedName]) : Vector[IndexedName] = { var results : Vector[IndexedName] = Vector() for (name <- names) { val r = _mkFresh(name, n => results.indexOf(n) < 0) results = results :+ r } results } def destAbs(term_ : CTerm) : Option[(Context, CTerm, CTerm)] = { val term = doautolift(term_) term.term match { case Abs(name, ty, body) => val ns = if (isMainThread) Some(namespace) else None val x = Const(Name(ns, mkFresh(name))) val context = introduce(x.name, ty) val cx = mk_cterm(context, x, ty) val cbody = term.typeOf match { case Fun(_, range) => mk_cterm(context, KernelUtils.substVar(body, name, x), range) case _ => failwith("destAbs: internal error") } Some((context, cx, cbody)) case _ => None } } // hugely inefficient operation, should not be necessary to recompute type each time def destComb(term : CTerm) : Option[(CTerm, CTerm)] = { doautolift(term).term match { case Comb(f, g) => Some(certify(f), certify(g)) case _ => 
None } } def transitive(p : Theorem, q : Theorem) : Theorem = { checkTheoremContext(p) checkTheoremContext(q) val (a, b1, ty_a) = dest_equals(p.proposition) match { case None => failwith("transitive: first theorem is not an equation") case Some(u) => u } val (b2, c, ty_c) = dest_equals(q.proposition) match { case None => failwith("transitive: second theorem is not an equation") case Some(u) => u } if (ty_a == ty_c && equivalent(b1, b2)) mk_theorem(this, mk_equals(a, c, ty_a)) else failwith("transitive: middle propositions are not equivalent") } def comb(p : Theorem, q : Theorem) : Theorem = { checkTheoremContext(p) checkTheoremContext(q) val (f, g, fun_ty) = dest_equals(p.proposition) match { case None => failwith("comb: first theorem is not an equation") case Some(u) => u } val (a, b, arg_ty) = dest_equals(q.proposition) match { case None => failwith("comb: second theorem is not an equation") case Some(u) => u } fun_ty match { case Fun(domain, range) if domain == arg_ty => mk_theorem(this, mk_equals(Comb(f, a), Comb(g, b), range)) case _ => failwith("comb: types do not match up") } } def modusponens(p : Theorem, q : Theorem) : Theorem = { checkTheoremContext(p) checkTheoremContext(q) def mk(antecedent : Term, conclusion : Term) : Theorem = { if (equivalent(p.proposition, antecedent)) mk_theorem(this, conclusion) else failwith("modusponens: antecedents do not match") } dest_equals(q.proposition) match { case Some((a, b, _)) => mk(a, b) case None => dest_implies(q.proposition) match { case Some((a, b)) => mk(a, b) case None => failwith("modusponens: equality or implication expected as second theorem") } } } def abs(p : Theorem) : Theorem = { checkTheoremContext(p) val (x, xty, body) = dest_forall(p.proposition) match { case None => failwith("abs: theorem is not a universal quantification") case Some(u) => u } val (a, b, ty) = dest_equals(body) match { case None => failwith("abs: theorem is not a universally quantified equality") case Some(u) => u } val left = Abs(x, xty, a) val right = Abs(x, xty, b) mk_theorem(this, mk_equals(left, right, Fun(xty, ty))) } def equiv(p : Theorem, q : Theorem) : Theorem = { checkTheoremContext(p) checkTheoremContext(q) (dest_implies(p.proposition), dest_implies(q.proposition)) match { case (Some((a,b)), Some((b_, a_))) => if (equivalent(a, a_) && equivalent(b, b_)) mk_theorem(this, mk_equals(a, b, Prop)) else failwith("equiv: conclusion and hypothesis pairs do not match up") case _ => failwith("equiv: two implications expected") } } def publicConstants : Set[Name] = { var set : Set[Name] = Set() if (namespace == Kernel.root_namespace) { set = set + Kernel.forall + Kernel.equals + Kernel.exists } for ((name, _) <- constants) { if (!name.namespace.isDefined || name.namespace == Some(namespace)) { set = set + name } } set } def resolveLogicalName(name : Name) : Either[Name, Set[Namespace]] = { def resolveQualifiedName(name : Name, allowBaseResolution : Boolean) : Either[Name, Set[Namespace]] = { if (isPolyConst(name)) { val c = name.name match { case Kernel.equals.name => Kernel.equals case Kernel.forall.name => Kernel.forall case Kernel.exists.name => Kernel.exists case _ => failwith("internal error in resolveLogicalName " + name) } Left(c) } else if (typeOfConst(name).isDefined) { Left(name) } else if (allowBaseResolution) { baseResolutionOfNamespace(name.namespace.get).get(name.name) match { case None => Right(Set()) case Some(namespaces) => if (namespaces.size == 1) Left(Name(Some(namespaces.head), name.name)) else Right(namespaces) } } else { Right(Set()) } 
} if (name.namespace.isDefined) { val ns = aliasesOfNamespace(namespace).get.resolve(name.namespace.get) resolveQualifiedName(Name(Some(ns), name.name), name.namespace.get != namespace) } else { if (typeOfConst(name).isDefined) Left(name) else resolveQualifiedName(Name(Some(namespace), name.name), true) } } } private case class NamespaceInfo(parents : Set[Namespace], aliases : Aliases) private var namespaces : Map[Namespace, Context] = Map() private var namespaceInfo : Map[Namespace, NamespaceInfo] = Map() private val logicNamespaceResolution = { def parentsOf(namespace : Namespace) : Set[Namespace] = parentsOfNamespace(namespace) match { case None => Utils.failwith("no such namespace: " + namespace) case Some(namespaces) => namespaces } def namesOf(namespace : Namespace) : Set[IndexedName] = contextOfNamespace(namespace) match { case None => Utils.failwith("no completed context for namespace: " + namespace) case Some(context) => context.publicConstants.map(name => name.name) } new NamespaceResolution[IndexedName](parentsOf _, namesOf _) } def completedNamespaces = namespaces.keySet def contextOfNamespace(namespace : Namespace) : Option[Context] = namespaces.get(namespace) def parentsOfNamespace(namespace : Namespace) : Option[Set[Namespace]] = namespaceInfo.get(namespace).map(_.parents) def aliasesOfNamespace(namespace : Namespace) : Option[Aliases] = namespaceInfo.get(namespace).map(_.aliases) private def baseResolutionOfNamespace(namespace : Namespace) : NamespaceResolution.Resolution[IndexedName] = { logicNamespaceResolution.baseResolution(namespace) } private def completedContext(namespace : Namespace) : ContextImpl = { contextOfNamespace(namespace) match { case Some(c) => c.asInstanceOf[ContextImpl] case None => failwith("there is no completed namespace '" + namespace + "'") } } private var mainthreads : Map[Namespace, ContextImpl] = Map() // Sets the main thread for this Namespace, returns whether successful private def setMainThread(context : ContextImpl) : Boolean = { val namespace = context.namespace if (namespaces.contains(namespace)) return false mainthreads.get(namespace) match { case None => mainthreads += (namespace -> context) true case Some(mainthread) => if (context.parentContext == Some(mainthread)) { mainthreads += (namespace -> context) true } else false } } def completeNamespace(context : Context) : Context = { if (!context.isInstanceOf[ContextImpl] || context.kernel != this) failwith("context does not belong to this kernel") val namespace = context.namespace if (namespaces.contains(namespace)) failwith("this namespace has already been completed: " + namespace) val ctx = context.asInstanceOf[ContextImpl] val constants = ctx.constants.filterKeys(n => isQualifiedName(n)) val completedContext = new ContextImpl( true, ContextKind.Complete, ctx.depth + 1, ctx.created, Some(ctx), constants) if (setMainThread(completedContext)) { namespaces += (namespace -> completedContext) completedContext } else failwith("Cannot complete namespace '" + namespace + "' because main context thread was snatched away.") } def restoreCompletedNamespace(parents : Set[Namespace], aliases : Aliases, context : Context) { if (!context.isInstanceOf[ContextImpl] || context.kernel != this) failwith("context does not belong to this kernel") val namespace = context.namespace if (namespaces.contains(namespace)) failwith("this namespace has already been completed: " + namespace) namespaces += (namespace -> context) namespaceInfo = namespaceInfo + (namespace -> new NamespaceInfo(parents, aliases)) } def 
createNewNamespace(namespace : Namespace, parents : Set[Namespace], aliases : Aliases) : Context = { var ancestors : Set[Namespace] = Set() if (parentsOfNamespace(namespace).isDefined) failwith("namespace already exists: " + namespace) for (parent <- parents) { contextOfNamespace(parent) match { case Some(context) => ancestors = ancestors ++ context.asInstanceOf[ContextImpl].created.ancestorNamespaces ancestors = ancestors + parent case None => failwith("no such completed namespace: "+parent) } } val created = ContextKind.Created(namespace, parents, ancestors) val constants : Map[Name, Type] = if (namespace == Kernel.root_namespace) Map(Kernel.implies -> Type.Fun(Type.Prop, Type.Fun(Type.Prop, Type.Prop))) else Map() namespaceInfo = namespaceInfo + (namespace -> new NamespaceInfo(parents, aliases)) val ctx = new ContextImpl( true, created, 0, created, None, constants) ctx } // This assumes that c1 and c2 belong to the same kernel and the same context private def findCommonAncestorContext(c1 : ContextImpl, c2 : ContextImpl) : ContextImpl = { var depth1 = c1.depth var context1 = c1 var depth2 = c2.depth var context2 = c2 var depth = if (depth1 > depth2) depth1 else depth2 while (depth1 != depth2) { if (depth1 > depth2) { context1 = context1.parentContext.get depth1 = depth1 - 1 } else { context2 = context2.parentContext.get depth2 = depth2 - 1 } } while (context1 != context2) { (context1.parentContext, context2.parentContext) match { case (Some(c1), Some(c2)) => context1 = c1 context2 = c2 case _ => failwith("no common ancestor context found") } } context1 } import proofpeer.proofscript.serialization.UniquelyIdentifiableStore private class Serializers(store : UniquelyIdentifiableStore) extends KernelSerializers { import proofpeer.general._ import proofpeer.proofscript.serialization._ private val N = new NameSerializers(store) val IndexedNameSerializer : Serializer[IndexedName] = N.IndexedNameSerializer val NamespaceSerializer : Serializer[Namespace] = N.NamespaceSerializer val NameSerializer : Serializer[Name] = N.NameSerializer val AliasSerializer : Serializer[Alias] = N.AliasSerializer val AliasesSerializer : Serializer[Aliases] = N.AliasesSerializer private val T = new CustomizableTermSerializer(store, IndexedNameSerializer, NameSerializer) val TypeSerializer : Serializer[Type] = T.TypeSerializer val TermSerializer : Serializer[Term] = T val ContextKindSerializer = new CustomizableContextKindSerializer(store, TermSerializer, TypeSerializer, NamespaceSerializer, NameSerializer) private class ContextImplSerializer extends Serializer[ContextImpl] { val cis = new UniquelyIdentifiableSerializer(store, this, UISTypeCodes.CONTEXT) val serializer = QuintupleSerializer(PairSerializer(BooleanSerializer, ContextKindSerializer), BigIntSerializer, new TypecastSerializer[ContextKind.Created, ContextKind](ContextKindSerializer), OptionSerializer(cis), MapSerializer(NameSerializer, TypeSerializer)) def serialize(c : ContextImpl) : Any = serializer.serialize(((c.isMainThread, c.kind), c.depth, c.created, c.parentContext, c.constants)) def deserialize(b : Any) : ContextImpl = { val t = serializer.deserialize(b) new ContextImpl(t._1._1, t._1._2, t._2, t._3, t._4, t._5) } } val ContextSerializer : Serializer[Context] = new TypecastSerializer[Context, ContextImpl]( new UniquelyIdentifiableSerializer(store, new ContextImplSerializer, UISTypeCodes.CONTEXT)) private object BasicCTermSerializer extends TransformSerializer[CTerm, (Context, Term, Type)]( TripleSerializer(ContextSerializer, TermSerializer, 
TypeSerializer), (ct : CTerm) => (ct.context, ct.term, ct.typeOf), mk_cterm.tupled) object CTermSerializer extends UniquelyIdentifiableSerializer(store, BasicCTermSerializer, UISTypeCodes.CTERM) private object BasicTheoremSerializer extends TransformSerializer[Theorem, CTerm]( CTermSerializer, (th : Theorem) => th.prop, (ct : CTerm) => mk_theorem(ct.context, ct.term)) object TheoremSerializer extends UniquelyIdentifiableSerializer(store, BasicTheoremSerializer, UISTypeCodes.THEOREM) } def serializers(store : UniquelyIdentifiableStore) : KernelSerializers = { new Serializers(store) } }
proofpeer/proofpeer-proofscript
shared/src/main/scala/proofpeer/proofscript/logic/KernelImpl.scala
Scala
mit
31,244
/* * tuProlog - Copyright (C) 2001-2002 aliCE team at deis.unibo.it * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package com.szadowsz.gospel.core.event.interpreter import java.util.EventObject import com.szadowsz.gospel.core.Prolog /** * Base class for engine event * * @since 1.3 * */ @SerialVersionUID(1L) class PrologEvent(source: Prolog) extends EventObject(source) { val when = System.currentTimeMillis() /** * Gets the VM time of event occurrence. * @return time in millis */ def getTime() = when }
zakski/project-soisceal
scala-core/src/main/scala/com/szadowsz/gospel/core/event/interpreter/PrologEvent.scala
Scala
lgpl-3.0
1,229
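A trivial consumer sketch (not part of the original repository) showing that PrologEvent only exposes the inherited EventObject accessors plus the getTime method defined above.

import com.szadowsz.gospel.core.event.interpreter.PrologEvent

// Logs which engine raised the event and the VM time (millis) at which it was created.
def logEvent(e: PrologEvent): Unit =
  println(s"Prolog event from ${e.getSource} at ${e.getTime()} ms")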
/////////////////////////////////////////////////////////////////////////////// // Memoizer.scala // // Copyright (C) 2012 Ben Wing, The University of Texas at Austin // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /////////////////////////////////////////////////////////////////////////////// package opennlp.fieldspring.perceptron import collection.mutable /** * A class for "memoizing" words, i.e. mapping them to some other type * (e.g. Int) that should be faster to compare and potentially require * less space. */ abstract class Memoizer { /** * The type of a memoized word. */ type Word /** * Map a word as a string to its memoized form. */ def memoize_string(word: String): Word /** * Map a word from its memoized form back to a string. */ def unmemoize_string(word: Word): String /** * The type of a mutable map from memoized words to Ints. */ type WordIntMap /** * Create a mutable map from memoized words to Ints. */ def create_word_int_map(): WordIntMap /** * The type of a mutable map from memoized words to Doubles. */ type WordDoubleMap /** * Create a mutable map from memoized words to Doubles. */ def create_word_double_map(): WordDoubleMap lazy val blank_memoized_string = memoize_string("") def lowercase_memoized_word(word: Word) = memoize_string(unmemoize_string(word).toLowerCase) } /** * The memoizer we actually use. Maps word strings to Ints. * * @param minimum_index Minimum index used, usually either 0 or 1. */ class IntStringMemoizer(val minimum_index: Int = 0) extends Memoizer { type Word = Int protected var next_word_count: Word = minimum_index def number_of_entries = next_word_count - minimum_index // For replacing strings with ints. This should save space on 64-bit // machines (string pointers are 8 bytes, ints are 4 bytes) and might // also speed lookup. protected val word_id_map = mutable.Map[String,Word]() //protected val word_id_map = trovescala.ObjectIntMap[String]() // Map in the opposite direction. protected val id_word_map = mutable.Map[Word,String]() //protected val id_word_map = trovescala.IntObjectMap[String]() def memoize_string(word: String) = { val index = word_id_map.getOrElse(word, -1) // println("Saw word=%s, index=%s" format (word, index)) if (index != -1) index else { val newind = next_word_count next_word_count += 1 word_id_map(word) = newind id_word_map(newind) = word newind } } def unmemoize_string(word: Word) = id_word_map(word) //def create_word_int_map() = trovescala.IntIntMap() //type WordIntMap = trovescala.IntIntMap //def create_word_double_map() = trovescala.IntDoubleMap() //type WordDoubleMap = trovescala.IntDoubleMap def create_word_int_map() = mutable.Map[Word,Int]() type WordIntMap = mutable.Map[Word,Int] def create_word_double_map() = mutable.Map[Word,Double]() type WordDoubleMap = mutable.Map[Word,Double] } // /** // * Version that uses Trove for extremely fast and memory-efficient hash // * tables, making use of the Trove-Scala interface for easy access to the // * Trove hash tables. 
// */ // class TroveIntStringMemoizer( // minimum_index: Int = 0 // ) extends IntStringMemoizer(minimum_index) { // override protected val word_id_map = trovescala.ObjectIntMap[String]() // override protected val id_word_map = trovescala.IntObjectMap[String]() // override def create_word_int_map() = trovescala.IntIntMap() // override type WordIntMap = trovescala.IntIntMap // override def create_word_double_map() = trovescala.IntDoubleMap() // override type WordDoubleMap = trovescala.IntDoubleMap // }
utcompling/fieldspring
src/main/scala/opennlp/fieldspring/perceptron/Memoizer.scala
Scala
apache-2.0
4,225
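A short usage sketch for IntStringMemoizer as defined above; the words and counts are illustrative only.

import opennlp.fieldspring.perceptron.IntStringMemoizer

val memo = new IntStringMemoizer(minimum_index = 1)

val austin = memo.memoize_string("austin")        // first new word gets id 1
val boston = memo.memoize_string("boston")        // next new word gets id 2
assert(memo.memoize_string("austin") == austin)   // repeated words reuse their id
assert(memo.unmemoize_string(boston) == "boston")
assert(memo.number_of_entries == 2)

// Counting keyed by memoized word (Int) instead of String
val counts = memo.create_word_int_map()
counts(austin) = counts.getOrElse(austin, 0) + 1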
package org.kimbasoft.akka.actor.regular import akka.actor.{Actor, Props} import org.kimbasoft.akka.actor.regular.ActorSimple.Exceptions.SimpleRequestException import org.kimbasoft.akka.actor.regular.ActorSimple.Messages.{SimpleResponse, SimpleRequest} import scala.util.{Failure, Success, Try} /** * Missing documentation * * @since 1.0 */ class ActorSimple extends Actor { val name = self.path.name /** * Method that needs to be implemented as the Actor's behavior * @return */ def receive: Receive = { case SimpleRequest("deadletter") => println(s"Actor[$name]: received a DeadLetter request") context.system.deadLetters ! SimpleResponse(Success("Request to send to DeadLetter!")) case SimpleRequest(message) => println(s"""Actor[$name]: received message: "$message"""") sender ! SimpleResponse(Success(s"""Received and processed message "$message"""")) case request => println(s"""Actor[$name]: received unknown request "$request"""") sender ! SimpleResponse(Failure(SimpleRequestException)) } @throws[Exception](classOf[Exception]) override def preStart(): Unit = { super.preStart() println(s"Actor[$name]: instance of ActorSimple, $self is about to start!") } @throws[Exception](classOf[Exception]) override def postStop(): Unit = { super.postStop() println(s"Actor[$name]: instance of ActorSimple $self was stopped!") } } object ActorSimple { /** * Recommended practice to encapsulate the creation of the Actor's * Props. This will help inexperienced users to create Props with * valid settings. */ def props : Props = Props[ActorSimple] /** * Encapsulating the possible Actor Messages in the Actor's companion * object. */ object Messages { case class SimpleRequest(message: String) case class SimpleResponse(response: Try[String]) } /** * Encapsulating possible Actor processing exceptions in the Actor's * companion object. */ object Exceptions { case object SimpleRequestException extends RuntimeException } }
kimba74/sandbox-scala
src/main/scala/org/kimbasoft/akka/actor/regular/ActorSimple.scala
Scala
gpl-3.0
2,092
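A minimal driver for ActorSimple, assuming only the companion's props and message types shown above and a reasonably recent classic-Akka API; the actor system name and timeout are arbitrary.

import akka.actor.ActorSystem
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.duration._
import org.kimbasoft.akka.actor.regular.ActorSimple
import org.kimbasoft.akka.actor.regular.ActorSimple.Messages.SimpleRequest

object ActorSimpleDemo extends App {
  val system = ActorSystem("demo")
  val simple = system.actorOf(ActorSimple.props, "simple")

  // tell: the SimpleResponse reply goes to the implicit sender (deadLetters here)
  simple ! SimpleRequest("hello")

  // ask: the reply arrives as a Future expected to carry a SimpleResponse
  implicit val timeout: Timeout = Timeout(3.seconds)
  val reply = simple ? SimpleRequest("how are you?")

  system.terminate()
}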
package alexsmirnov.pbconsole import java.util.HashMap import org.apache.commons.lang3.text.StrSubstitutor import scala.io.Source import scalafx.beans.property.StringProperty import javafx.util.Callback import javafx.beans.Observable class Macro { val nameProperty = StringProperty("") def name = nameProperty.get def name_=(v: String) = nameProperty.update(v) val descriptionProperty = StringProperty("") def description = descriptionProperty.get def description_=(v: String) = descriptionProperty.update(v) val contentProperty = StringProperty("") def content = contentProperty.get def content_=(v: String) = contentProperty.update(v) } object Macro { import Settings._ object extractor extends Callback[Macro,Array[Observable]]{ def call(m: Macro) = Array(m.nameProperty,m.descriptionProperty,m.contentProperty) } def apply(name: String,description: String,content: String) = { val m = new Macro m.name = name m.description = description m.content = content m } def prepare(content: String,conf: Settings,params: (String,Any) *): Iterator[String] = { val values = new HashMap[String,Any] values.put(BED_W, conf.bedWidth()) values.put(BED_D, conf.bedDepth()) values.put(H, conf.height()) values.put(Z_OFFSET, conf.zOffset()) params.foreach{ case (name,value) => values.put(name, value) } val sub = new StrSubstitutor(values) val src = Source.fromString(sub.replace(content)) src.getLines() } }
alexsmirnov/printrbot-g2-console
src/main/scala/alexsmirnov/pbconsole/Macro.scala
Scala
bsd-3-clause
1,490
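A small sketch of driving the macro expansion above. Only Macro's own API is taken from the file; the Settings instance conf and the temp placeholder are assumptions made for illustration.

// conf: Settings is assumed to be available; its construction is not shown in this file.
val m = Macro("preheat", "Set hotend temperature and home", "M104 S${temp}\nG28")

val lines: Iterator[String] = Macro.prepare(m.content, conf, "temp" -> 210)
lines.foreach(println)   // prints "M104 S210" and then "G28"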
package scutil.lang import scutil.lang.extension._ object extensions extends extensions trait extensions extends AnyImplicits with AnyRefImplicits with AtomicReferenceImplicits with BooleanImplicits with ByteArrayImplicits with CharsetImplicits with ClassImplicits with ExceptionCatchImplicits with EitherImplicits with Function0Implicits with Function1Implicits with Function2Implicits with FutureImplicits with OptionImplicits with PEndoImplicits with PFunctionImplicits with PartialFunctionImplicits with PredicateImplicits with StringImplicits with ThrowableImplicits with TryImplicits
ritschwumm/scutil
modules/core/src/main/scala/scutil/lang/extensions.scala
Scala
bsd-2-clause
614
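The point of the stacked trait above is that one wildcard import activates every extension module's implicits at once; a one-line usage sketch:

// Brings all of scutil's extension implicits (Option, Either, String, ...) into scope.
import scutil.lang.extensions._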
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql.catalyst.analysis import java.util.TimeZone import org.scalatest.ShouldMatchers import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.dsl.expressions._ import org.apache.spark.sql.catalyst.dsl.plans._ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.catalyst.plans.Cross import org.apache.spark.sql.catalyst.plans.logical._ import org.apache.spark.sql.types._ class AnalysisSuite extends AnalysisTest with ShouldMatchers { import org.apache.spark.sql.catalyst.analysis.TestRelations._ test("union project *") { val plan = (1 to 120) .map(_ => testRelation) .fold[LogicalPlan](testRelation) { (a, b) => a.select(UnresolvedStar(None)).select('a).union(b.select(UnresolvedStar(None))) } assertAnalysisSuccess(plan) } test("check project's resolved") { assert(Project(testRelation.output, testRelation).resolved) assert(!Project(Seq(UnresolvedAttribute("a")), testRelation).resolved) val explode = Explode(AttributeReference("a", IntegerType, nullable = true)()) assert(!Project(Seq(Alias(explode, "explode")()), testRelation).resolved) assert(!Project(Seq(Alias(count(Literal(1)), "count")()), testRelation).resolved) } test("analyze project") { checkAnalysis( Project(Seq(UnresolvedAttribute("a")), testRelation), Project(testRelation.output, testRelation)) checkAnalysis( Project(Seq(UnresolvedAttribute("TbL.a")), SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))), Project(testRelation.output, testRelation)) assertAnalysisError( Project(Seq(UnresolvedAttribute("tBl.a")), SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))), Seq("cannot resolve")) checkAnalysis( Project(Seq(UnresolvedAttribute("TbL.a")), SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))), Project(testRelation.output, testRelation), caseSensitive = false) checkAnalysis( Project(Seq(UnresolvedAttribute("tBl.a")), SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))), Project(testRelation.output, testRelation), caseSensitive = false) } test("resolve sort references - filter/limit") { val a = testRelation2.output(0) val b = testRelation2.output(1) val c = testRelation2.output(2) // Case 1: one missing attribute is in the leaf node and another is in the unary node val plan1 = testRelation2 .where('a > "str").select('a, 'b) .where('b > "str").select('a) .sortBy('b.asc, 'c.desc) val expected1 = testRelation2 .where(a > "str").select(a, b, c) .where(b > "str").select(a, b, c) .sortBy(b.asc, c.desc) .select(a) checkAnalysis(plan1, expected1) // Case 2: all the missing attributes are in the leaf node val plan2 = testRelation2 .where('a > "str").select('a) .where('a > "str").select('a) .sortBy('b.asc, 'c.desc) val expected2 = 
testRelation2 .where(a > "str").select(a, b, c) .where(a > "str").select(a, b, c) .sortBy(b.asc, c.desc) .select(a) checkAnalysis(plan2, expected2) } test("resolve sort references - join") { val a = testRelation2.output(0) val b = testRelation2.output(1) val c = testRelation2.output(2) val h = testRelation3.output(3) // Case: join itself can resolve all the missing attributes val plan = testRelation2.join(testRelation3) .where('a > "str").select('a, 'b) .sortBy('c.desc, 'h.asc) val expected = testRelation2.join(testRelation3) .where(a > "str").select(a, b, c, h) .sortBy(c.desc, h.asc) .select(a, b) checkAnalysis(plan, expected) } test("resolve sort references - aggregate") { val a = testRelation2.output(0) val b = testRelation2.output(1) val c = testRelation2.output(2) val alias_a3 = count(a).as("a3") val alias_b = b.as("aggOrder") // Case 1: when the child of Sort is not Aggregate, // the sort reference is handled by the rule ResolveSortReferences val plan1 = testRelation2 .groupBy('a, 'c, 'b)('a, 'c, count('a).as("a3")) .select('a, 'c, 'a3) .orderBy('b.asc) val expected1 = testRelation2 .groupBy(a, c, b)(a, c, alias_a3, b) .select(a, c, alias_a3.toAttribute, b) .orderBy(b.asc) .select(a, c, alias_a3.toAttribute) checkAnalysis(plan1, expected1) // Case 2: when the child of Sort is Aggregate, // the sort reference is handled by the rule ResolveAggregateFunctions val plan2 = testRelation2 .groupBy('a, 'c, 'b)('a, 'c, count('a).as("a3")) .orderBy('b.asc) val expected2 = testRelation2 .groupBy(a, c, b)(a, c, alias_a3, alias_b) .orderBy(alias_b.toAttribute.asc) .select(a, c, alias_a3.toAttribute) checkAnalysis(plan2, expected2) } test("resolve relations") { assertAnalysisError(UnresolvedRelation(TableIdentifier("tAbLe")), Seq()) checkAnalysis(UnresolvedRelation(TableIdentifier("TaBlE")), testRelation) checkAnalysis( UnresolvedRelation(TableIdentifier("tAbLe")), testRelation, caseSensitive = false) checkAnalysis( UnresolvedRelation(TableIdentifier("TaBlE")), testRelation, caseSensitive = false) } test("divide should be casted into fractional types") { val plan = caseInsensitiveAnalyzer.execute( testRelation2.select( 'a / Literal(2) as 'div1, 'a / 'b as 'div2, 'a / 'c as 'div3, 'a / 'd as 'div4, 'e / 'e as 'div5)) val pl = plan.asInstanceOf[Project].projectList assert(pl(0).dataType == DoubleType) assert(pl(1).dataType == DoubleType) assert(pl(2).dataType == DoubleType) assert(pl(3).dataType == DoubleType) assert(pl(4).dataType == DoubleType) } test("pull out nondeterministic expressions from RepartitionByExpression") { val plan = RepartitionByExpression(Seq(Rand(33)), testRelation, numPartitions = 10) val projected = Alias(Rand(33), "_nondeterministic")() val expected = Project(testRelation.output, RepartitionByExpression(Seq(projected.toAttribute), Project(testRelation.output :+ projected, testRelation), numPartitions = 10)) checkAnalysis(plan, expected) } test("pull out nondeterministic expressions from Sort") { val plan = Sort(Seq(SortOrder(Rand(33), Ascending)), false, testRelation) val projected = Alias(Rand(33), "_nondeterministic")() val expected = Project(testRelation.output, Sort(Seq(SortOrder(projected.toAttribute, Ascending)), false, Project(testRelation.output :+ projected, testRelation))) checkAnalysis(plan, expected) } test("SPARK-9634: cleanup unnecessary Aliases in LogicalPlan") { val a = testRelation.output.head var plan = testRelation.select(((a + 1).as("a+1") + 2).as("col")) var expected = testRelation.select((a + 1 + 2).as("col")) checkAnalysis(plan, expected) plan = 
testRelation.groupBy(a.as("a1").as("a2"))((min(a).as("min_a") + 1).as("col")) expected = testRelation.groupBy(a)((min(a) + 1).as("col")) checkAnalysis(plan, expected) // CreateStruct is a special case that we should not trim Alias for it. plan = testRelation.select(CreateStruct(Seq(a, (a + 1).as("a+1"))).as("col")) expected = testRelation.select(CreateNamedStruct(Seq( Literal(a.name), a, Literal("a+1"), (a + 1))).as("col")) checkAnalysis(plan, expected) } test("Analysis may leave unnecassary aliases") { val att1 = testRelation.output.head var plan = testRelation.select( CreateStruct(Seq(att1, ((att1.as("aa")) + 1).as("a_plus_1"))).as("col"), att1 ) val prevPlan = getAnalyzer(true).execute(plan) plan = prevPlan.select(CreateArray(Seq( CreateStruct(Seq(att1, (att1 + 1).as("a_plus_1"))).as("col1"), /** alias should be eliminated by [[CleanupAliases]] */ "col".attr.as("col2") )).as("arr")) plan = getAnalyzer(true).execute(plan) val expectedPlan = prevPlan.select( CreateArray(Seq( CreateNamedStruct(Seq( Literal(att1.name), att1, Literal("a_plus_1"), (att1 + 1))), 'col.struct(prevPlan.output(0).dataType.asInstanceOf[StructType]).notNull )).as("arr") ) checkAnalysis(plan, expectedPlan) } test("SPARK-10534: resolve attribute references in order by clause") { val a = testRelation2.output(0) val c = testRelation2.output(2) val plan = testRelation2.select('c).orderBy(Floor('a).asc) val expected = testRelation2.select(c, a) .orderBy(Floor(Cast(a, LongType, Option(TimeZone.getDefault().getID))).asc).select(c) checkAnalysis(plan, expected) } test("self intersect should resolve duplicate expression IDs") { val plan = testRelation.intersect(testRelation) assertAnalysisSuccess(plan) } test("SPARK-8654: invalid CAST in NULL IN(...) expression") { val plan = Project(Alias(In(Literal(null), Seq(Literal(1), Literal(2))), "a")() :: Nil, LocalRelation() ) assertAnalysisSuccess(plan) } test("SPARK-8654: different types in inlist but can be converted to a common type") { val plan = Project(Alias(In(Literal(null), Seq(Literal(1), Literal(1.2345))), "a")() :: Nil, LocalRelation() ) assertAnalysisSuccess(plan) } test("SPARK-8654: check type compatibility error") { val plan = Project(Alias(In(Literal(null), Seq(Literal(true), Literal(1))), "a")() :: Nil, LocalRelation() ) assertAnalysisError(plan, Seq("data type mismatch: Arguments must be same type")) } test("SPARK-11725: correctly handle null inputs for ScalaUDF") { val string = testRelation2.output(0) val double = testRelation2.output(2) val short = testRelation2.output(4) val nullResult = Literal.create(null, StringType) def checkUDF(udf: Expression, transformed: Expression): Unit = { checkAnalysis( Project(Alias(udf, "")() :: Nil, testRelation2), Project(Alias(transformed, "")() :: Nil, testRelation2) ) } // non-primitive parameters do not need special null handling val udf1 = ScalaUDF((s: String) => "x", StringType, string :: Nil) val expected1 = udf1 checkUDF(udf1, expected1) // only primitive parameter needs special null handling val udf2 = ScalaUDF((s: String, d: Double) => "x", StringType, string :: double :: Nil) val expected2 = If(IsNull(double), nullResult, udf2) checkUDF(udf2, expected2) // special null handling should apply to all primitive parameters val udf3 = ScalaUDF((s: Short, d: Double) => "x", StringType, short :: double :: Nil) val expected3 = If( IsNull(short) || IsNull(double), nullResult, udf3) checkUDF(udf3, expected3) // we can skip special null handling for primitive parameters that are not nullable // TODO: this is disabled for now as we 
can not completely trust `nullable`. val udf4 = ScalaUDF( (s: Short, d: Double) => "x", StringType, short :: double.withNullability(false) :: Nil) val expected4 = If( IsNull(short), nullResult, udf4) // checkUDF(udf4, expected4) } test("SPARK-11863 mixture of aliases and real columns in order by clause - tpcds 19,55,71") { val a = testRelation2.output(0) val c = testRelation2.output(2) val alias1 = a.as("a1") val alias2 = c.as("a2") val alias3 = count(a).as("a3") val plan = testRelation2 .groupBy('a, 'c)('a.as("a1"), 'c.as("a2"), count('a).as("a3")) .orderBy('a1.asc, 'c.asc) val expected = testRelation2 .groupBy(a, c)(alias1, alias2, alias3) .orderBy(alias1.toAttribute.asc, alias2.toAttribute.asc) .select(alias1.toAttribute, alias2.toAttribute, alias3.toAttribute) checkAnalysis(plan, expected) } test("Eliminate the unnecessary union") { val plan = Union(testRelation :: Nil) val expected = testRelation checkAnalysis(plan, expected) } test("SPARK-12102: Ignore nullablity when comparing two sides of case") { val relation = LocalRelation('a.struct('x.int), 'b.struct('x.int.withNullability(false))) val plan = relation.select(CaseWhen(Seq((Literal(true), 'a.attr)), 'b).as("val")) assertAnalysisSuccess(plan) } test("Keep attribute qualifiers after dedup") { val input = LocalRelation('key.int, 'value.string) val query = Project(Seq($"x.key", $"y.key"), Join( Project(Seq($"x.key"), SubqueryAlias("x", input)), Project(Seq($"y.key"), SubqueryAlias("y", input)), Cross, None)) assertAnalysisSuccess(query) } private def assertExpressionType( expression: Expression, expectedDataType: DataType): Unit = { val afterAnalyze = Project(Seq(Alias(expression, "a")()), OneRowRelation).analyze.expressions.head if (!afterAnalyze.dataType.equals(expectedDataType)) { fail( s""" |data type of expression $expression doesn't match expected: |Actual data type: |${afterAnalyze.dataType} | |Expected data type: |${expectedDataType} """.stripMargin) } } test("SPARK-15776: test whether Divide expression's data type can be deduced correctly by " + "analyzer") { assertExpressionType(sum(Divide(1, 2)), DoubleType) assertExpressionType(sum(Divide(1.0, 2)), DoubleType) assertExpressionType(sum(Divide(1, 2.0)), DoubleType) assertExpressionType(sum(Divide(1.0, 2.0)), DoubleType) assertExpressionType(sum(Divide(1, 2.0f)), DoubleType) assertExpressionType(sum(Divide(1.0f, 2)), DoubleType) assertExpressionType(sum(Divide(1, Decimal(2))), DecimalType(31, 11)) assertExpressionType(sum(Divide(Decimal(1), 2)), DecimalType(31, 11)) assertExpressionType(sum(Divide(Decimal(1), 2.0)), DoubleType) assertExpressionType(sum(Divide(1.0, Decimal(2.0))), DoubleType) } test("SPARK-18058: union and set operations shall not care about the nullability" + " when comparing column types") { val firstTable = LocalRelation( AttributeReference("a", StructType(Seq(StructField("a", IntegerType, nullable = true))), nullable = false)()) val secondTable = LocalRelation( AttributeReference("a", StructType(Seq(StructField("a", IntegerType, nullable = false))), nullable = false)()) val unionPlan = Union(firstTable, secondTable) assertAnalysisSuccess(unionPlan) val r1 = Except(firstTable, secondTable) val r2 = Intersect(firstTable, secondTable) assertAnalysisSuccess(r1) assertAnalysisSuccess(r2) } test("resolve as with an already existed alias") { checkAnalysis( Project(Seq(UnresolvedAttribute("tbl2.a")), SubqueryAlias("tbl", testRelation).as("tbl2")), Project(testRelation.output, testRelation), caseSensitive = false) checkAnalysis(SubqueryAlias("tbl", 
testRelation).as("tbl2"), testRelation) } test("SPARK-20311 range(N) as alias") { def rangeWithAliases(args: Seq[Int], outputNames: Seq[String]): LogicalPlan = { SubqueryAlias("t", UnresolvedTableValuedFunction("range", args.map(Literal(_)), outputNames)) .select(star()) } assertAnalysisSuccess(rangeWithAliases(3 :: Nil, "a" :: Nil)) assertAnalysisSuccess(rangeWithAliases(1 :: 4 :: Nil, "b" :: Nil)) assertAnalysisSuccess(rangeWithAliases(2 :: 6 :: 2 :: Nil, "c" :: Nil)) assertAnalysisError( rangeWithAliases(3 :: Nil, "a" :: "b" :: Nil), Seq("expected 1 columns but found 2 columns")) } }
setjet/spark
sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
Scala
apache-2.0
16,566
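The Divide typing rules asserted under SPARK-15776 above can also be observed through the public DataFrame API. A spark-shell-style sketch (a SparkSession named spark is assumed):

import org.apache.spark.sql.functions.{lit, sum}

// Integer operands to Divide are widened, so the aggregate resolves to DoubleType.
val divType = spark.range(1).select(sum(lit(1) / lit(2)).as("q")).schema("q").dataType
println(divType)  // DoubleType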
/*********************************************************************** * Copyright (c) 2013-2019 Commonwealth Computer Research, Inc. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Apache License, Version 2.0 * which accompanies this distribution and is available at * http://www.opensource.org/licenses/apache2.0.php. ***********************************************************************/ package org.locationtech.geomesa.process.query import com.typesafe.scalalogging.LazyLogging import org.geotools.data.Query import org.geotools.data.collection.ListFeatureCollection import org.geotools.data.simple.{SimpleFeatureCollection, SimpleFeatureSource} import org.geotools.process.factory.{DescribeParameter, DescribeProcess, DescribeResult} import org.locationtech.geomesa.features.{ScalaSimpleFeature, TransformSimpleFeature} import org.locationtech.geomesa.filter.factory.FastFilterFactory import org.locationtech.geomesa.index.geotools.GeoMesaFeatureCollection import org.locationtech.geomesa.index.planning.QueryPlanner import org.locationtech.geomesa.process.{FeatureResult, GeoMesaProcess, GeoMesaProcessVisitor} import org.opengis.feature.Feature import org.opengis.feature.simple.SimpleFeature import org.opengis.filter.Filter @DescribeProcess( title = "Geomesa Query", description = "Performs a Geomesa optimized query using spatiotemporal indexes" ) class QueryProcess extends GeoMesaProcess with LazyLogging { @DescribeResult(description = "Output feature collection") def execute( @DescribeParameter( name = "features", description = "The feature set on which to query") features: SimpleFeatureCollection, @DescribeParameter( name = "filter", min = 0, description = "The filter to apply to the feature collection") filter: Filter, @DescribeParameter( name = "properties", min = 0, max = 128, collectionType = classOf[String], description = "The properties/transforms to apply to the feature collection") properties: java.util.List[String] = null ): SimpleFeatureCollection = { logger.debug("Attempting Geomesa query on type " + features.getClass.getName) val propsArray = Option(properties).map(_.toArray(Array.empty[String])).filter(_.length > 0).orNull val visitor = new QueryVisitor(features, Option(filter).getOrElse(Filter.INCLUDE), propsArray) GeoMesaFeatureCollection.visit(features, visitor) visitor.getResult.results } } class QueryVisitor(features: SimpleFeatureCollection, filter: Filter, properties: Array[String]) extends GeoMesaProcessVisitor with LazyLogging { private val (sft, transformFeature) = if (properties == null) { (features.getSchema, null) } else { val original = features.getSchema val (transforms, transformSft) = QueryPlanner.buildTransformSFT(original, properties) val transformSf = TransformSimpleFeature(original, transformSft, transforms) (transformSft, transformSf) } private val retype: (SimpleFeature) => SimpleFeature = if (transformFeature == null) { (sf) => sf } else { (sf) => { transformFeature.setFeature(sf) ScalaSimpleFeature.create(transformFeature.getFeatureType, transformFeature) } } // normally handled in our query planner, but we are going to use the filter directly here private lazy val manualFilter = FastFilterFactory.optimize(features.getSchema, filter) private val manualVisitResults = new ListFeatureCollection(sft) private var resultCalc = FeatureResult(manualVisitResults) // non-optimized visit override def visit(feature: Feature): Unit = { val sf = feature.asInstanceOf[SimpleFeature] if 
(manualFilter.evaluate(sf)) { manualVisitResults.add(retype(sf)) } } override def getResult: FeatureResult = resultCalc override def execute(source: SimpleFeatureSource, query: Query): Unit = { logger.debug(s"Running Geomesa query on source type ${source.getClass.getName}") query.setFilter(org.locationtech.geomesa.filter.mergeFilters(query.getFilter, filter)) if (properties != null && properties.length > 0) { if (query.getProperties != Query.ALL_PROPERTIES) { logger.warn(s"Overriding inner query's properties (${query.getProperties}) " + s"with properties/transforms ${properties.mkString(",")}.") } query.setPropertyNames(properties) } resultCalc = FeatureResult(source.getFeatures(query)) } }
elahrvivaz/geomesa
geomesa-process/geomesa-process-vector/src/main/scala/org/locationtech/geomesa/process/query/QueryProcess.scala
Scala
apache-2.0
4,724
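A minimal usage sketch for the QueryProcess above. The ECQL expression and the attribute names ("name", "geom") are assumptions for illustration; any OGC Filter and any subset of the schema's attributes can be passed the same way.

import java.util.Arrays
import org.geotools.data.simple.SimpleFeatureCollection
import org.geotools.filter.text.ecql.ECQL
import org.locationtech.geomesa.process.query.QueryProcess

object QueryProcessSketch {
  // Run the optimized query against an existing GeoMesa-backed feature collection.
  def run(features: SimpleFeatureCollection): SimpleFeatureCollection = {
    val filter = ECQL.toFilter("name = 'alice'")    // hypothetical attribute
    val properties = Arrays.asList("name", "geom")  // hypothetical transform list
    new QueryProcess().execute(features, filter, properties)
  }
}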
package com.socrata.datacoordinator.truth.loader import scala.{collection => sc} import gnu.trove.map.hash.TIntObjectHashMap import com.socrata.datacoordinator.util.TIntObjectHashMapWrapper /** Accumulate a report from a `Loader`. These * reports can be large; this is provided to allow * them to be spooled to disk somewhere. * * @note These must be safe to access concurrently * from multiple threads before the loader's * `finish()` method is called. */ trait ReportWriter[-CV] { def inserted(job: Int, result: IdAndVersion[CV]): Unit def updated(job: Int, result: IdAndVersion[CV]): Unit def deleted(job: Int, result: CV): Unit def error(job: Int, result: Failure[CV]): Unit var finished: Boolean = false } class SimpleReportWriter[CV] extends ReportWriter[CV] { private val insertedMap = new TIntObjectHashMap[IdAndVersion[CV]] private val updatedMap = new TIntObjectHashMap[IdAndVersion[CV]] private val deletedMap = new TIntObjectHashMap[CV] private val errorMap = new TIntObjectHashMap[Failure[CV]] def inserted(job: Int, result: IdAndVersion[CV]) { insertedMap.synchronized { insertedMap.put(job, result) } } def updated(job: Int, result: IdAndVersion[CV]) { updatedMap.synchronized { updatedMap.put(job, result) } } def deleted(job: Int, result: CV) { deletedMap.synchronized { deletedMap.put(job, result) } } def error(job: Int, result: Failure[CV]) { errorMap.synchronized { errorMap.put(job, result) } } /* This must be called only after the loader is `finish()`ed */ def report: Report[CV] = { assert(finished, "report() called without being finished first") def w[T](x: TIntObjectHashMap[T]) = TIntObjectHashMapWrapper(x) JobReport(w(insertedMap), w(updatedMap), w(deletedMap), w(errorMap)) } private case class JobReport(inserted: sc.Map[Int, IdAndVersion[CV]], updated: sc.Map[Int, IdAndVersion[CV]], deleted: sc.Map[Int, CV], errors: sc.Map[Int, Failure[CV]]) extends Report[CV] } object NoopReportWriter extends ReportWriter[Any] { def inserted(job: Int, result: IdAndVersion[Any]) {} def updated(job: Int, result: IdAndVersion[Any]) {} def deleted(job: Int, result: Any) {} def error(job: Int, result: Failure[Any]) {} }
socrata-platform/data-coordinator
coordinatorlib/src/main/scala/com/socrata/datacoordinator/truth/loader/ReportWriter.scala
Scala
apache-2.0
2,255
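SimpleReportWriter above keeps every result on the heap under per-map synchronization; for very large loads a writer that only tallies outcomes may be enough. A sketch, assuming it lives in the same com.socrata.datacoordinator.truth.loader package (IdAndVersion and Failure come from that package and are only referenced, never constructed):

import java.util.concurrent.atomic.AtomicLong

class CountingReportWriter[CV] extends ReportWriter[CV] {
  private val insertCount = new AtomicLong()
  private val updateCount = new AtomicLong()
  private val deleteCount = new AtomicLong()
  private val errorCount  = new AtomicLong()

  def inserted(job: Int, result: IdAndVersion[CV]): Unit = { insertCount.incrementAndGet() }
  def updated(job: Int, result: IdAndVersion[CV]): Unit = { updateCount.incrementAndGet() }
  def deleted(job: Int, result: CV): Unit = { deleteCount.incrementAndGet() }
  def error(job: Int, result: Failure[CV]): Unit = { errorCount.incrementAndGet() }

  // Human-readable totals once the loader has finished.
  def summary: String =
    s"inserted=${insertCount.get} updated=${updateCount.get} deleted=${deleteCount.get} errors=${errorCount.get}"
}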
package dbservice import play.api.Logger import pwguard.global.Globals.ExecutionContexts.DB._ import models.User import scala.concurrent.Future import scala.util.Try /** DAO for interacting with User objects. */ class UserDAO(_dal: DAL, _logger: Logger) extends BaseDAO[User](_dal, _logger) { import dal.profile.simple._ import dal.{UsersTable, Users} private type UserQuery = Query[UsersTable, User, Seq] // -------------------------------------------------------------------------- // Public methods // ------------------------------------------------------------------------ /** Get all users. * * @return `Future(Set[User])` */ def getAll: Future[Set[User]] = { withTransaction { implicit session => loadMany( for { u <- Users } yield u ) } } /** Get all users, with a count of the number of passwords each one has. * * @return a future of tuples, with each user paired with a password count */ def getAllWithPasswordCounts: Future[Seq[(User, Int)]] = { withSession { implicit session => getAll flatMap { users => DAO.passwordEntryDAO.totalsForUsers(users) } } } /** Find all users with the specified IDs. * @param idSet the IDs * * @return `Future(Set[model])` */ def findByIDs(idSet: Set[Int]): Future[Set[User]] = { withTransaction { implicit session => loadMany( for { u <- Users if u.id inSet idSet } yield u ) } } /** Find a user by email address. * * @param email the email address * * @return `Future(None)` if no such user exists; * `Future(Some(user))` if the user is found. */ def findByEmail(email: String): Future[Option[User]] = { withTransaction { implicit session => loadOneModel( for {u <- Users if u.email === email } yield u ) } } /** Create a user instance, erroring out if it already exists. * * @param user the user object to create * * @return `Future(user)`, with a possibly changed model object */ def create(user: User): Future[User] = { withTransaction { implicit session: SlickSession => findByEmail(user.email).flatMap { userOpt => userOpt.map { u => daoError(s"Email ${u.email} is taken.") } .getOrElse { save(user) } } } } // -------------------------------------------------------------------------- // Protected methods // ------------------------------------------------------------------------ protected def queryByID(id: Int): UserQuery = { for {u <- Users if u.id === id } yield u } protected val baseQuery = Users protected def insert(user: User)(implicit session: SlickSession): Try[User] = { doInsert(user) map { id => user.copy(id = Some(id)) } } protected def update(user: User)(implicit session: SlickSession): Try[User] = { Try { val q = for { u <- Users if u.id === user.id.get } yield (u.email, u.encryptedPassword, u.pwEntryEncryptionKey, u.firstName, u.lastName, u.active, u.admin) q.update((user.email, user.encryptedPassword, user.pwEntryEncryptionKeyString, user.firstName, user.lastName, user.active, user.admin)) user } } // -------------------------------------------------------------------------- // Private methods // ------------------------------------------------------------------------ private def checkUser(user: User): Future[User] = { Future.successful(user) } private def loadMany(query: UserQuery)(implicit session: SlickSession): Future[Set[User]] = { Future { query.list.toSet } } }
bmc/pwguard
app/dbservice/UserDAO.scala
Scala
bsd-3-clause
3,742
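A sketch of calling the DAO above from service code; construction of the UserDAO (its DAL and Logger) is assumed to happen elsewhere, and only the futures-based API shown in the class is used.

import scala.concurrent.{ExecutionContext, Future}

object UserLookups {
  // Hypothetical helper: is this email address already registered?
  def emailTaken(dao: UserDAO, email: String)(implicit ec: ExecutionContext): Future[Boolean] =
    dao.findByEmail(email).map(_.isDefined)
}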
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cloudera.spark.cloud.operations import java.io.IOException import java.net.URI import com.cloudera.spark.cloud.utils.ConfigSerDeser import com.cloudera.spark.cloud.ObjectStoreExample import org.apache.hadoop.fs.Path import org.apache.hadoop.util.PureJavaCrc32 import org.apache.spark.{SparkConf, SparkContext} /** * Generate files containing some numbers in the remote repository. */ class CloudFileGenerator extends ObjectStoreExample { /** * List of the command args for the current example. * @return a string */ override protected def usageArgs(): String = { "<dest> <months> <files-per-month> <row-count>" } /** * Generate a file containing some numbers in the remote repository. * @param sparkConf configuration to use * @param args argument array; the first argument must be the destination filename. * @return an exit code */ override def action(sparkConf: SparkConf, args: Array[String]): Int = { val l = args.length if (l < 1) { // wrong number of arguments return usage() } val dest = args(0) val monthCount = intArg(args, 1, 1) val fileCount = intArg(args, 2, 1) val rowCount = longArg(args, 3, 1000) applyObjectStoreConfigurationOptions(sparkConf, false) val sc = new SparkContext(sparkConf) try { val suffix = ".txt" val destURI = new URI(dest) val destPath = new Path(destURI) val months = 1 to monthCount // list of (YYYY, 1), (YYYY, 2), ... 
val monthsByYear = months.flatMap(m => months.map(m => (2016 + m / 12, m % 12)) ) val filePerMonthRange = 1 to fileCount // build paths like 2016/2016-05/2016-05-0012.txt val filepaths = monthsByYear.flatMap { case (year, month) => filePerMonthRange.map(ranger => "%1$04d/%1$04d-%2$02d/%1$04d-%2$02d-%3$04d".format(year, month, ranger) + suffix ) }.map(new Path(destPath, _)) val fileURIs = filepaths.map(_.toUri) val destFS = destPath.getFileSystem(sc.hadoopConfiguration) // create the parent directories or fail rm(destFS, destPath) destFS.mkdirs(destPath.getParent()) val configSerDeser = new ConfigSerDeser(sc.hadoopConfiguration) // RDD to save the text to every path in the files RDD, returning path and // the time it took val filesRDD = sc.parallelize(fileURIs) val putDataRDD = filesRDD.map(uri => { val jobDest = new Path(uri) val hc = configSerDeser.get() val fs = jobDest.getFileSystem(hc) var written = 0 val crc = new PureJavaCrc32 val executionTime = time { val out = fs.create(jobDest, true) var row = 0 while (row < rowCount) { row += 1 val line = "%08x\\n".format(row).getBytes out.write(line) written += line.length crc.update(line, 0, line.length) } out.close() logInfo(s"File System = $fs") } (jobDest.toUri, written, crc.getValue, executionTime) }).cache() logInfo(s"Initial File System state = $destFS") // Trigger the evaluations of the RDDs val (executionResults, collectionTime) = durationOf { putDataRDD.collect() } // use the length of the first file as the length of all of them val expectedFileLength: Long = executionResults(0)._2 val execTimeRDD = putDataRDD.map(_._2) val aggregatedExecutionTime = execTimeRDD.sum().toLong logInfo(s"Time to generate ${filesRDD.count()} entries ${toHuman(collectionTime)}") logInfo(s"Aggregate execution time ${toHuman(aggregatedExecutionTime)}") logInfo(s"File System = $destFS") // list all files under the path using listFiles; verify size val (listing, listDuration) = durationOf(destFS.listFiles(destPath, true)) logInfo(s"time to list paths under $destPath: $listDuration") while (listing.hasNext) { val entry = listing.next() verifyLength(entry.getPath.toString, expectedFileLength, entry.getLen) } // do a parallel scan of a directory and count the entries val lenAccumulator = sc.longAccumulator("totalsize") val dataGlobPath = new Path(destPath, "*/*/*" + suffix) val fileContentRDD = sc.wholeTextFiles(dataGlobPath.toUri.toString) val fileSizeRdd = fileContentRDD.map(record => { val actual = record._2.length val name = record._1 verifyLength(name, expectedFileLength, actual) lenAccumulator.add(actual) actual }) logDuration("Verify the length of all the files")(fileSizeRdd.count()) } finally { logInfo("Stopping Spark Context") sc.stop() } 0 } // verify that the length of a listed file is that expected def verifyLength(name: String, expectedFileLength: Long, actual: Long): Unit = { if (expectedFileLength != actual) { throw new IOException( s"Expected length of ${name}: $expectedFileLength;" + s" actual $actual") } } } object CloudFileGenerator { def main(args: Array[String]) { new CloudFileGenerator().run(args) } }
hortonworks-spark/cloud-integration
cloud-examples/src/main/scala/com/cloudera/spark/cloud/operations/CloudFileGenerator.scala
Scala
apache-2.0
6,012
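The generator is driven entirely by its argument array (<dest> <months> <files-per-month> <row-count>), so a run can be scripted as below; the s3a bucket name is invented for the example and the usual object-store credentials are assumed to be configured.

import com.cloudera.spark.cloud.operations.CloudFileGenerator

object GenerateSampleFiles {
  def main(args: Array[String]): Unit = {
    // dest, months, files per month, rows per file
    CloudFileGenerator.main(Array("s3a://example-bucket/generated", "2", "3", "1000"))
  }
}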
/** * Copyright (c) 2015, Cloudera, Inc. All Rights Reserved. * * Cloudera, Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"). You may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for * the specific language governing permissions and limitations under the * License. */ package com.cloudera.sparkts import breeze.linalg._ import breeze.plot._ import org.apache.commons.math3.distribution.NormalDistribution object EasyPlot { def ezplot(vec: Vector[Double], style: Char): Unit = { val f = Figure() val p = f.subplot(0) p += plot((0 until vec.length).map(_.toDouble).toArray, vec, style = style) } def ezplot(vec: Vector[Double]): Unit = ezplot(vec, '-') def ezplot(arr: Array[Double], style: Char): Unit = { val f = Figure() val p = f.subplot(0) p += plot(arr.indices.map(_.toDouble).toArray, arr, style = style) } def ezplot(arr: Array[Double]): Unit = ezplot(arr, '-') def ezplot(vecs: Seq[Vector[Double]], style: Char): Unit = { val f = Figure() val p = f.subplot(0) val first = vecs.head vecs.foreach { vec => p += plot((0 until first.length).map(_.toDouble).toArray, vec, style) } } def ezplot(vecs: Seq[Vector[Double]]): Unit = ezplot(vecs, '-') /** * Autocorrelation function plot * @param data array of data to analyze * @param maxLag maximum lag for autocorrelation * @param conf confidence bounds to display */ def acfPlot(data: Array[Double], maxLag: Int, conf: Double = 0.95): Unit = { // calculate correlations and confidence bound val autoCorrs = UnivariateTimeSeries.autocorr(data, maxLag) val confVal = calcConfVal(conf, data.length) // Basic plot information val f = Figure() val p = f.subplot(0) p.title = "Autocorrelation function" p.xlabel = "Lag" p.ylabel = "Autocorrelation" drawCorrPlot(autoCorrs, confVal, p) } /** * Partial autocorrelation function plot * @param data array of data to analyze * @param maxLag maximum lag for partial autocorrelation function * @param conf confidence bounds to display */ def pacfPlot(data: Array[Double], maxLag: Int, conf: Double = 0.95): Unit = { // create AR(maxLag) model, retrieve coefficients and calculate confidence bound val model = Autoregression.fitModel(new DenseVector(data), maxLag) val pCorrs = model.coefficients // partial autocorrelations are the coefficients in AR(n) model val confVal = calcConfVal(conf, data.length) // Basic plot information val f = Figure() val p = f.subplot(0) p.title = "Partial autocorrelation function" p.xlabel = "Lag" p.ylabel = "Partial Autocorrelation" drawCorrPlot(pCorrs, confVal, p) } private[sparkts] def calcConfVal(conf:Double, n: Int): Double = { val stdNormDist = new NormalDistribution(0, 1) val pVal = (1 - conf) / 2.0 stdNormDist.inverseCumulativeProbability(1 - pVal) / Math.sqrt(n) } private[sparkts] def drawCorrPlot(corrs: Array[Double], confVal: Double, p: Plot): Unit = { // make decimal ticks visible p.setYAxisDecimalTickUnits() // plot correlations as vertical lines val verticalLines = corrs.zipWithIndex.map { case (corr, ix) => (Array(ix.toDouble + 1, ix.toDouble + 1), Array(0, corr)) } verticalLines.foreach { case (xs, ys) => p += plot(xs, ys) } // plot confidence intervals as horizontal lines val n = corrs.length Array(confVal, -1 * confVal).foreach { conf => val xs = (0 to n).toArray.map(_.toDouble) val ys = Array.fill(n + 1)(conf) p 
+= plot(xs, ys, '-', colorcode = "red") } } }
linearregression/spark-timeseries
src/main/scala/com/cloudera/sparkts/EasyPlot.scala
Scala
apache-2.0
3,910
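A REPL-style sketch of the plotting helpers above, assuming a graphical display is available; the noisy sine series is made up for illustration.

import com.cloudera.sparkts.EasyPlot

val rng = new scala.util.Random(42)
val series = Array.tabulate(200)(i => math.sin(i / 10.0) + 0.1 * rng.nextGaussian())

EasyPlot.ezplot(series)                // line plot of the raw series
EasyPlot.acfPlot(series, maxLag = 30)  // autocorrelation with the default 95% bounds
EasyPlot.pacfPlot(series, maxLag = 30) // partial autocorrelation via an AR(30) fit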
package gitbucket.core.service import java.util.Date import gitbucket.core.model.Account import gitbucket.core.util._ import gitbucket.core.util.ControlUtil._ import org.eclipse.jgit.api.Git import org.eclipse.jgit.treewalk.CanonicalTreeParser import org.eclipse.jgit.lib._ import org.eclipse.jgit.dircache.DirCache import org.eclipse.jgit.diff.{DiffEntry, DiffFormatter} import java.io.ByteArrayInputStream import org.eclipse.jgit.patch._ import org.eclipse.jgit.api.errors.PatchFormatException import scala.collection.JavaConverters._ import RepositoryService.RepositoryInfo object WikiService { /** * The model for wiki page. * * @param name the page name * @param content the page content * @param committer the last committer * @param time the last modified time * @param id the latest commit id */ case class WikiPageInfo(name: String, content: String, committer: String, time: Date, id: String) /** * The model for wiki page history. * * @param name the page name * @param committer the committer the committer * @param message the commit message * @param date the commit date */ case class WikiPageHistoryInfo(name: String, committer: String, message: String, date: Date) def httpUrl(repository: RepositoryInfo) = repository.httpUrl.replaceFirst("\\.git\\Z", ".wiki.git") def sshUrl(repository: RepositoryInfo, settings: SystemSettingsService.SystemSettings, userName: String) = repository.sshUrl(settings.sshPort.getOrElse(SystemSettingsService.DefaultSshPort), userName).replaceFirst("\\.git\\Z", ".wiki.git") } trait WikiService { import WikiService._ def createWikiRepository(loginAccount: Account, owner: String, repository: String): Unit = LockUtil.lock(s"${owner}/${repository}/wiki"){ defining(Directory.getWikiRepositoryDir(owner, repository)){ dir => if(!dir.exists){ JGitUtil.initRepository(dir) saveWikiPage(owner, repository, "Home", "Home", s"Welcome to the ${repository} wiki!!", loginAccount, "Initial Commit", None) } } } /** * Returns the wiki page. */ def getWikiPage(owner: String, repository: String, pageName: String): Option[WikiPageInfo] = { using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git => if(!JGitUtil.isEmpty(git)){ JGitUtil.getFileList(git, "master", ".").find(_.name == pageName + ".md").map { file => WikiPageInfo(file.name, StringUtil.convertFromByteArray(git.getRepository.open(file.id).getBytes), file.author, file.time, file.commitId) } } else None } } /** * Returns the content of the specified file. */ def getFileContent(owner: String, repository: String, path: String): Option[Array[Byte]] = using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git => if(!JGitUtil.isEmpty(git)){ val index = path.lastIndexOf('/') val parentPath = if(index < 0) "." else path.substring(0, index) val fileName = if(index < 0) path else path.substring(index + 1) JGitUtil.getFileList(git, "master", parentPath).find(_.name == fileName).map { file => git.getRepository.open(file.id).getBytes } } else None } /** * Returns the list of wiki page names. */ def getWikiPageList(owner: String, repository: String): List[String] = { using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git => JGitUtil.getFileList(git, "master", ".") .filter(_.name.endsWith(".md")).filterNot(_.name.startsWith("_")) .map(_.name.stripSuffix(".md")) .sortBy(x => x) } } /** * Reverts specified changes. 
*/ def revertWikiPage(owner: String, repository: String, from: String, to: String, committer: Account, pageName: Option[String]): Boolean = { case class RevertInfo(operation: String, filePath: String, source: String) try { LockUtil.lock(s"${owner}/${repository}/wiki"){ using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git => val reader = git.getRepository.newObjectReader val oldTreeIter = new CanonicalTreeParser oldTreeIter.reset(reader, git.getRepository.resolve(from + "^{tree}")) val newTreeIter = new CanonicalTreeParser newTreeIter.reset(reader, git.getRepository.resolve(to + "^{tree}")) val diffs = git.diff.setNewTree(oldTreeIter).setOldTree(newTreeIter).call.asScala.filter { diff => pageName match { case Some(x) => diff.getNewPath == x + ".md" case None => true } } val patch = using(new java.io.ByteArrayOutputStream()){ out => val formatter = new DiffFormatter(out) formatter.setRepository(git.getRepository) formatter.format(diffs.asJava) new String(out.toByteArray, "UTF-8") } val p = new Patch() p.parse(new ByteArrayInputStream(patch.getBytes("UTF-8"))) if(!p.getErrors.isEmpty){ throw new PatchFormatException(p.getErrors()) } val revertInfo = (p.getFiles.asScala.map { fh => fh.getChangeType match { case DiffEntry.ChangeType.MODIFY => { val source = getWikiPage(owner, repository, fh.getNewPath.stripSuffix(".md")).map(_.content).getOrElse("") val applied = PatchUtil.apply(source, patch, fh) if(applied != null){ Seq(RevertInfo("ADD", fh.getNewPath, applied)) } else Nil } case DiffEntry.ChangeType.ADD => { val applied = PatchUtil.apply("", patch, fh) if(applied != null){ Seq(RevertInfo("ADD", fh.getNewPath, applied)) } else Nil } case DiffEntry.ChangeType.DELETE => { Seq(RevertInfo("DELETE", fh.getNewPath, "")) } case DiffEntry.ChangeType.RENAME => { val applied = PatchUtil.apply("", patch, fh) if(applied != null){ Seq(RevertInfo("DELETE", fh.getOldPath, ""), RevertInfo("ADD", fh.getNewPath, applied)) } else { Seq(RevertInfo("DELETE", fh.getOldPath, "")) } } case _ => Nil } }).flatten if(revertInfo.nonEmpty){ val builder = DirCache.newInCore.builder() val inserter = git.getRepository.newObjectInserter() val headId = git.getRepository.resolve(Constants.HEAD + "^{commit}") JGitUtil.processTree(git, headId){ (path, tree) => if(revertInfo.find(x => x.filePath == path).isEmpty){ builder.add(JGitUtil.createDirCacheEntry(path, tree.getEntryFileMode, tree.getEntryObjectId)) } } revertInfo.filter(_.operation == "ADD").foreach { x => builder.add(JGitUtil.createDirCacheEntry(x.filePath, FileMode.REGULAR_FILE, inserter.insert(Constants.OBJ_BLOB, x.source.getBytes("UTF-8")))) } builder.finish() JGitUtil.createNewCommit(git, inserter, headId, builder.getDirCache.writeTree(inserter), Constants.HEAD, committer.fullName, committer.mailAddress, pageName match { case Some(x) => s"Revert ${from} ... ${to} on ${x}" case None => s"Revert ${from} ... ${to}" }) } } } true } catch { case e: Exception => { e.printStackTrace() false } } } /** * Save the wiki page and return the commit id. 
*/ def saveWikiPage(owner: String, repository: String, currentPageName: String, newPageName: String, content: String, committer: Account, message: String, currentId: Option[String]): Option[String] = { LockUtil.lock(s"${owner}/${repository}/wiki"){ using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git => val builder = DirCache.newInCore.builder() val inserter = git.getRepository.newObjectInserter() val headId = git.getRepository.resolve(Constants.HEAD + "^{commit}") var created = true var updated = false var removed = false if(headId != null){ JGitUtil.processTree(git, headId){ (path, tree) => if(path == currentPageName + ".md" && currentPageName != newPageName){ removed = true } else if(path != newPageName + ".md"){ builder.add(JGitUtil.createDirCacheEntry(path, tree.getEntryFileMode, tree.getEntryObjectId)) } else { created = false updated = JGitUtil.getContentFromId(git, tree.getEntryObjectId, true).map(new String(_, "UTF-8") != content).getOrElse(false) } } } if(created || updated || removed){ builder.add(JGitUtil.createDirCacheEntry(newPageName + ".md", FileMode.REGULAR_FILE, inserter.insert(Constants.OBJ_BLOB, content.getBytes("UTF-8")))) builder.finish() val newHeadId = JGitUtil.createNewCommit(git, inserter, headId, builder.getDirCache.writeTree(inserter), Constants.HEAD, committer.fullName, committer.mailAddress, if(message.trim.length == 0) { if(removed){ s"Rename ${currentPageName} to ${newPageName}" } else if(created){ s"Created ${newPageName}" } else { s"Updated ${newPageName}" } } else { message }) Some(newHeadId.getName) } else None } } } /** * Delete the wiki page. */ def deleteWikiPage(owner: String, repository: String, pageName: String, committer: String, mailAddress: String, message: String): Unit = { LockUtil.lock(s"${owner}/${repository}/wiki"){ using(Git.open(Directory.getWikiRepositoryDir(owner, repository))){ git => val builder = DirCache.newInCore.builder() val inserter = git.getRepository.newObjectInserter() val headId = git.getRepository.resolve(Constants.HEAD + "^{commit}") var removed = false JGitUtil.processTree(git, headId){ (path, tree) => if(path != pageName + ".md"){ builder.add(JGitUtil.createDirCacheEntry(path, tree.getEntryFileMode, tree.getEntryObjectId)) } else { removed = true } } if(removed){ builder.finish() JGitUtil.createNewCommit(git, inserter, headId, builder.getDirCache.writeTree(inserter), Constants.HEAD, committer, mailAddress, message) } } } } }
noc06140728/gitbucket
src/main/scala/gitbucket/core/service/WikiService.scala
Scala
apache-2.0
10,942
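A sketch of the typical call sequence against the trait above; the caller (anything mixing in WikiService) and the Account instance are assumed to already exist.

import gitbucket.core.model.Account
import gitbucket.core.service.WikiService

object WikiSketch {
  // Initialise the wiki repository if needed, then write (or rewrite) the Home page.
  // Returns the new commit id when a commit was actually made, None otherwise.
  def resetHome(service: WikiService, owner: String, repo: String, account: Account): Option[String] = {
    service.createWikiRepository(account, owner, repo)
    service.saveWikiPage(owner, repo, "Home", "Home",
      s"Welcome to the ${repo} wiki!", account, "Reset home page", currentId = None)
  }
}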
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.ml.optim import org.apache.spark.internal.Logging import org.apache.spark.ml.feature.Instance import org.apache.spark.ml.linalg._ import org.apache.spark.mllib.linalg.CholeskyDecomposition import org.apache.spark.rdd.RDD /** * Model fitted by [[WeightedLeastSquares]]. * @param coefficients model coefficients * @param intercept model intercept * @param diagInvAtWA diagonal of matrix (A^T * W * A)^-1 */ private[ml] class WeightedLeastSquaresModel( val coefficients: DenseVector, val intercept: Double, val diagInvAtWA: DenseVector) extends Serializable { def predict(features: Vector): Double = { BLAS.dot(coefficients, features) + intercept } } /** * Weighted least squares solver via normal equation. * Given weighted observations (w,,i,,, a,,i,,, b,,i,,), we use the following weighted least squares * formulation: * * min,,x,z,, 1/2 sum,,i,, w,,i,, (a,,i,,^T^ x + z - b,,i,,)^2^ / sum,,i,, w_i * + 1/2 lambda / delta sum,,j,, (sigma,,j,, x,,j,,)^2^, * * where lambda is the regularization parameter, and delta and sigma,,j,, are controlled by * [[standardizeLabel]] and [[standardizeFeatures]], respectively. * * Set [[regParam]] to 0.0 and turn off both [[standardizeFeatures]] and [[standardizeLabel]] to * match R's `lm`. * Turn on [[standardizeLabel]] to match R's `glmnet`. * * @param fitIntercept whether to fit intercept. If false, z is 0.0. * @param regParam L2 regularization parameter (lambda) * @param standardizeFeatures whether to standardize features. If true, sigma_,,j,, is the * population standard deviation of the j-th column of A. Otherwise, * sigma,,j,, is 1.0. * @param standardizeLabel whether to standardize label. If true, delta is the population standard * deviation of the label column b. Otherwise, delta is 1.0. */ private[ml] class WeightedLeastSquares( val fitIntercept: Boolean, val regParam: Double, val standardizeFeatures: Boolean, val standardizeLabel: Boolean) extends Logging with Serializable { import WeightedLeastSquares._ require(regParam >= 0.0, s"regParam cannot be negative: $regParam") if (regParam == 0.0) { logWarning("regParam is zero, which might cause numerical instability and overfitting.") } /** * Creates a [[WeightedLeastSquaresModel]] from an RDD of [[Instance]]s. 
*/ def fit(instances: RDD[Instance]): WeightedLeastSquaresModel = { val summary = instances.treeAggregate(new Aggregator)(_.add(_), _.merge(_)) summary.validate() logInfo(s"Number of instances: ${summary.count}.") val k = if (fitIntercept) summary.k + 1 else summary.k val triK = summary.triK val wSum = summary.wSum val bBar = summary.bBar val bStd = summary.bStd val aBar = summary.aBar val aVar = summary.aVar val abBar = summary.abBar val aaBar = summary.aaBar val aaValues = aaBar.values if (bStd == 0) { if (fitIntercept) { logWarning(s"The standard deviation of the label is zero, so the coefficients will be " + s"zeros and the intercept will be the mean of the label; as a result, " + s"training is not needed.") val coefficients = new DenseVector(Array.ofDim(k-1)) val intercept = bBar val diagInvAtWA = new DenseVector(Array(0D)) return new WeightedLeastSquaresModel(coefficients, intercept, diagInvAtWA) } else { require(!(regParam > 0.0 && standardizeLabel), "The standard deviation of the label is zero. " + "Model cannot be regularized with standardization=true") logWarning(s"The standard deviation of the label is zero. " + "Consider setting fitIntercept=true.") } } // add regularization to diagonals var i = 0 var j = 2 while (i < triK) { var lambda = regParam if (standardizeFeatures) { lambda *= aVar(j - 2) } if (standardizeLabel && bStd != 0) { lambda /= bStd } aaValues(i) += lambda i += j j += 1 } val aa = if (fitIntercept) { Array.concat(aaBar.values, aBar.values, Array(1.0)) } else { aaBar.values } val ab = if (fitIntercept) { Array.concat(abBar.values, Array(bBar)) } else { abBar.values } val x = CholeskyDecomposition.solve(aa, ab) val aaInv = CholeskyDecomposition.inverse(aa, k) // aaInv is a packed upper triangular matrix, here we get all elements on diagonal val diagInvAtWA = new DenseVector((1 to k).map { i => aaInv(i + (i - 1) * i / 2 - 1) / wSum }.toArray) val (coefficients, intercept) = if (fitIntercept) { (new DenseVector(x.slice(0, x.length - 1)), x.last) } else { (new DenseVector(x), 0.0) } new WeightedLeastSquaresModel(coefficients, intercept, diagInvAtWA) } } private[ml] object WeightedLeastSquares { /** * In order to take the normal equation approach efficiently, [[WeightedLeastSquares]] * only supports the number of features is no more than 4096. */ val MAX_NUM_FEATURES: Int = 4096 /** * Aggregator to provide necessary summary statistics for solving [[WeightedLeastSquares]]. */ // TODO: consolidate aggregates for summary statistics private class Aggregator extends Serializable { var initialized: Boolean = false var k: Int = _ var count: Long = _ var triK: Int = _ var wSum: Double = _ private var wwSum: Double = _ private var bSum: Double = _ private var bbSum: Double = _ private var aSum: DenseVector = _ private var abSum: DenseVector = _ private var aaSum: DenseVector = _ private def init(k: Int): Unit = { require(k <= MAX_NUM_FEATURES, "In order to take the normal equation approach efficiently, " + s"we set the max number of features to $MAX_NUM_FEATURES but got $k.") this.k = k triK = k * (k + 1) / 2 count = 0L wSum = 0.0 wwSum = 0.0 bSum = 0.0 bbSum = 0.0 aSum = new DenseVector(Array.ofDim(k)) abSum = new DenseVector(Array.ofDim(k)) aaSum = new DenseVector(Array.ofDim(triK)) initialized = true } /** * Adds an instance. */ def add(instance: Instance): this.type = { val Instance(l, w, f) = instance val ak = f.size if (!initialized) { init(ak) } assert(ak == k, s"Dimension mismatch. 
Expect vectors of size $k but got $ak.") count += 1L wSum += w wwSum += w * w bSum += w * l bbSum += w * l * l BLAS.axpy(w, f, aSum) BLAS.axpy(w * l, f, abSum) BLAS.spr(w, f, aaSum) this } /** * Merges another [[Aggregator]]. */ def merge(other: Aggregator): this.type = { if (!other.initialized) { this } else { if (!initialized) { init(other.k) } assert(k == other.k, s"dimension mismatch: this.k = $k but other.k = ${other.k}") count += other.count wSum += other.wSum wwSum += other.wwSum bSum += other.bSum bbSum += other.bbSum BLAS.axpy(1.0, other.aSum, aSum) BLAS.axpy(1.0, other.abSum, abSum) BLAS.axpy(1.0, other.aaSum, aaSum) this } } /** * Validates that we have seen observations. */ def validate(): Unit = { assert(initialized, "Training dataset is empty.") assert(wSum > 0.0, "Sum of weights cannot be zero.") } /** * Weighted mean of features. */ def aBar: DenseVector = { val output = aSum.copy BLAS.scal(1.0 / wSum, output) output } /** * Weighted mean of labels. */ def bBar: Double = bSum / wSum /** * Weighted population standard deviation of labels. */ def bStd: Double = math.sqrt(bbSum / wSum - bBar * bBar) /** * Weighted mean of (label * features). */ def abBar: DenseVector = { val output = abSum.copy BLAS.scal(1.0 / wSum, output) output } /** * Weighted mean of (features * features^T^). */ def aaBar: DenseVector = { val output = aaSum.copy BLAS.scal(1.0 / wSum, output) output } /** * Weighted population variance of features. */ def aVar: DenseVector = { val variance = Array.ofDim[Double](k) var i = 0 var j = 2 val aaValues = aaSum.values while (i < triK) { val l = j - 2 val aw = aSum(l) / wSum variance(l) = aaValues(i) / wSum - aw * aw i += j j += 1 } new DenseVector(variance) } } }
gioenn/xSpark
mllib/src/main/scala/org/apache/spark/ml/optim/WeightedLeastSquares.scala
Scala
apache-2.0
9,523
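Because the solver is private[ml], the sketch below would have to live under org.apache.spark.ml; it only illustrates the shape of the inputs (an RDD of Instance with label, weight and features) and the fitted model, using a made-up one-feature dataset and an existing SparkContext.

import org.apache.spark.SparkContext
import org.apache.spark.ml.feature.Instance
import org.apache.spark.ml.linalg.Vectors
import org.apache.spark.ml.optim.WeightedLeastSquares

object WlsSketch {
  def run(sc: SparkContext): Double = {
    // y = 2 * x with unit weights
    val instances = sc.parallelize(Seq(
      Instance(2.0, 1.0, Vectors.dense(1.0)),
      Instance(4.0, 1.0, Vectors.dense(2.0)),
      Instance(6.0, 1.0, Vectors.dense(3.0))))

    val model = new WeightedLeastSquares(
      fitIntercept = true, regParam = 0.0,
      standardizeFeatures = false, standardizeLabel = false).fit(instances)

    model.predict(Vectors.dense(4.0)) // expected to be close to 8.0
  }
}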
/** * This file is part of the TA Buddy project. * Copyright (c) 2012-2014 Alexey Aksenov [email protected] * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License version 3 * as published by the Free Software Foundation with the addition of the * following permission added to Section 15 as permitted in Section 7(a): * FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED * BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS», * Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS * THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS. * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Affero General Public License for more details. * You should have received a copy of the GNU Affero General Public License * along with this program; if not, see http://www.gnu.org/licenses or write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA, 02110-1301 USA, or download the license from the following URL: * http://www.gnu.org/licenses/agpl.html * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License. * * In accordance with Section 7(b) of the GNU Affero General Public License, * you must retain the producer line in every report, form or document * that is created or manipulated using TA Buddy. * * You can be released from the requirements of the license by purchasing * a commercial license. Buying such a license is mandatory as soon as you * develop commercial activities involving the TA Buddy software without * disclosing the source code of your own applications. * These activities include: offering paid services to customers, * serving files in a web or/and network application, * shipping TA Buddy with a closed source product.
* * For more information, please contact Digimead Team at this * address: [email protected] */ package org.digimead.tabuddy.desktop.model.definition.ui.dialog.eltemed import org.digimead.digi.lib.log.api.XLoggable import org.digimead.tabuddy.desktop.core.Messages import org.digimead.tabuddy.desktop.model.definition.Default import org.digimead.tabuddy.desktop.core.ui.support.{ SymbolValidator, Validator } import org.eclipse.core.databinding.observable.{ ChangeEvent, IChangeListener } import org.eclipse.jface.databinding.swt.WidgetProperties import org.eclipse.jface.viewers.{ CellEditor, CellLabelProvider, EditingSupport, TableViewer, TextCellEditor, ViewerCell } import org.eclipse.swt.SWT import org.eclipse.swt.events.VerifyEvent import org.eclipse.swt.graphics.Point import org.eclipse.swt.widgets.{ Composite, Control, Text } object ColumnId extends XLoggable { class TLabelProvider extends CellLabelProvider { override def update(cell: ViewerCell) = cell.getElement() match { case item: ElementTemplateEditor.Item ⇒ cell.setText(item.id) item.idError.foreach(err ⇒ cell.setImage(err._2)) case unknown ⇒ log.fatal("Unknown item " + unknown.getClass()) } override def getToolTipText(element: AnyRef): String = element match { case item: ElementTemplateEditor.Item ⇒ item.idError match { case Some(error) ⇒ error._1 case None ⇒ null } case unknown ⇒ log.fatal("Unknown item " + unknown.getClass()) null } override def getToolTipShift(obj: Object): Point = Default.toolTipShift override def getToolTipDisplayDelayTime(obj: Object): Int = Default.toolTipDisplayDelayTime override def getToolTipTimeDisplayed(obj: Object): Int = Default.toolTipTimeDisplayed } class TEditingSupport(viewer: TableViewer, container: ElementTemplateEditor) extends EditingSupport(viewer) { override protected def getCellEditor(element: AnyRef): CellEditor = new IdTextCellEditor(viewer.getTable(), element.asInstanceOf[ElementTemplateEditor.Item], container) override protected def canEdit(element: AnyRef): Boolean = true override protected def getValue(element: AnyRef): AnyRef = element match { case item: ElementTemplateEditor.Item ⇒ item.id case unknown ⇒ log.fatal("Unknown item " + unknown.getClass()) "" } override protected def setValue(element: AnyRef, value: AnyRef): Unit = element match { case before: ElementTemplateEditor.Item ⇒ val id = value.asInstanceOf[String].trim if (id.nonEmpty && before.id != id && !container.actualProperties.exists(_.id == id)) { val after = before.copy(id = id) container.updateActualProperty(before, container.validateItem(after)) } case unknown ⇒ log.fatal("Unknown item " + unknown.getClass()) } } class IdTextCellEditor(parent: Composite, item: ElementTemplateEditor.Item, container: ElementTemplateEditor) extends TextCellEditor(parent) { /** Creates the control for this cell editor under the given parent control. 
*/ override def createControl(parent: Composite): Control = { val text = super.createControl(parent).asInstanceOf[Text] val validator = SymbolValidator(text, true)(validate) WidgetProperties.text(SWT.Modify).observe(text).addChangeListener(new IChangeListener() { override def handleChange(event: ChangeEvent) = { val newId = text.getText().trim if (newId.isEmpty()) validator.withDecoration(validator.showDecorationRequired(_)) else if (container.actualProperties.exists(_.id == newId) && newId != item.id) validator.withDecoration(validator.showDecorationError(_, Messages.identificatorIsAlreadyInUse_text.format(newId))) else validator.withDecoration(_.hide) } }) text } /** Validates an input */ def validate(validator: Validator[VerifyEvent], event: VerifyEvent) = if (!event.doit) validator.withDecoration(validator.showDecorationError(_)) else validator.withDecoration(_.hide) } }
digimead/digi-TABuddy-desktop
part-model-definition/src/main/scala/org/digimead/tabuddy/desktop/model/definition/ui/dialog/eltemed/ColumnId.scala
Scala
agpl-3.0
6,342
package com.tritondigital.consul.http.client class NoNodeException(service: String) extends RuntimeException { override def getMessage = s"""No node found for service $service""" }
tritondigital/ConsulHttpClient
src/main/scala/com/tritondigital/consul/http/client/NoNodeException.scala
Scala
mit
186
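A one-line illustration of where the exception above would be raised; the node-picking helper is hypothetical.

import com.tritondigital.consul.http.client.NoNodeException

object NodePicker {
  // Pick the first resolved node for a service, or fail with NoNodeException.
  def pickNode(service: String, nodes: List[String]): String =
    nodes.headOption.getOrElse(throw new NoNodeException(service))
}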
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.spark.sql import org.apache.spark.sql.catalyst.TableIdentifier import org.apache.spark.sql.catalyst.plans.{Inner, InnerLike, LeftOuter, RightOuter} import org.apache.spark.sql.catalyst.plans.logical.{BROADCAST, Filter, HintInfo, Join, JoinHint, LogicalPlan, Project} import org.apache.spark.sql.connector.catalog.CatalogManager import org.apache.spark.sql.execution.FileSourceScanExec import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper import org.apache.spark.sql.execution.datasources.LogicalRelation import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec import org.apache.spark.sql.execution.joins.BroadcastHashJoinExec import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession class DataFrameJoinSuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper { import testImplicits._ test("join - join using") { val df = Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str") val df2 = Seq(1, 2, 3).map(i => (i, (i + 1).toString)).toDF("int", "str") checkAnswer( df.join(df2, "int"), Row(1, "1", "2") :: Row(2, "2", "3") :: Row(3, "3", "4") :: Nil) } test("join - join using multiple columns") { val df = Seq(1, 2, 3).map(i => (i, i + 1, i.toString)).toDF("int", "int2", "str") val df2 = Seq(1, 2, 3).map(i => (i, i + 1, (i + 1).toString)).toDF("int", "int2", "str") checkAnswer( df.join(df2, Seq("int", "int2")), Row(1, 2, "1", "2") :: Row(2, 3, "2", "3") :: Row(3, 4, "3", "4") :: Nil) } test("join - sorted columns not in join's outputSet") { val df = Seq((1, 2, "1"), (3, 4, "3")).toDF("int", "int2", "str_sort").as("df1") val df2 = Seq((1, 3, "1"), (5, 6, "5")).toDF("int", "int2", "str").as("df2") val df3 = Seq((1, 3, "1"), (5, 6, "5")).toDF("int", "int2", "str").as("df3") checkAnswer( df.join(df2, $"df1.int" === $"df2.int", "outer").select($"df1.int", $"df2.int2") .orderBy(Symbol("str_sort").asc, Symbol("str").asc), Row(null, 6) :: Row(1, 3) :: Row(3, null) :: Nil) checkAnswer( df2.join(df3, $"df2.int" === $"df3.int", "inner") .select($"df2.int", $"df3.int").orderBy($"df2.str".desc), Row(5, 5) :: Row(1, 1) :: Nil) } test("join - join using multiple columns and specifying join type") { val df = Seq((1, 2, "1"), (3, 4, "3")).toDF("int", "int2", "str") val df2 = Seq((1, 3, "1"), (5, 6, "5")).toDF("int", "int2", "str") checkAnswer( df.join(df2, Seq("int", "str"), "inner"), Row(1, "1", 2, 3) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "left"), Row(1, "1", 2, 3) :: Row(3, "3", 4, null) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "right"), Row(1, "1", 2, 3) :: Row(5, "5", null, 6) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "outer"), Row(1, "1", 2, 3) :: Row(3, "3", 4, 
null) :: Row(5, "5", null, 6) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "left_semi"), Row(1, "1", 2) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "semi"), Row(1, "1", 2) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "left_anti"), Row(3, "3", 4) :: Nil) checkAnswer( df.join(df2, Seq("int", "str"), "anti"), Row(3, "3", 4) :: Nil) } test("join - cross join") { val df1 = Seq((1, "1"), (3, "3")).toDF("int", "str") val df2 = Seq((2, "2"), (4, "4")).toDF("int", "str") checkAnswer( df1.crossJoin(df2), Row(1, "1", 2, "2") :: Row(1, "1", 4, "4") :: Row(3, "3", 2, "2") :: Row(3, "3", 4, "4") :: Nil) checkAnswer( df2.crossJoin(df1), Row(2, "2", 1, "1") :: Row(2, "2", 3, "3") :: Row(4, "4", 1, "1") :: Row(4, "4", 3, "3") :: Nil) } test("broadcast join hint using broadcast function") { val df1 = Seq((1, "1"), (2, "2")).toDF("key", "value") val df2 = Seq((1, "1"), (2, "2")).toDF("key", "value") // equijoin - should be converted into broadcast join val plan1 = df1.join(broadcast(df2), "key").queryExecution.sparkPlan assert(plan1.collect { case p: BroadcastHashJoinExec => p }.size === 1) // no join key -- should not be a broadcast join val plan2 = df1.crossJoin(broadcast(df2)).queryExecution.sparkPlan assert(plan2.collect { case p: BroadcastHashJoinExec => p }.size === 0) // planner should not crash without a join broadcast(df1).queryExecution.sparkPlan // SPARK-12275: no physical plan for BroadcastHint in some condition withTempPath { path => df1.write.parquet(path.getCanonicalPath) val pf1 = spark.read.parquet(path.getCanonicalPath) assert(df1.crossJoin(broadcast(pf1)).count() === 4) } } test("broadcast join hint using Dataset.hint") { // make sure a giant join is not broadcastable val plan1 = spark.range(10e10.toLong) .join(spark.range(10e10.toLong), "id") .queryExecution.executedPlan assert(plan1.collect { case p: BroadcastHashJoinExec => p }.size == 0) // now with a hint it should be broadcasted val plan2 = spark.range(10e10.toLong) .join(spark.range(10e10.toLong).hint("broadcast"), "id") .queryExecution.executedPlan assert(collect(plan2) { case p: BroadcastHashJoinExec => p }.size == 1) } test("join - outer join conversion") { val df = Seq((1, 2, "1"), (3, 4, "3")).toDF("int", "int2", "str").as("a") val df2 = Seq((1, 3, "1"), (5, 6, "5")).toDF("int", "int2", "str").as("b") // outer -> left val outerJoin2Left = df.join(df2, $"a.int" === $"b.int", "outer").where($"a.int" >= 3) assert(outerJoin2Left.queryExecution.optimizedPlan.collect { case j @ Join(_, _, LeftOuter, _, _) => j }.size === 1) checkAnswer( outerJoin2Left, Row(3, 4, "3", null, null, null) :: Nil) // outer -> right val outerJoin2Right = df.join(df2, $"a.int" === $"b.int", "outer").where($"b.int" >= 3) assert(outerJoin2Right.queryExecution.optimizedPlan.collect { case j @ Join(_, _, RightOuter, _, _) => j }.size === 1) checkAnswer( outerJoin2Right, Row(null, null, null, 5, 6, "5") :: Nil) // outer -> inner val outerJoin2Inner = df.join(df2, $"a.int" === $"b.int", "outer"). 
where($"a.int" === 1 && $"b.int2" === 3) assert(outerJoin2Inner.queryExecution.optimizedPlan.collect { case j @ Join(_, _, Inner, _, _) => j }.size === 1) checkAnswer( outerJoin2Inner, Row(1, 2, "1", 1, 3, "1") :: Nil) // right -> inner val rightJoin2Inner = df.join(df2, $"a.int" === $"b.int", "right").where($"a.int" > 0) assert(rightJoin2Inner.queryExecution.optimizedPlan.collect { case j @ Join(_, _, Inner, _, _) => j }.size === 1) checkAnswer( rightJoin2Inner, Row(1, 2, "1", 1, 3, "1") :: Nil) // left -> inner val leftJoin2Inner = df.join(df2, $"a.int" === $"b.int", "left").where($"b.int2" > 0) assert(leftJoin2Inner.queryExecution.optimizedPlan.collect { case j @ Join(_, _, Inner, _, _) => j }.size === 1) checkAnswer( leftJoin2Inner, Row(1, 2, "1", 1, 3, "1") :: Nil) } test("process outer join results using the non-nullable columns in the join input") { // Filter data using a non-nullable column from a right table val df1 = Seq((0, 0), (1, 0), (2, 0), (3, 0), (4, 0)).toDF("id", "count") val df2 = Seq(Tuple1(0), Tuple1(1)).toDF("id").groupBy("id").count checkAnswer( df1.join(df2, df1("id") === df2("id"), "left_outer").filter(df2("count").isNull), Row(2, 0, null, null) :: Row(3, 0, null, null) :: Row(4, 0, null, null) :: Nil ) // Coalesce data using non-nullable columns in input tables val df3 = Seq((1, 1)).toDF("a", "b") val df4 = Seq((2, 2)).toDF("a", "b") checkAnswer( df3.join(df4, df3("a") === df4("a"), "outer") .select(coalesce(df3("a"), df3("b")), coalesce(df4("a"), df4("b"))), Row(1, null) :: Row(null, 2) :: Nil ) } test("SPARK-16991: Full outer join followed by inner join produces wrong results") { val a = Seq((1, 2), (2, 3)).toDF("a", "b") val b = Seq((2, 5), (3, 4)).toDF("a", "c") val c = Seq((3, 1)).toDF("a", "d") val ab = a.join(b, Seq("a"), "fullouter") checkAnswer(ab.join(c, "a"), Row(3, null, 4, 1) :: Nil) } test("SPARK-17685: WholeStageCodegenExec throws IndexOutOfBoundsException") { val df = Seq((1, 1, "1"), (2, 2, "3")).toDF("int", "int2", "str") val df2 = Seq((1, 1, "1"), (2, 3, "5")).toDF("int", "int2", "str") val limit = 1310721 val innerJoin = df.limit(limit).join(df2.limit(limit), Seq("int", "int2"), "inner") .agg(count($"int")) checkAnswer(innerJoin, Row(1) :: Nil) } test("SPARK-23087: don't throw Analysis Exception in CheckCartesianProduct when join condition " + "is false or null") { withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "false") { val df = spark.range(10) val dfNull = spark.range(10).select(lit(null).as("b")) df.join(dfNull, $"id" === $"b", "left").queryExecution.optimizedPlan val dfOne = df.select(lit(1).as("a")) val dfTwo = spark.range(10).select(lit(2).as("b")) dfOne.join(dfTwo, $"a" === $"b", "left").queryExecution.optimizedPlan } } test("SPARK-24385: Resolve ambiguity in self-joins with EqualNullSafe") { withSQLConf(SQLConf.CROSS_JOINS_ENABLED.key -> "false") { val df = spark.range(2) // this throws an exception before the fix df.join(df, df("id") <=> df("id")).queryExecution.optimizedPlan } } def extractLeftDeepInnerJoins(plan: LogicalPlan): Seq[LogicalPlan] = plan match { case j @ Join(left, right, _: InnerLike, _, _) => right +: extractLeftDeepInnerJoins(left) case Filter(_, child) => extractLeftDeepInnerJoins(child) case Project(_, child) => extractLeftDeepInnerJoins(child) case _ => Seq(plan) } test("SPARK-24690 enables star schema detection even if CBO disabled") { withTable("r0", "r1", "r2", "r3") { withTempDir { dir => withSQLConf( SQLConf.STARSCHEMA_DETECTION.key -> "true", SQLConf.CBO_ENABLED.key -> "false", SQLConf.PLAN_STATS_ENABLED.key 
-> "true") { val path = dir.getAbsolutePath // Collects column statistics first spark.range(300).selectExpr("id AS a", "id AS b", "id AS c") .write.mode("overwrite").parquet(s"$path/r0") spark.read.parquet(s"$path/r0").write.saveAsTable("r0") spark.sql("ANALYZE TABLE r0 COMPUTE STATISTICS FOR COLUMNS a, b, c") spark.range(10).selectExpr("id AS a", "id AS d") .write.mode("overwrite").parquet(s"$path/r1") spark.read.parquet(s"$path/r1").write.saveAsTable("r1") spark.sql("ANALYZE TABLE r1 COMPUTE STATISTICS FOR COLUMNS a") spark.range(50).selectExpr("id AS b", "id AS e") .write.mode("overwrite").parquet(s"$path/r2") spark.read.parquet(s"$path/r2").write.saveAsTable("r2") spark.sql("ANALYZE TABLE r2 COMPUTE STATISTICS FOR COLUMNS b") spark.range(1).selectExpr("id AS c", "id AS f") .write.mode("overwrite").parquet(s"$path/r3") spark.read.parquet(s"$path/r3").write.saveAsTable("r3") spark.sql("ANALYZE TABLE r3 COMPUTE STATISTICS FOR COLUMNS c") val resultDf = sql( s"""SELECT * FROM r0, r1, r2, r3 | WHERE | r0.a = r1.a AND | r1.d >= 3 AND | r0.b = r2.b AND | r2.e >= 5 AND | r0.c = r3.c AND | r3.f <= 100 """.stripMargin) val optimized = resultDf.queryExecution.optimizedPlan val optJoins = extractLeftDeepInnerJoins(optimized) val joinOrder = optJoins .flatMap(_.collect { case p: LogicalRelation => p.catalogTable }.head) .map(_.identifier.identifier) assert(joinOrder === Seq("r2", "r1", "r3", "r0")) } } } } test("Supports multi-part names for broadcast hint resolution") { val (table1Name, table2Name) = ("t1", "t2") withTempDatabase { dbName => withTable(table1Name, table2Name) { withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") { spark.range(50).write.saveAsTable(s"$dbName.$table1Name") spark.range(100).write.saveAsTable(s"$dbName.$table2Name") def checkIfHintApplied(df: DataFrame): Unit = { val sparkPlan = df.queryExecution.executedPlan val broadcastHashJoins = sparkPlan.collect { case p: BroadcastHashJoinExec => p } assert(broadcastHashJoins.size == 1) val broadcastExchanges = broadcastHashJoins.head.collect { case p: BroadcastExchangeExec => p } assert(broadcastExchanges.size == 1) val tables = broadcastExchanges.head.collect { case FileSourceScanExec(_, _, _, _, _, _, _, Some(tableIdent)) => tableIdent } assert(tables.size == 1) assert(tables.head === TableIdentifier(table1Name, Some(dbName))) } def checkIfHintNotApplied(df: DataFrame): Unit = { val sparkPlan = df.queryExecution.executedPlan val broadcastHashJoins = sparkPlan.collect { case p: BroadcastHashJoinExec => p } assert(broadcastHashJoins.isEmpty) } def sqlTemplate(tableName: String, hintTableName: String): DataFrame = { sql(s"SELECT /*+ BROADCASTJOIN($hintTableName) */ * " + s"FROM $tableName, $dbName.$table2Name " + s"WHERE $tableName.id = $table2Name.id") } def dfTemplate(tableName: String, hintTableName: String): DataFrame = { spark.table(tableName).join(spark.table(s"$dbName.$table2Name"), "id") .hint("broadcast", hintTableName) } sql(s"USE $dbName") checkIfHintApplied(sqlTemplate(table1Name, table1Name)) checkIfHintApplied(sqlTemplate(s"$dbName.$table1Name", s"$dbName.$table1Name")) checkIfHintApplied(sqlTemplate(s"$dbName.$table1Name", table1Name)) checkIfHintNotApplied(sqlTemplate(table1Name, s"$dbName.$table1Name")) checkIfHintApplied(dfTemplate(table1Name, table1Name)) checkIfHintApplied(dfTemplate(s"$dbName.$table1Name", s"$dbName.$table1Name")) checkIfHintApplied(dfTemplate(s"$dbName.$table1Name", table1Name)) checkIfHintApplied(dfTemplate(table1Name, s"$dbName.$table1Name")) 
checkIfHintApplied(dfTemplate(table1Name, s"${CatalogManager.SESSION_CATALOG_NAME}.$dbName.$table1Name")) withView("tv") { sql(s"CREATE VIEW tv AS SELECT * FROM $dbName.$table1Name") checkIfHintApplied(sqlTemplate("tv", "tv")) checkIfHintNotApplied(sqlTemplate("tv", s"$dbName.tv")) checkIfHintApplied(dfTemplate("tv", "tv")) checkIfHintApplied(dfTemplate("tv", s"$dbName.tv")) } } } } } test("The same table name exists in two databases for broadcast hint resolution") { val (db1Name, db2Name) = ("db1", "db2") withDatabase(db1Name, db2Name) { withTable("t") { withSQLConf(SQLConf.AUTO_BROADCASTJOIN_THRESHOLD.key -> "-1") { sql(s"CREATE DATABASE $db1Name") sql(s"CREATE DATABASE $db2Name") spark.range(1).write.saveAsTable(s"$db1Name.t") spark.range(1).write.saveAsTable(s"$db2Name.t") // Checks if a broadcast hint applied in both sides val statement = s"SELECT /*+ BROADCASTJOIN(t) */ * FROM $db1Name.t, $db2Name.t " + s"WHERE $db1Name.t.id = $db2Name.t.id" sql(statement).queryExecution.optimizedPlan match { case Join(_, _, _, _, JoinHint(Some(HintInfo(Some(BROADCAST))), Some(HintInfo(Some(BROADCAST))))) => case _ => fail("broadcast hint not found in both tables") } } } } } }
dbtsai/spark
sql/core/src/test/scala/org/apache/spark/sql/DataFrameJoinSuite.scala
Scala
apache-2.0
17,047
package mesosphere.marathon import com.google.protobuf.ByteString import scala.collection.JavaConverters._ class ContainerInfo protected (val image: String, val options: Seq[String]) { def toProto: Protos.ContainerInfo = Protos.ContainerInfo.newBuilder() .setImage(ByteString.copyFromUtf8(image)) .addAllOptions(options.map(ByteString.copyFromUtf8(_)).asJava) .build() } case object EmptyContainerInfo extends ContainerInfo("", Nil) object ContainerInfo { def apply(proto: Protos.ContainerInfo): ContainerInfo = ContainerInfo( proto.getImage.toStringUtf8, proto.getOptionsList.asScala.map(_.toStringUtf8).toSeq ) def apply(image: String = "", options: Seq[String] = Nil) = { if (image.isEmpty && options.isEmpty) EmptyContainerInfo else new ContainerInfo(image, options) } }
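// Editor's addition — a minimal usage sketch, not part of the original file. The image
// and option values are hypothetical; it only shows the protobuf round trip that the two
// constructors above support.
object ContainerInfoExample {
  def roundTrip(): ContainerInfo = {
    val original = ContainerInfo("docker:///ubuntu:14.04", Seq("-v", "/host/data:/data"))
    // serialize to Protos.ContainerInfo and rebuild the value object from it
    ContainerInfo(original.toProto)
  }
}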
MiLk/marathon
src/main/scala/mesosphere/marathon/ContainerInfo.scala
Scala
apache-2.0
841
package org.openurp.edu.eams.teach.schedule.json import org.openurp.edu.eams.base.model.RoomBean import org.openurp.edu.eams.base.model.json.RoomBeanGsonAdapter import org.openurp.edu.eams.teach.lesson.model.ArrangeSuggestBean import org.openurp.edu.eams.teach.lesson.model.SuggestActivityBean import com.google.gson.Gson import com.google.gson.GsonBuilder object ArrangeSuggestGsonBuilder { def build(): Gson = { new GsonBuilder().registerTypeAdapter(classOf[ArrangeSuggestBean], new ArrangeSuggestGsonAdapter()) .registerTypeAdapter(classOf[SuggestActivityBean], new SuggestActivityGsonAdapter()) .registerTypeAdapter(classOf[RoomBean], new RoomBeanGsonAdapter()) .create() } }
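// Editor's addition — a short, hedged usage sketch (the bean passed in is assumed to be
// built elsewhere): the Gson returned by build() already has the three adapters registered.
object ArrangeSuggestGsonBuilderExample {
  def toJson(bean: ArrangeSuggestBean): String =
    ArrangeSuggestGsonBuilder.build().toJson(bean)
}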
openurp/edu-eams-webapp
schedule/src/main/scala/org/openurp/edu/eams/teach/schedule/json/ArrangeSuggestGsonBuilder.scala
Scala
gpl-3.0
711
package scala.slick.jdbc import java.sql.PreparedStatement import scala.slick.session.{PositionedParameters, PositionedResult} /** * Invoker for raw SQL queries. * The companion object contains utility methods for building static queries. */ abstract class StaticQuery[-P,+R](query: String, rconv: GetResult[R], pconv: SetParameter[P]) extends StatementInvoker[P,R] { protected def getStatement = query protected def setParam(param: P, st: PreparedStatement) = pconv(param, new PositionedParameters(st)) protected def extractValue(rs: PositionedResult): R = rconv(rs) protected[this] type Self <: StaticQuery[P, R] protected[this] def copy(query: String = this.query, pconv: SetParameter[P] = this.pconv): Self def + (s: String) = copy(query + s) def +? [T](v: T)(implicit p: SetParameter[T]) = copy(query + '?', new SetParameter[P] { def apply(param: P, pp: PositionedParameters) { pconv(param, pp) p(v, pp) } }) } object StaticQuery { def apply[R](implicit conv: GetResult[R]) = queryNA("") def apply[P, R](implicit pconv1: SetParameter[P], rconv: GetResult[R]) = query[P,R]("") def u = updateNA("") def u1[P](implicit pconv1: SetParameter[P]) = update[P]("") def query[P,R](query: String)(implicit rconv: GetResult[R], pconv: SetParameter[P]) = new StaticQuery1[P, R](query, rconv, pconv) def queryNA[R](query: String)(implicit conv: GetResult[R]) = new StaticQuery0[R](query, conv, SetParameter.SetUnit) def update[P](query: String)(implicit pconv: SetParameter[P]) = new StaticQuery1[P, Int](query, GetResult.GetUpdateValue, pconv) def updateNA(query: String) = new StaticQuery0[Int](query, GetResult.GetUpdateValue, SetParameter.SetUnit) } class StaticQuery0[R](query: String, rconv: GetResult[R], pconv: SetParameter[Unit]) extends StaticQuery[Unit, R](query, rconv, pconv) with UnitInvokerMixin[R] { protected[this] type Self = StaticQuery0[R] protected[this] def copy(query: String, pconv: SetParameter[Unit]): Self = new StaticQuery0(query, rconv, pconv) } class StaticQuery1[P1, R](query: String, rconv: GetResult[R], pconv: SetParameter[P1]) extends StaticQuery[P1, R](query, rconv, pconv) { protected[this] type Self = StaticQuery1[P1, R] protected[this] def copy(query: String, pconv: SetParameter[P1]): Self = new StaticQuery1(query, rconv, pconv) }
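// Editor's addition — a usage sketch of the builders above: `+` appends SQL text and
// `+?` appends a '?' placeholder plus its bind value. Table and column names are
// hypothetical, and executing the queries against a session is not shown here.
object StaticQueryUsageExample {
  val countAdults =
    StaticQuery.queryNA[Int]("select count(*) from users where age > 18")

  val countOlderThan =
    StaticQuery.queryNA[Int]("select count(*) from users where age > ") +? 18

  val renameUser =
    StaticQuery.u + "update users set name = " +? "Alice" + " where id = " +? 1L
}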
szeiger/scala-query
src/main/scala/scala/slick/jdbc/StaticQuery.scala
Scala
bsd-2-clause
2,356
package com.tribbloids.spookystuff.parsing import com.tribbloids.spookystuff.parsing.Pattern.Token import com.tribbloids.spookystuff.utils.RangeArg case class Pattern( token: Token, range: RangeArg = Pattern.RangeArgs.next ) { override def toString = s"'$token' $range" // ID is to make sure that rules created by P/P_* operator can be referenced in the resultSet trait Rule[+R] { def fn(ev: RuleInput): RuleOutcome[R] // lazy val name: String = def outer: Pattern = Pattern.this def token: Token = outer.token def range: RangeArg = outer.range override def toString: String = outer.toString def andThen[R1 >: R, R2](_fn: RuleIO[R1] => RuleOutcome[R2]): Rule[R2] = { AndThen(this, _fn) } } case object !! extends Rule[String] { override def fn(ev: RuleInput): RuleOutcome[String] = ev.!! } case class AndThen[R, R2]( base: Rule[R], _fn: RuleIO[R] => RuleOutcome[R2] ) extends Rule[R2] { override def fn(ev: RuleInput): RuleOutcome[R2] = { val o1 = base.fn(ev) _fn(RuleIO(ev, o1)) } } } object Pattern { trait Token extends Any case class CharToken(v: Char) extends AnyVal with Token { override def toString: String = v.toString } // inlining is subjective case object EndOfStream extends Token { override def toString: String = "[EOS]" } def tokens2Str(tokens: Seq[Pattern.Token]): String = { val chars = tokens.flatMap { case Pattern.CharToken(v) => Some(v) case _ => None } new String(chars.toArray) } object RangeArgs { val next: RangeArg = 0L to 0L val maxLength: RangeArg = 0L to Long.MaxValue } }
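// Editor's addition — a small illustration of the token helpers above; the values are
// arbitrary. tokens2Str keeps only character tokens, so the EndOfStream marker is dropped.
object PatternExample {
  // a pattern for the '{' token, using the default RangeArgs.next window
  val openBrace: Pattern = Pattern(Pattern.CharToken('{'))

  // renders to "ok": CharTokens are kept, EndOfStream is filtered out
  val rendered: String = Pattern.tokens2Str(
    Seq(Pattern.CharToken('o'), Pattern.CharToken('k'), Pattern.EndOfStream))
}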
tribbloid/spookystuff
mldsl/src/main/scala/com/tribbloids/spookystuff/parsing/Pattern.scala
Scala
apache-2.0
1,699
/* * Copyright 2015 Webtrends (http://www.webtrends.com) * * See the LICENCE.txt file distributed with this work for additional * information regarding copyright ownership. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.webtrends.harness.component.cluster import com.webtrends.harness.component.Component import com.webtrends.harness.component.cluster.communication.{MessageService, MessagingAdapter} import com.webtrends.harness.component.zookeeper.Zookeeper import scala.collection.mutable /** * Important to note that if you use clustering you must not include the Zookeeper component * as Clustering will start it up independently, and zookeeper config must be within the clustering config */ class ClusterManager(name:String) extends Component(name) with Clustering with MessagingAdapter with Zookeeper { MessageService.getOrInitMediator(system) implicit val clusterSettings = ClusterSettings(config) override protected def defaultChildName: Option[String] = Some(ClusterManager.MessagingName) override def start = { startZookeeper() startClustering super.start } override def stop = { super.stop stopClustering } override protected def getHealthChildren = { super.getHealthChildren.toSet + MessageService.getOrInitMediator(system) } } object ClusterManager { val ComponentName = "wookiee-cluster" val MessagingName = "messaging" // TODO Think about moving this up to wookiee-core // Creates a thread-safe hash set as a Scala mutable.Set def createSet[T](): mutable.Set[T] = { import scala.collection.JavaConverters._ java.util.Collections.newSetFromMap( new java.util.concurrent.ConcurrentHashMap[T, java.lang.Boolean]).asScala } }
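// Editor's addition — a tiny, hedged illustration of the createSet helper above;
// the element values are arbitrary.
//
//   val inFlightNodes = ClusterManager.createSet[String]()
//   inFlightNodes += "node-1"                  // backed by a ConcurrentHashMap, safe across threads
//   val seen = inFlightNodes.contains("node-1") // true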
Webtrends/wookiee-cluster
src/main/scala/com/webtrends/harness/component/cluster/ClusterManager.scala
Scala
apache-2.0
2,261
package inlineconsume import inlinedefs.FakePredef.assert class Foo: def test = assert(3 > 2)
lampepfl/dotty
tests/semanticdb/expect/inlineconsume.scala
Scala
apache-2.0
98
import sbt._ object Dependencies { object versions { val c3p0 = "0.9.5-pre9" val commonsCli = "1.2" val commonsCodec = "1.5" val commonsIo = "1.4" val commonsLang = "2.6" val jodaConvert = "1.2" val jodaTime = "2.1" val liquibaseCore = "2.0.0" val liquibasePlugin = "1.9.5.0" val postgresql = "9.4.1212" val simpleArm = "1.1.10" val slf4j = "1.7.5" val scalatest = "3.0.8" val socrataUtils = "0.11.0" val socrataCuratorUtils = "1.2.0" val socrataThirdPartyUtils = "5.0.0" val socrataHttpCuratorBroker = "3.13.4" val soqlStdlib = "4.3.0" val typesafeConfig = "1.0.0" val dataCoordinator = "3.8.19" val typesafeScalaLogging = "3.9.2" val rojomaJson = "3.13.0" val metrics = "4.1.2" val metricsScala = "4.1.1" val clojure = "1.5.1" } val c3p0 = "com.mchange" % "c3p0" % versions.c3p0 val commonsCli = "commons-cli" % "commons-cli" % versions.commonsCli val commonsCodec = "commons-codec" % "commons-codec" % versions.commonsCodec val commonsIo = "commons-io" % "commons-io" % versions.commonsIo val commonsLang = "commons-lang" % "commons-lang" % versions.commonsLang val jodaConvert = "org.joda" % "joda-convert" % versions.jodaConvert val jodaTime = "joda-time" % "joda-time" % versions.jodaTime val liquibaseCore = "org.liquibase" % "liquibase-core" % versions.liquibaseCore val liquibasePlugin = "org.liquibase" % "liquibase-plugin" % versions.liquibasePlugin val postgresql = "org.postgresql" % "postgresql" % versions.postgresql val simpleArm = "com.rojoma" %% "simple-arm" % versions.simpleArm val socrataUtil = "com.socrata" %% "socrata-utils" % versions.socrataUtils val socrataCuratorUtils = "com.socrata" %% "socrata-curator-utils" % versions.socrataCuratorUtils val socrataThirdPartyUtils = "com.socrata" %% "socrata-thirdparty-utils" % versions.socrataThirdPartyUtils val socrataHttpCuratorBroker = "com.socrata" %% "socrata-http-curator-broker" % versions.socrataHttpCuratorBroker val soqlStdlib = "com.socrata" %% "soql-stdlib" % versions.soqlStdlib val typesafeConfig = "com.typesafe" % "config" % versions.typesafeConfig val secondarylib = "com.socrata" %% "secondarylib" % versions.dataCoordinator // % "provided" val coordinatorlib = "com.socrata" %% "coordinatorlib" % versions.dataCoordinator val coordinator = "com.socrata" %% "coordinator" % versions.dataCoordinator // ugh, this shouldn't be published at all val slf4j = "org.slf4j" % "slf4j-log4j12" % versions.slf4j val scalatest = "org.scalatest" %% "scalatest" % versions.scalatest val typesafeScalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % versions.typesafeScalaLogging val rojomaJson = "com.rojoma" %% "rojoma-json-v3" % versions.rojomaJson val metricsJetty = "io.dropwizard.metrics" % "metrics-jetty9" % versions.metrics val metricsGraphite = "io.dropwizard.metrics" % "metrics-graphite" % versions.metrics val metricsJmx = "io.dropwizard.metrics" % "metrics-jmx" % versions.metrics val metricsScala = "nl.grons" %% "metrics4-scala" % versions.metricsScala val clojure = "org.clojure" % "clojure" % versions.clojure }
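// Editor's addition — a hypothetical build.sbt fragment showing how these constants are
// typically wired in (module selection and test scoping are illustrative, not from this repo):
//
//   libraryDependencies ++= Seq(
//     Dependencies.postgresql,
//     Dependencies.c3p0,
//     Dependencies.typesafeConfig,
//     Dependencies.scalatest % "test"
//   )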
socrata-platform/soql-postgres-adapter
project/Dependencies.scala
Scala
apache-2.0
3,200
package com.sksamuel.elastic4s.cats.effect package object instances extends CatsEffectInstances
stringbean/elastic4s
elastic4s-effect-cats/src/main/scala/com/sksamuel/elastic4s/cats/effect/instances/package.scala
Scala
apache-2.0
97
package com.truecar.mleap.serialization.ml.v1 import com.truecar.mleap.core.tree.Node import com.truecar.mleap.runtime.transformer._ import ml.bundle.support.v1.runtime.PipelineModelSerializer import ml.bundle.{BundleSerializer, Serializer, StreamSerializer} import Converters._ import ml.bundle.support.ConversionSerializer._ import ml.bundle.support.v1.core.classification.{DecisionTreeClassificationSerializer, RandomForestClassificationSerializer} import ml.bundle.support.v1.json.MlJsonSerializerSupport._ import ml.bundle.support.v1.core.regression.{DecisionTreeRegressionSerializer, RandomForestRegressionSerializer} import ml.bundle.support.v1.core.tree.node.LinearNodeSerializer import ml.bundle.support.v1.runtime import ml.bundle.support.v1.runtime.classification.RandomForestClassificationModelSerializer import ml.bundle.support.v1.runtime.regression.RandomForestRegressionModelSerializer import ml.bundle.v1.runtime.{feature, regression, classification} /** * Created by hollinwilkins on 3/8/16. */ trait MlJsonSerializer extends Serializer { // regression val regressionBundleNodeSerializer = LinearNodeSerializer(mlNodeMetaDataSerializer, mlNodeDataSerializer, includeImpurityStats = false) val decisionTreeRegressionSerializer = DecisionTreeRegressionSerializer(mlDecisionTreeMetaDataSerializer, regressionBundleNodeSerializer) val randomForestRegressionSerializer = RandomForestRegressionSerializer(mlRandomForestMetaDataSerializer, decisionTreeRegressionSerializer) val randomForestRegressionModelSerializer: BundleSerializer[RandomForestRegressionModel] = conversionSerializer[RandomForestRegressionModel, runtime.regression.RandomForestRegressionModel[Node]]( RandomForestRegressionModelSerializer(mlRandomForestRegressionModelMetaDataSerializer, randomForestRegressionSerializer)) val linearRegressionModelSerializer: StreamSerializer[LinearRegressionModel] = conversionSerializer[LinearRegressionModel, regression.LinearRegressionModel.LinearRegressionModel](mlLinearRegressionModelSerializer) addSerializer(randomForestRegressionModelSerializer) addSerializer(linearRegressionModelSerializer) // classification val classificationBundleNodeSerializer = LinearNodeSerializer(mlNodeMetaDataSerializer, mlNodeDataSerializer, includeImpurityStats = true) val decisionTreeClassificationSerializer = DecisionTreeClassificationSerializer(mlDecisionTreeClassificationMetaDataSerializer, classificationBundleNodeSerializer) val randomForestClassificationSerializer = RandomForestClassificationSerializer(mlRandomForestClassificationMetaDataSerializer, decisionTreeClassificationSerializer) val randomForestClassificationModelSerializer: BundleSerializer[RandomForestClassificationModel] = conversionSerializer[RandomForestClassificationModel, runtime.classification.RandomForestClassificationModel[Node]]( RandomForestClassificationModelSerializer(mlRandomForestClassificationModelMetaDataSerializer, randomForestClassificationSerializer)) val supportVectorMachineModelSerializer: StreamSerializer[SupportVectorMachineModel] = conversionSerializer[SupportVectorMachineModel, classification.SupportVectorMachineModel.SupportVectorMachineModel](mlSupportVectorMachineModelSerializer) addSerializer(supportVectorMachineModelSerializer) addSerializer(randomForestClassificationModelSerializer) // feature val hashingTermFrequencyModelSerializer: StreamSerializer[HashingTermFrequencyModel] = conversionSerializer[HashingTermFrequencyModel, feature.HashingTermFrequencyModel.HashingTermFrequencyModel](mlHashingTermFrequencyModelSerializer) val 
standardScalerModelSerializer: StreamSerializer[StandardScalerModel] = conversionSerializer[StandardScalerModel, feature.StandardScalerModel.StandardScalerModel](mlStandardScalerModelSerializer) val stringIndexerModelSerializer: StreamSerializer[StringIndexerModel] = conversionSerializer[StringIndexerModel, feature.StringIndexerModel.StringIndexerModel](mlStringIndexerModelSerializer) val reverseStringIndexerModelSerializer: StreamSerializer[ReverseStringIndexerModel] = conversionSerializer[ReverseStringIndexerModel, feature.ReverseStringIndexerModel.ReverseStringIndexerModel](mlReverseStringIndexerModelSerializer) val tokenizerModelSerializer: StreamSerializer[TokenizerModel] = conversionSerializer[TokenizerModel, feature.TokenizerModel.TokenizerModel](mlTokenizerModelSerializer) val vectorAssemblerModelSerializer: StreamSerializer[VectorAssemblerModel] = conversionSerializer[VectorAssemblerModel, feature.VectorAssemblerModel.VectorAssemblerModel](mlVectorAssemblerModelSerializer) addSerializer(hashingTermFrequencyModelSerializer) addSerializer(standardScalerModelSerializer) addSerializer(stringIndexerModelSerializer) addSerializer(reverseStringIndexerModelSerializer) addSerializer(tokenizerModelSerializer) addSerializer(vectorAssemblerModelSerializer) // pipeline val pipelineModelSerializer: BundleSerializer[PipelineModel] = conversionSerializer[PipelineModel, runtime.PipelineModel](PipelineModelSerializer(this, mlPipelineModelMetaDataSerializer)) addSerializer(pipelineModelSerializer) } object MlJsonSerializer extends MlJsonSerializer { val supportedVersions = Set("0.1-SNAPSHOT") override val namespace: String = "ml.bundle.json" override val version: String = "0.1-SNAPSHOT" override def isCompatibleVersion(otherVersion: String): Boolean = supportedVersions.contains(otherVersion) }
TrueCar/mleap
mleap-serialization/src/main/scala/com/truecar/mleap/serialization/ml/v1/MlJsonSerializer.scala
Scala
apache-2.0
5,487
package $package$.routes

import $package$.utils.BaseRoute
import $package$.controllers.HelloController

import unfiltered.netty._
import unfiltered.netty.request._
import unfiltered.request._
import unfiltered.response._
import unfiltered.scalate._
import unfiltered.netty.websockets.{_ => Pass}

object HelloRoute extends BaseRoute {

  val sockets = new scala.collection.mutable.ListBuffer[WebSocket]()

  val websockets = unfiltered.netty.websockets.Planify {
    case Path("/websocket") => {
      case Open(s) => sockets += s
      case Message(s, Text(str)) => sockets foreach(_.send(str.reverse))
      case Close(s) => sockets -= s
      case Error(s, e) => println("error %s" format e.getMessage)
    }
  }

  val display = async.Planify {
    case req@GET(Path("/")) => req.respond(view)
    case req@GET(Path("/ping")) => req.respond(ResponseString("pong"))
    case req@GET(Path("/hello")) => req.respond(HtmlContent ~> Scalate(req, "hello.ssp"))
    case req@GET(Path(Seg("hello":: "say" :: word :: Nil))) => req.respond(JsonContent ~> ResponseString(HelloController.say(word)))
    case _ => Pass
  }

  val view = Html(
    <html>
      <body>
        <a href="/ping">ping</a>
        <a href="/hello">hello</a>
        <a href="/hello/say/love">say</a>
      </body>
    </html>
  )
}
ixiongdi/wpt.g8
src/main/g8/src/main/scala/routes/HelloRoute.scala
Scala
apache-2.0
1,303
package todomvc import japgolly.scalajs.react._ import japgolly.scalajs.react.extra._ import japgolly.scalajs.react.extra.router.RouterCtl import japgolly.scalajs.react.vdom.prefix_<^._ import org.scalajs.dom.ext.KeyCode import org.scalajs.dom.html object TodoList { case class Props ( ctl: RouterCtl[TodoFilter], model: TodoModel, currentFilter: TodoFilter ) case class State( todos: Seq[Todo], editing: Option[TodoId] ) /** * These specify when it makes sense to skip updating this component (see comment on `Listenable` below) */ implicit val r1: Reusability[Props] = Reusability.fn[Props]((p1, p2) => p1.currentFilter == p2.currentFilter) implicit val r2: Reusability[State] = Reusability.fn[State]((s1, s2) => s1.editing == s2.editing && (s1.todos eq s2.todos)) /** * One difference between normal react and scalajs-react is the use of backends. * Since components are not inheritance-based, we often use a backend class * where we put most of the functionality: rendering, state handling, etc. * * It extends OnUnmount so unsubscription of events can be made automatically. */ class Backend($: BackendScope[Props, State]) extends OnUnmount { /** * A backend lives for the entire life of a component. During that time, * it might receive new Props, * so we use this mechanism to keep state that is derived from Props, so * we only update it again if Props changed in a meaningful way (as determined * by the implicit `Reusability` defined above ) */ case class Callbacks(P: Props) { val handleNewTodoKeyDown: ReactKeyboardEventI => Option[Callback] = e => Some((e.nativeEvent.keyCode, UnfinishedTitle(e.target.value).validated)) collect { case (KeyCode.Enter, Some(title)) => Callback(e.target.value = "") >> P.model.addTodo(title) } val updateTitle: TodoId => Title => Callback = id => title => editingDone(cb = P.model.update(id, title)) val toggleAll: ReactEventI => Callback = e => P.model.toggleAll(e.target.checked) } val cbs: Px[Callbacks] = Px.cbA($.props).map(Callbacks) val startEditing: TodoId => Callback = id => $.modState(_.copy(editing = Some(id))) /** * @param cb Two changes to the same `State` must be combined using a callback like this. * If not, rerendering will prohibit the second from having its effect. * For this example, the current `State` contains both `editing` and the list of todos. */ def editingDone(cb: Callback = Callback.empty): Callback = $.modState(_.copy(editing = None), cb) def render(P: Props, S: State): ReactTagOf[html.Div] = { val todos = S.todos val filteredTodos = todos filter P.currentFilter.accepts val activeCount = todos count TodoFilter.Active.accepts val completedCount = todos.length - activeCount /** * `cbs.value()` checks if `Props` changed (according to `Reusability`), * and, if it did, creates a new instance of `Callbacks`. For best * performance, it's best to call value() once per render() pass. */ val callbacks = cbs.value() <.div( <.h1("todos"), <.header( ^.className := "header", <.input( ^.className := "new-todo", ^.placeholder := "What needs to be done?", ^.onKeyDown ==>? 
callbacks.handleNewTodoKeyDown, ^.autoFocus := true ) ), todos.nonEmpty ?= todoList(P, callbacks, S.editing, filteredTodos, activeCount), todos.nonEmpty ?= footer(P, activeCount, completedCount) ) } def todoList(P: Props, callbacks: Callbacks, editing: Option[TodoId], filteredTodos: Seq[Todo], activeCount: Int): ReactTagOf[html.Element] = <.section( ^.className := "main", <.input( ^.className := "toggle-all", ^.`type` := "checkbox", ^.checked := activeCount == 0, ^.onChange ==> callbacks.toggleAll ), <.ul( ^.className := "todo-list", filteredTodos.map(todo => TodoItem(TodoItem.Props( onToggle = P.model.toggleCompleted(todo.id), onDelete = P.model.delete(todo.id), onStartEditing = startEditing(todo.id), onUpdateTitle = callbacks.updateTitle(todo.id), onCancelEditing = editingDone(), todo = todo, isEditing = editing.contains(todo.id) )) ) ) ) def footer(P: Props, activeCount: Int, completedCount: Int): ReactElement = Footer(Footer.Props( filterLink = P.ctl.link, onClearCompleted = P.model.clearCompleted, currentFilter = P.currentFilter, activeCount = activeCount, completedCount = completedCount )) } private val component = ReactComponentB[Props]("TodoList") /* state derived from the props */ .initialState_P(p => State(p.model.todos, None)) .renderBackend[Backend] /** * Makes the component subscribe to events coming from the model. * Unsubscription on component unmount is handled automatically. * The last function is the actual event handling, in this case * we just overwrite the whole list in `state`. */ .configure(Listenable.install((p: Props) => p.model, $ => (todos: Seq[Todo]) => $.modState(_.copy(todos = todos)))) /** * Optimization where we specify whether the component can have changed. * In this case we avoid comparing model and routerConfig, and only do * reference checking on the list of todos. * * The implementation of the «equality» checks are in the Reusability * typeclass instances for `State` and `Props` at the top of the file. * * To understand how things are redrawn, change `shouldComponentUpdate` for * either `shouldComponentUpdateWithOverlay` or `shouldComponentUpdateAndLog` */ .configure(Reusability.shouldComponentUpdate) /** * For performance reasons its important to only call `build` once for each component */ .build def apply(model: TodoModel, currentFilter: TodoFilter)(ctl: RouterCtl[TodoFilter]): ReactElement = component(Props(ctl, model, currentFilter)) }
gabrielmancini/interactor
src/demo/scalajs-react/src/main/scala/todomvc/TodoList.scala
Scala
bsd-2-clause
6,674
package main.scala

import java.net.InetAddress
import java.net.Socket

/** Client-side requestor: marshals a Request, ships it to the remote host and
  * unmarshals the Response that comes back. */
class Requestor {

  def execRemoteObject(request: Request): Any = {
    val marshaller: IMarshaller = new Marshaller()
    val clientRequestHandler: ClientRequestHandler = new ClientRequestHandler()

    // Open a TCP connection to the host/port carried by the request
    val inetAddress: InetAddress = InetAddress.getByName(request.ip)
    val socket: Socket = new Socket(inetAddress, request.port)

    // Marshal the request, send it, and wait for the raw reply bytes
    val data: Array[Byte] = marshaller.marshall(request)
    val result: Array[Byte] = clientRequestHandler.execRemoteObject(data, socket)

    // Unmarshal the reply and hand back its payload
    val response: Response = castFromAnyToResponse(marshaller.unMarshall(result))
    response.result
  }

  def castFromAnyToResponse(obj: Any): Response = obj match {
    case response: Response => response
    case _ => throw new ClassCastException
  }
}
labs2/FLiMSy
ServerFLiMSy/src/main/scala/Requestor.scala
Scala
apache-2.0
807
package is.hail.io.hadoop import java.io.DataOutputStream import org.apache.hadoop.fs._ import org.apache.hadoop.io._ import org.apache.hadoop.mapred._ import org.apache.hadoop.util.Progressable class ByteArrayOutputFormat extends FileOutputFormat[NullWritable, BytesOnlyWritable] { class ByteArrayRecordWriter(out: DataOutputStream) extends RecordWriter[NullWritable, BytesOnlyWritable] { def write(key: NullWritable, value: BytesOnlyWritable) { if (value != null) value.write(out) } def close(reporter: Reporter) { out.close() } } override def getRecordWriter(ignored: FileSystem, job: JobConf, name: String, progress: Progressable): RecordWriter[NullWritable, BytesOnlyWritable] = { val file: Path = FileOutputFormat.getTaskOutputPath(job, name) val fs: FileSystem = file.getFileSystem(job) val fileOut: FSDataOutputStream = fs.create(file, progress) new ByteArrayRecordWriter(fileOut) } }
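// Editor's addition — a hedged usage sketch. Assuming a Spark pair RDD of
// (NullWritable, BytesOnlyWritable), output could be written through this format with
// the old Hadoop API, e.g.:
//
//   rdd.saveAsHadoopFile(path,
//     classOf[NullWritable], classOf[BytesOnlyWritable], classOf[ByteArrayOutputFormat])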
danking/hail
hail/src/main/scala/is/hail/io/hadoop/ByteArrayOutputFormat.scala
Scala
mit
965
import org.coursera.naptime.NaptimeModule import org.coursera.naptime.ari.EngineApi import org.coursera.naptime.ari.FetcherApi import org.coursera.naptime.ari.LocalSchemaProvider import org.coursera.naptime.ari.SchemaProvider import org.coursera.naptime.ari.engine.EngineImpl import org.coursera.naptime.ari.engine.EngineMetricsCollector import org.coursera.naptime.ari.engine.LoggingEngineMetricsCollector import org.coursera.naptime.ari.fetcher.LocalFetcher import org.coursera.naptime.ari.graphql.DefaultGraphqlSchemaProvider import org.coursera.naptime.ari.graphql.GraphqlSchemaProvider import org.coursera.naptime.ari.graphql.controllers.GraphQlControllerMetricsCollector import org.coursera.naptime.ari.graphql.controllers.LoggingGraphQlControllerMetricsCollector import org.coursera.naptime.ari.graphql.controllers.filters.ComplexityFilterConfiguration import org.coursera.naptime.ari.graphql.controllers.filters.DefaultFilters import org.coursera.naptime.ari.graphql.controllers.filters.FilterList import resources.UserStore import resources.UserStoreImpl import resources.UsersResource import resources.CoursesResource import resources.InstructorsResource import resources.PartnersResource class ResourceModule extends NaptimeModule { override def configure(): Unit = { bindResource[UsersResource] bindResource[CoursesResource] bindResource[InstructorsResource] bindResource[PartnersResource] bind[UserStore].to[UserStoreImpl] bind[EngineApi].to[EngineImpl] bind[FetcherApi].to[LocalFetcher] bind[EngineMetricsCollector].to[LoggingEngineMetricsCollector] bind[SchemaProvider].to[LocalSchemaProvider] bind[GraphqlSchemaProvider].to[DefaultGraphqlSchemaProvider] bind[FilterList].to[DefaultFilters] bind[ComplexityFilterConfiguration].toInstance(ComplexityFilterConfiguration.DEFAULT) bind[GraphQlControllerMetricsCollector].to[LoggingGraphQlControllerMetricsCollector] } }
vkuo-coursera/naptime
examples/src/main/scala/ResourceModule.scala
Scala
apache-2.0
1,939
/* * MkDoubleVector.scala * (FScape) * * Copyright (c) 2001-2022 Hanns Holger Rutz. All rights reserved. * * This software is published under the GNU Affero General Public License v3+ * * * For further information, please contact Hanns Holger Rutz at * [email protected] */ package de.sciss.fscape package lucre.stream import akka.stream.{Attributes, Inlet, SinkShape} import de.sciss.fscape.lucre.UGenGraphBuilder.OutputRef import de.sciss.fscape.stream.impl.Handlers.InDMain import de.sciss.fscape.stream.impl.{Handlers, NodeImpl, StageImpl} import de.sciss.fscape.stream._ import de.sciss.lucre.DoubleVector import de.sciss.proc.FScape.Output import de.sciss.serial.DataOutput import scala.annotation.tailrec import scala.collection.immutable.{IndexedSeq => Vec} object MkDoubleVector { def apply(in: OutD, ref: OutputRef)(implicit b: Builder): Unit = { val stage0 = new Stage(b.layer, ref) val stage = b.add(stage0) b.connect(in, stage.in) } private final val name = "MkDoubleVector" private type Shp = SinkShape[BufD] private final class Stage(layer: Layer, ref: OutputRef)(implicit a: Allocator) extends StageImpl[Shp](name) { stage => val shape: Shape = new SinkShape( in = InD(s"${stage.name}.in") ) def createLogic(attr: Attributes): NodeImpl[Shape] = new Logic(shape, layer, ref) } private final class Logic(shape: Shp, layer: Layer, ref: OutputRef)(implicit a: Allocator) extends Handlers(name, layer, shape) { type A = Double private[this] val builder = Vec.newBuilder[A] protected val hIn: InDMain = InDMain(this, shape.in) protected def onDone(inlet: Inlet[_]): Unit = finish() private def finish(): Unit = { val v = builder.result() ref.complete(new Output.Writer { override val outputValue: Vec[A] = v def write(out: DataOutput): Unit = DoubleVector.valueFormat.write(outputValue, out) }) completeStage() } @tailrec protected def process(): Unit = { val rem = hIn.available if (rem == 0) return val in = hIn.array var inOff0 = hIn.offset val stop0 = inOff0 + rem val _builder = builder while (inOff0 < stop0) { _builder += in(inOff0) inOff0 += 1 } hIn.advance(rem) if (hIn.isDone) { finish() } else { process() } } } }
Sciss/FScape-next
lucre/shared/src/main/scala/de/sciss/fscape/lucre/stream/MkDoubleVector.scala
Scala
agpl-3.0
2,431