| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 5-1M | stringlengths 5-109 | stringlengths 6-208 | stringclasses 1 value | stringclasses 15 values | int64 5-1M |
package dao
import javax.inject.{Inject, Singleton}
import models.AttributeEntity
import play.api.db.slick.{DatabaseConfigProvider, HasDatabaseConfigProvider}
import slick.driver.JdbcProfile
import scala.concurrent.Future
trait AttributesComponent extends InstancesComponent {
self: HasDatabaseConfigProvider[JdbcProfile] =>
import driver.api._
private val Instances = TableQuery[InstanceTable]
class AttributeTable(tag: Tag) extends Table[AttributeEntity](tag, "ATTRIBUTE") {
def pk = primaryKey("ATTRIBUTE_PK", (instanceId, key))
def instanceId = column[String]("INSTANCE_ID")
def key = column[String]("KEY")
def value = column[String]("VALUE")
def instance = foreignKey("ATTRIBUTE_INSTANCE_FK", instanceId, Instances)(_.id, onUpdate = ForeignKeyAction.Cascade, onDelete = ForeignKeyAction.Cascade)
def * = (instanceId, key, value) <>(AttributeEntity.tupled, AttributeEntity.unapply)
}
}
@Singleton()
class AttributeDAO @Inject()(protected val dbConfigProvider: DatabaseConfigProvider) extends AttributesComponent with HasDatabaseConfigProvider[JdbcProfile] {
import driver.api._
private val Attributes = TableQuery[AttributeTable]
def get(instanceId: String): Future[Seq[AttributeEntity]] = {
db.run(Attributes.filter(_.instanceId === instanceId).result)
}
def save(attributeEntity: AttributeEntity): Future[Int] = db.run(Attributes insertOrUpdate attributeEntity)
def delete(instanceId: String, key: String) = {
db.run(Attributes.filter(attribute => attribute.instanceId === instanceId && attribute.key === key).delete)
}
}
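// A minimal usage sketch (the caller below is hypothetical, not part of this file): the DAO is
// meant to be injected by Play's DI container, and every method returns a Future executed on
// Slick's database thread pool.
//
//   class InstanceAttributeService @Inject()(attributes: AttributeDAO) {
//     def tags(instanceId: String): Future[Seq[AttributeEntity]] = attributes.get(instanceId)
//     def setTag(instanceId: String, key: String, value: String): Future[Int] =
//       attributes.save(AttributeEntity(instanceId, key, value))
//   }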
| vuminhkh/tosca-runtime | deployer/app/dao/AttributeDAO.scala | Scala | mit | 1,604 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.config
import de.fuberlin.wiwiss.silk.output.Output
import de.fuberlin.wiwiss.silk.datasource.{Source}
import xml.Node
import de.fuberlin.wiwiss.silk.util.{Identifier, ValidatingXMLReader}
import de.fuberlin.wiwiss.silk.runtime.resource.ResourceLoader
/**
* A Silk linking configuration.
* Specifies how multiple sources are interlinked by defining a link specification for each type of entity to be interlinked.
*
* @param prefixes The prefixes which are used throughout the configuration to shorten URIs
* @param sources The sources which should be interlinked
* @param linkSpecs The Silk link specifications
* @param outputs The outputs
*/
case class LinkingConfig(prefixes: Prefixes,
runtime: RuntimeConfig,
sources: Traversable[Source],
linkSpecs: Traversable[LinkSpecification],
outputs: Traversable[Output] = Traversable.empty) {
private val sourceMap = sources.map(s => (s.id, s)).toMap
private val linkSpecMap = linkSpecs.map(s => (s.id, s)).toMap
private val outputMap = outputs.map(s => (s.id, s)).toMap
/**
* Selects a datasource by id.
*/
def source(id: Identifier) = sourceMap(id)
/**
* Selects a link specification by id.
*/
def linkSpec(id: Identifier) = linkSpecMap(id)
/**
* Selects an output by id.
*/
def output(id: Identifier) = outputMap(id)
/**
* Merges this configuration with another configuration.
*/
def merge(config: LinkingConfig) = {
LinkingConfig(
prefixes = prefixes ++ config.prefixes,
runtime = runtime,
sources = sources ++ config.sources,
linkSpecs = linkSpecs ++ config.linkSpecs,
outputs = outputs ++ config.outputs
)
}
def toXML: Node = {
<Silk>
{prefixes.toXML}
<DataSources>
{sources.map(_.toXML)}
</DataSources>
<Interlinks>
{linkSpecs.map(_.toXML(prefixes))}
</Interlinks>
</Silk>
}
}
object LinkingConfig {
private val schemaLocation = "de/fuberlin/wiwiss/silk/LinkSpecificationLanguage.xsd"
def empty = LinkingConfig(Prefixes.empty, RuntimeConfig(), Nil, Nil, Nil)
def load(resourceLoader: ResourceLoader) = {
new ValidatingXMLReader(fromXML(_, resourceLoader), schemaLocation)
}
def fromXML(node: Node, resourceLoader: ResourceLoader) = {
implicit val prefixes = Prefixes.fromXML((node \\ "Prefixes").head)
val sources = (node \\ "DataSources" \\ "DataSource").map(Source.fromXML(_, resourceLoader))
val blocking = (node \\ "Blocking").headOption match {
case Some(blockingNode) => Blocking.fromXML(blockingNode)
case None => Blocking()
}
val linkSpecifications = (node \\ "Interlinks" \\ "Interlink").map(p => LinkSpecification.fromXML(p, resourceLoader))
implicit val globalThreshold = None
val outputs = (node \\ "Outputs" \\ "Output").map(Output.fromXML(_, resourceLoader))
LinkingConfig(prefixes, RuntimeConfig(blocking = blocking), sources, linkSpecifications, outputs)
}
}
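// A rough usage sketch (the file name and source id are hypothetical; assumes the reader returned
// by load() can be applied to a configuration file, and relies on merge() keeping this config's
// runtime settings, as implemented above):
//
//   // val config = LinkingConfig.load(resourceLoader)(new File("links.xml"))
//   // val dbpedia = config.source("dbpedia")
//   // val merged = config.merge(otherConfig)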
| fusepoolP3/p3-silk | silk-core/src/main/scala/de/fuberlin/wiwiss/silk/config/LinkingConfig.scala | Scala | apache-2.0 | 3,641 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.tensor
import java.util
import com.intel.analytics.bigdl.mkl.Memory
import scala.reflect._
private[tensor] class ArrayStorage[@specialized(Double, Float) T: ClassTag](
private[tensor] var values: Array[T]) extends Storage[T] {
override def apply(index: Int): T = values(index)
override def update(index: Int, value: T): Unit = values(index) = value
override def length(): Int = values.length
override def iterator: Iterator[T] = values.iterator
override def array(): Array[T] = values
override def copy(source: Storage[T], offset: Int, sourceOffset: Int,
length: Int): this.type = {
source match {
case s: ArrayStorage[T] => System.arraycopy(s.values, sourceOffset,
this.values, offset, length)
case s: DnnStorage[T] =>
require(classTag[T] == ClassTag.Float, "Only support copy float dnn storage")
require(sourceOffset == 0, "dnn storage offset should be 0")
Memory.CopyPtr2Array(s.ptr.address, 0, values.asInstanceOf[Array[Float]], offset, length,
DnnStorage.FLOAT_BYTES)
case _ => throw new UnsupportedOperationException("Only support dnn or array storage")
}
this
}
override def resize(size: Long): this.type = {
values = new Array[T](size.toInt)
this
}
override def fill(value: T, offset: Int, length: Int): this.type = {
value match {
case v: Double => util.Arrays.fill(values.asInstanceOf[Array[Double]],
offset - 1, offset - 1 + length, v)
case v: Float => util.Arrays.fill(values.asInstanceOf[Array[Float]],
offset - 1, offset - 1 + length, v)
case v: Int => util.Arrays.fill(values.asInstanceOf[Array[Int]],
offset - 1, offset - 1 + length, v)
case v: Long => util.Arrays.fill(values.asInstanceOf[Array[Long]],
offset - 1, offset - 1 + length, v)
case v: Short => util.Arrays.fill(values.asInstanceOf[Array[Short]],
offset - 1, offset - 1 + length, v)
case _ => throw new IllegalArgumentException
}
this
}
override def set(other: Storage[T]): this.type = {
require(other.length() == this.length())
this.values = other.array
this
}
}
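// Note on fill() above: offsets follow a 1-based storage convention, which is why the
// implementation indexes from (offset - 1). A small sketch, assuming a Float storage used from
// within this package:
//
//   // val s = new ArrayStorage[Float](new Array[Float](4))
//   // s.fill(1.0f, offset = 2, length = 2)   // backing array becomes [0.0, 1.0, 1.0, 0.0]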
| wzhongyuan/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/tensor/ArrayStorage.scala | Scala | apache-2.0 | 2,806 |
package com.example.http4s
package jetty
import com.example.http4s.ssl.SslExample
import org.http4s.server.jetty.JettyBuilder
object JettySslExample extends SslExample {
def builder = JettyBuilder
}
| ZizhengTai/http4s | examples/jetty/src/main/scala/com/example/http4s/jetty/JettySslExample.scala | Scala | apache-2.0 | 204 |
package io.github.ptitjes.scott.nl.lang.fr
import java.io.File
import io.github.ptitjes.scott.api.HiddenMarkovModel._
import io.github.ptitjes.scott.api._
import io.github.ptitjes.scott.decoders.BeamDecoder
import io.github.ptitjes.scott.nl.analysis.Checking
import io.github.ptitjes.scott.nl.corpora.Corpora._
import io.github.ptitjes.scott.nl.lefffEnhancedFeatures
import io.github.ptitjes.scott.trainers.DiscriminantTrainer
import io.github.ptitjes.scott.utils.Utils._
object trainFTBFineLE extends App {
val ftbPath = args(0)
val lefffPath = args(1)
val (trainCorpus, devCorpus, testCorpus) = FTB.parseSplitFine(ftbPath)
val lefff = Lefff.parse(lefffPath, "3.2")
val trainer = new DiscriminantTrainer[NLToken, NLToken with NLPosTag](
order = 2,
iterationCount = 15,
useAveraging = DiscriminantTrainer.COMPLETE_AVERAGING,
features = new lefffEnhancedFeatures.Features(lefff),
FTB.wordCodeExtractor,
FTB.tagExtractor,
FTB.fineTokenBuilder
)
trainer.train(trainCorpus, new IterationCallback[NLToken, NLToken with NLPosTag] {
override def iterationDone(iteration: Int, hmm: HiddenMarkovModel[NLToken, NLToken with NLPosTag], elapsedTime: Long): Unit = {
val hmmName = "FTB-Fine-LE-" + iteration
val hmmFile = new File("temp/" + hmmName + ".hmm")
decode(hmm, hmmName)
timed("Saving model") {
writeTo(hmm, hmmFile)
}
val (loadedHmm, _) = timed("Loading model") {
readFrom[NLToken, NLToken with NLPosTag](hmmFile)
}
decode(loadedHmm, "Loaded-" + hmmName)
println()
}
})
def decode(hmm: HiddenMarkovModel[NLToken, NLToken with NLPosTag], hmmName: String) {
val decoder = new BeamDecoder(hmm)
val hypCorpus = decoder.decode(devCorpus)
Checking.check(hmm, devCorpus, hypCorpus, new File("temp/Decode-on-" + hmmName + ".check")).display()
}
}
| ptitjes/scott | scott-nl/src/main/scala/io/github/ptitjes/scott/nl/lang/fr/trainFTBFineLE.scala | Scala | gpl-3.0 | 1,817 |
package models
/**
* Created by Jörg Amelunxen on 14.01.15.
*/
case class HistoryModel( uID : String,
content : String,
editor : String,
topicID : String,
versionNumber : String,
timeStamp : String)
| HiP-App/HiPBackend | app/models/HistoryModel.scala | Scala | apache-2.0 | 333 |
package tifmo
package dcstree {
sealed abstract class Quantifier
object QuantifierALL extends Quantifier with Serializable
object QuantifierNO extends Quantifier with Serializable
}
| tianran/tifmo | src/tifmo/dcstree/Quantifier.scala | Scala | bsd-2-clause | 195 |
package se.gigurra.leavu3.app
import java.awt.Desktop
import java.net.URI
import com.twitter.util.{Duration, JavaTimer, Time}
import se.gigurra.leavu3.util.Resource2String
import com.github.gigurra.serviceutils.twitter.logging.Logging
import scala.language.implicitConversions
import scala.util.Try
/**
* Created by kjolh on 3/20/2016.
*/
object Version extends Logging {
val versionUrl = "http://build.culvertsoft.se/dcs/leavu3-version.txt"
val downloadUrl = "http://build.culvertsoft.se/dcs/"
var latestOpt = Try(scala.io.Source.fromURL(versionUrl, "UTF-8").mkString).toOption
val currentOpt = Try(Resource2String("version.txt")).toOption
def versionsKnown: Boolean = {
currentOpt.isDefined && latestOpt.isDefined
}
def updateAvailable: Option[Boolean] = {
for {
current <- currentOpt
latest <- latestOpt
} yield {
current != latest
}
}
def downloadLatest(): Unit = {
if(Desktop.isDesktopSupported) {
Desktop.getDesktop.browse(new URI(downloadUrl))
} else {
logger.error(s"Can't auto update - browser not supported!")
}
}
override def toString: String = currentOpt.getOrElse("unknown")
}
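// A minimal sketch of startup use (the call site is hypothetical):
//
//   // if (Version.updateAvailable.contains(true)) Version.downloadLatest()
//   // logger.info(s"Running leavu3 $Version, latest published: ${Version.latestOpt.getOrElse("unknown")}")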
| GiGurra/leavu3 | src/main/scala/se/gigurra/leavu3/app/Version.scala | Scala | mit | 1,184 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v2
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalString, Input}
case class B1571(value: Option[String]) extends CtBoxIdentifier("Address Line 1") with CtOptionalString with Input
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v2/B1571.scala | Scala | apache-2.0 | 823 |
package models.hubnet
import
java.util.concurrent.TimeoutException
import
scala.concurrent.{ Await, duration },
duration._
import
akka.{ actor, pattern, util => util_akka },
actor._,
pattern.ask,
util_akka.Timeout
import
play.{ api, libs },
api.Logger,
libs.Akka
import play.api.libs.concurrent.Execution.Implicits.defaultContext
/**
* Created with IntelliJ IDEA.
* User: jason
* Date: 4/21/13
* Time: 8:25 PM
*/
class ExpiryManager[T](expireFunc: (T) => Unit, name: String) {
protected val ActorPrefix = s"$name-expiry-"
protected lazy val system = ActorSystem(name)
protected object Messages {
case object Expire
case object Init
case object Ping // Used to check if actor exists; if not, create one; else, refresh timeout
case object Pang // Timed out on existence check
case object Pong // Existed
case object Refresh
}
import Messages._
protected class ExpiryActor(id: T) extends Actor {
protected val LifeSpan = 8 hours
private var task = new Cancellable {
override def cancel(): Boolean = true
override def isCancelled = true
}
override def receive = {
case Init => task = Akka.system.scheduler.scheduleOnce(LifeSpan) { self ! Expire }
case Refresh => task.cancel(); self ! Init
case Expire => expireFunc(id); self ! PoisonPill
case Ping => sender ! Pong
}
}
def apply(entryKey: T): Unit = {
implicit val timeout = Timeout(1500 millis)
val actor = system.actorSelection(s"/user/${generateActorPath(entryKey)}")
val response = {
try Await.result(actor ? Ping, timeout.duration)
catch {
case ex: TimeoutException => Pang
}
}
response match {
case Pong => actor ! Refresh
case Pang => initExpiry(entryKey)
}
}
private def initExpiry(entryKey: T): Unit = {
val actorOpt = {
try Option(system.actorOf(Props(new ExpiryActor(entryKey)), name = generateActorPath(entryKey)))
catch {
case ex: InvalidActorNameException =>
Logger.warn("Actor name exception", ex)
None
}
}
actorOpt foreach (_ ! Init)
}
private def generateActorPath(entryKey: T) = s"${ActorPrefix}${entryKey.toString}"
}
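// A minimal usage sketch (the expiry callback is hypothetical). The first call for a key spawns
// its expiry actor; later calls within the 8-hour LifeSpan only refresh the timer.
//
//   // val sessions = new ExpiryManager[String](id => Logger.warn(s"session $id expired"), "sessions")
//   // sessions("user-42")   // create or refresh the expiry timer for this key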
| NetLogo/SimServer | app/models/hubnet/ExpiryManager.scala | Scala | gpl-2.0 | 2,284 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.batch.table
import org.apache.flink.table.api._
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.planner.runtime.utils.TestData._
import org.apache.flink.table.planner.runtime.utils.{BatchTestBase, TestingRetractTableSink, TestingUpsertTableSink}
import org.apache.flink.table.planner.utils.MemoryTableSourceSinkUtil
import org.apache.flink.test.util.TestBaseUtils
import org.junit.Assert._
import org.junit._
import java.util.TimeZone
import scala.collection.JavaConverters._
class LegacyTableSinkITCase extends BatchTestBase {
@Test
def testDecimalOutputFormatTableSink(): Unit = {
MemoryTableSourceSinkUtil.clear()
val schema = TableSchema.builder()
.field("c", DataTypes.VARCHAR(5))
.field("b", DataTypes.DECIMAL(10, 0))
.field("d", DataTypes.CHAR(5))
.build()
MemoryTableSourceSinkUtil.createDataTypeOutputFormatTable(
tEnv, schema, "testSink")
registerCollection("Table3", data3, type3, "a, b, c", nullablesOfData3)
val table = tEnv.from("Table3")
.where('a > 20)
.select("12345", 55.cast(DataTypes.DECIMAL(10, 0)), "12345".cast(DataTypes.CHAR(5)))
execInsertTableAndWaitResult(table, "testSink")
val results = MemoryTableSourceSinkUtil.tableDataStrings.asJava
val expected = Seq("12345,55,12345").mkString("\\n")
TestBaseUtils.compareResultAsText(results, expected)
}
@Test
def testDecimalAppendStreamTableSink(): Unit = {
MemoryTableSourceSinkUtil.clear()
val schema = TableSchema.builder()
.field("c", DataTypes.VARCHAR(5))
.field("b", DataTypes.DECIMAL(10, 0))
.field("d", DataTypes.CHAR(5))
.build()
MemoryTableSourceSinkUtil.createDataTypeAppendStreamTable(
tEnv, schema, "testSink")
registerCollection("Table3", data3, type3, "a, b, c", nullablesOfData3)
val table = tEnv.from("Table3")
.where('a > 20)
.select("12345", 55.cast(DataTypes.DECIMAL(10, 0)), "12345".cast(DataTypes.CHAR(5)))
execInsertTableAndWaitResult(table, "testSink")
val results = MemoryTableSourceSinkUtil.tableDataStrings.asJava
val expected = Seq("12345,55,12345").mkString("\\n")
TestBaseUtils.compareResultAsText(results, expected)
}
@Ignore
@Test
def testDecimalForLegacyTypeTableSink(): Unit = {
MemoryTableSourceSinkUtil.clear()
val schema = TableSchema.builder()
.field("a", DataTypes.VARCHAR(5))
.field("b", DataTypes.DECIMAL(10, 0))
.build()
// TODO: should fail at TableSinkUtils::validateLogicalPhysicalTypesCompatible
// CatalogTable holds the new type but the sink holds the legacy type
MemoryTableSourceSinkUtil.createLegacyUnsafeMemoryAppendTable(
tEnv, schema, "testSink")
registerCollection("Table3", simpleData2, simpleType2, "a, b", nullableOfSimpleData2)
val table = tEnv.from("Table3")
.select('a.cast(DataTypes.STRING()), 'b.cast(DataTypes.DECIMAL(10, 2)))
.distinct()
execInsertTableAndWaitResult(table, "testSink")
val results = MemoryTableSourceSinkUtil.tableDataStrings.asJava
val expected = Seq("1,0.100000000000000000", "2,0.200000000000000000",
"3,0.300000000000000000", "3,0.400000000000000000", "4,0.500000000000000000",
"4,0.600000000000000000", "5,0.700000000000000000", "5,0.800000000000000000",
"5,0.900000000000000000").mkString("\\n")
TestBaseUtils.compareResultAsText(results, expected)
}
private def prepareForUpsertSink(): TestingUpsertTableSink = {
val schema = TableSchema.builder()
.field("a", DataTypes.INT())
.field("b", DataTypes.DOUBLE())
.build()
val sink = new TestingUpsertTableSink(Array(0), TimeZone.getDefault)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
"testSink", sink.configure(schema.getFieldNames, schema.getFieldTypes))
registerCollection("MyTable", simpleData2, simpleType2, "a, b", nullableOfSimpleData2)
sink
}
@Test
def testUpsertSink(): Unit = {
val sink = prepareForUpsertSink()
sink.expectedKeys = Some(Array("a"))
sink.expectedIsAppendOnly = Some(false)
val table = tEnv.from("MyTable")
.groupBy('a)
.select('a, 'b.sum())
execInsertTableAndWaitResult(table, "testSink")
val result = sink.getUpsertResults.sorted
val expected = List(
"1,0.1",
"2,0.4",
"3,1.0",
"4,2.2",
"5,3.9").sorted
assertEquals(expected, result)
}
@Test
def testUpsertSinkWithAppend(): Unit = {
val sink = prepareForUpsertSink()
sink.expectedKeys = None
sink.expectedIsAppendOnly = Some(true)
val table = tEnv.from("MyTable")
.select('a, 'b)
.where('a < 3)
execInsertTableAndWaitResult(table, "testSink")
val result = sink.getRawResults.sorted
val expected = List(
"(true,1,0.1)",
"(true,2,0.2)",
"(true,2,0.2)").sorted
assertEquals(expected, result)
}
private def prepareForRetractSink(): TestingRetractTableSink = {
val schema = TableSchema.builder()
.field("a", DataTypes.INT())
.field("b", DataTypes.DOUBLE())
.build()
val sink = new TestingRetractTableSink(TimeZone.getDefault)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(
"testSink", sink.configure(schema.getFieldNames, schema.getFieldTypes))
registerCollection("MyTable", simpleData2, simpleType2, "a, b", nullableOfSimpleData2)
sink
}
@Test
def testRetractSink(): Unit = {
val sink = prepareForRetractSink()
val table = tEnv.from("MyTable")
.groupBy('a)
.select('a, 'b.sum())
execInsertTableAndWaitResult(table, "testSink")
val result = sink.getRawResults.sorted
val expected = List(
"(true,1,0.1)",
"(true,2,0.4)",
"(true,3,1.0)",
"(true,4,2.2)",
"(true,5,3.9)").sorted
assertEquals(expected, result)
}
}
| tzulitai/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/LegacyTableSinkITCase.scala | Scala | apache-2.0 | 6,801 |
import scala.tools.partest.ReplTest
// Check that the fragments of code generated
// in the REPL correctly escape values added to
// literal strings.
//
// Before, we saw:
// scala> m("").x = 77
// <console>:10: error: ')' expected but string literal found.
// + "m("").x: Int = " + `$ires8` + "\\n"
object Test extends ReplTest {
def code = """
|case class `X"`(var xxx: Any)
|val m = Map(("": Any) -> `X"`("\\""), ('s: Any) -> `X"`("\\""))
|m("")
|m("").xxx
|m("").xxx = 0
|m("").xxx = "\\""
|m('s).xxx = 's
|val `"` = 0
""".stripMargin
}
| yusuke2255/dotty | tests/pending/run/t6549.scala | Scala | bsd-3-clause | 578 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.regression
import org.apache.spark.SparkFunSuite
import org.apache.spark.ml.param.ParamsSuite
import org.apache.spark.ml.util.MLTestingUtils
import org.apache.spark.mllib.linalg.{DenseVector, Vectors}
import org.apache.spark.mllib.util.{LinearDataGenerator, MLlibTestSparkContext}
import org.apache.spark.mllib.util.TestingUtils._
import org.apache.spark.sql.{DataFrame, Row}
class LinearRegressionSuite extends SparkFunSuite with MLlibTestSparkContext {
@transient var dataset: DataFrame = _
@transient var datasetWithoutIntercept: DataFrame = _
/*
In `LinearRegressionSuite`, we will make sure that the model trained by SparkML
is the same as the one trained by R's glmnet package. The following instruction
describes how to reproduce the data in R.
import org.apache.spark.mllib.util.LinearDataGenerator
val data =
sc.parallelize(LinearDataGenerator.generateLinearInput(6.3, Array(4.7, 7.2),
Array(0.9, -1.3), Array(0.7, 1.2), 10000, 42, 0.1), 2)
data.map(x=> x.label + ", " + x.features(0) + ", " + x.features(1)).coalesce(1)
.saveAsTextFile("path")
*/
override def beforeAll(): Unit = {
super.beforeAll()
dataset = sqlContext.createDataFrame(
sc.parallelize(LinearDataGenerator.generateLinearInput(
6.3, Array(4.7, 7.2), Array(0.9, -1.3), Array(0.7, 1.2), 10000, 42, 0.1), 2))
/*
datasetWithoutIntercept is not needed for correctness testing, but is useful for illustrating
training a model without an intercept.
*/
datasetWithoutIntercept = sqlContext.createDataFrame(
sc.parallelize(LinearDataGenerator.generateLinearInput(
0.0, Array(4.7, 7.2), Array(0.9, -1.3), Array(0.7, 1.2), 10000, 42, 0.1), 2))
}
test("params") {
ParamsSuite.checkParams(new LinearRegression)
val model = new LinearRegressionModel("linearReg", Vectors.dense(0.0), 0.0)
ParamsSuite.checkParams(model)
}
test("linear regression: default params") {
val lir = new LinearRegression
assert(lir.getLabelCol === "label")
assert(lir.getFeaturesCol === "features")
assert(lir.getPredictionCol === "prediction")
assert(lir.getRegParam === 0.0)
assert(lir.getElasticNetParam === 0.0)
assert(lir.getFitIntercept)
assert(lir.getStandardization)
val model = lir.fit(dataset)
// copied model must have the same parent.
MLTestingUtils.checkCopy(model)
model.transform(dataset)
.select("label", "prediction")
.collect()
assert(model.getFeaturesCol === "features")
assert(model.getPredictionCol === "prediction")
assert(model.intercept !== 0.0)
assert(model.hasParent)
}
test("linear regression with intercept without regularization") {
val trainer1 = new LinearRegression
// The result should be the same regardless of standardization without regularization
val trainer2 = (new LinearRegression).setStandardization(false)
val model1 = trainer1.fit(dataset)
val model2 = trainer2.fit(dataset)
/*
Using the following R code to load the data and train the model using glmnet package.
library("glmnet")
data <- read.csv("path", header=FALSE, stringsAsFactors=FALSE)
features <- as.matrix(data.frame(as.numeric(data$V2), as.numeric(data$V3)))
label <- as.numeric(data$V1)
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0, lambda = 0))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) 6.298698
as.numeric.data.V2. 4.700706
as.numeric.data.V3. 7.199082
*/
val interceptR = 6.298698
val weightsR = Vectors.dense(4.700706, 7.199082)
assert(model1.intercept ~== interceptR relTol 1E-3)
assert(model1.weights ~= weightsR relTol 1E-3)
assert(model2.intercept ~== interceptR relTol 1E-3)
assert(model2.weights ~= weightsR relTol 1E-3)
model1.transform(dataset).select("features", "prediction").collect().foreach {
case Row(features: DenseVector, prediction1: Double) =>
val prediction2 =
features(0) * model1.weights(0) + features(1) * model1.weights(1) + model1.intercept
assert(prediction1 ~== prediction2 relTol 1E-5)
}
}
test("linear regression without intercept without regularization") {
val trainer1 = (new LinearRegression).setFitIntercept(false)
// Without regularization the results should be the same
val trainer2 = (new LinearRegression).setFitIntercept(false).setStandardization(false)
val model1 = trainer1.fit(dataset)
val modelWithoutIntercept1 = trainer1.fit(datasetWithoutIntercept)
val model2 = trainer2.fit(dataset)
val modelWithoutIntercept2 = trainer2.fit(datasetWithoutIntercept)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0, lambda = 0,
intercept = FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) .
as.numeric.data.V2. 6.995908
as.numeric.data.V3. 5.275131
*/
val weightsR = Vectors.dense(6.995908, 5.275131)
assert(model1.intercept ~== 0 absTol 1E-3)
assert(model1.weights ~= weightsR relTol 1E-3)
assert(model2.intercept ~== 0 absTol 1E-3)
assert(model2.weights ~= weightsR relTol 1E-3)
/*
Then again with the data with no intercept:
> weightsWithoutIntercept
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) .
as.numeric.data3.V2. 4.70011
as.numeric.data3.V3. 7.19943
*/
val weightsWithoutInterceptR = Vectors.dense(4.70011, 7.19943)
assert(modelWithoutIntercept1.intercept ~== 0 absTol 1E-3)
assert(modelWithoutIntercept1.weights ~= weightsWithoutInterceptR relTol 1E-3)
assert(modelWithoutIntercept2.intercept ~== 0 absTol 1E-3)
assert(modelWithoutIntercept2.weights ~= weightsWithoutInterceptR relTol 1E-3)
}
test("linear regression with intercept with L1 regularization") {
val trainer1 = (new LinearRegression).setElasticNetParam(1.0).setRegParam(0.57)
val trainer2 = (new LinearRegression).setElasticNetParam(1.0).setRegParam(0.57)
.setStandardization(false)
val model1 = trainer1.fit(dataset)
val model2 = trainer2.fit(dataset)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 1.0, lambda = 0.57))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) 6.24300
as.numeric.data.V2. 4.024821
as.numeric.data.V3. 6.679841
*/
val interceptR1 = 6.24300
val weightsR1 = Vectors.dense(4.024821, 6.679841)
assert(model1.intercept ~== interceptR1 relTol 1E-3)
assert(model1.weights ~= weightsR1 relTol 1E-3)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 1.0, lambda = 0.57,
standardize=FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) 6.416948
as.numeric.data.V2. 3.893869
as.numeric.data.V3. 6.724286
*/
val interceptR2 = 6.416948
val weightsR2 = Vectors.dense(3.893869, 6.724286)
assert(model2.intercept ~== interceptR2 relTol 1E-3)
assert(model2.weights ~= weightsR2 relTol 1E-3)
model1.transform(dataset).select("features", "prediction").collect().foreach {
case Row(features: DenseVector, prediction1: Double) =>
val prediction2 =
features(0) * model1.weights(0) + features(1) * model1.weights(1) + model1.intercept
assert(prediction1 ~== prediction2 relTol 1E-5)
}
}
test("linear regression without intercept with L1 regularization") {
val trainer1 = (new LinearRegression).setElasticNetParam(1.0).setRegParam(0.57)
.setFitIntercept(false)
val trainer2 = (new LinearRegression).setElasticNetParam(1.0).setRegParam(0.57)
.setFitIntercept(false).setStandardization(false)
val model1 = trainer1.fit(dataset)
val model2 = trainer2.fit(dataset)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 1.0, lambda = 0.57,
intercept=FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) .
as.numeric.data.V2. 6.299752
as.numeric.data.V3. 4.772913
*/
val interceptR1 = 0.0
val weightsR1 = Vectors.dense(6.299752, 4.772913)
assert(model1.intercept ~== interceptR1 absTol 1E-3)
assert(model1.weights ~= weightsR1 relTol 1E-3)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 1.0, lambda = 0.57,
intercept=FALSE, standardize=FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) .
as.numeric.data.V2. 6.232193
as.numeric.data.V3. 4.764229
*/
val interceptR2 = 0.0
val weightsR2 = Vectors.dense(6.232193, 4.764229)
assert(model2.intercept ~== interceptR2 absTol 1E-3)
assert(model2.weights ~= weightsR2 relTol 1E-3)
model1.transform(dataset).select("features", "prediction").collect().foreach {
case Row(features: DenseVector, prediction1: Double) =>
val prediction2 =
features(0) * model1.weights(0) + features(1) * model1.weights(1) + model1.intercept
assert(prediction1 ~== prediction2 relTol 1E-5)
}
}
test("linear regression with intercept with L2 regularization") {
val trainer1 = (new LinearRegression).setElasticNetParam(0.0).setRegParam(2.3)
val trainer2 = (new LinearRegression).setElasticNetParam(0.0).setRegParam(2.3)
.setStandardization(false)
val model1 = trainer1.fit(dataset)
val model2 = trainer2.fit(dataset)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0.0, lambda = 2.3))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) 5.269376
as.numeric.data.V2. 3.736216
as.numeric.data.V3. 5.712356)
*/
val interceptR1 = 5.269376
val weightsR1 = Vectors.dense(3.736216, 5.712356)
assert(model1.intercept ~== interceptR1 relTol 1E-3)
assert(model1.weights ~= weightsR1 relTol 1E-3)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0.0, lambda = 2.3,
standardize=FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) 5.791109
as.numeric.data.V2. 3.435466
as.numeric.data.V3. 5.910406
*/
val interceptR2 = 5.791109
val weightsR2 = Vectors.dense(3.435466, 5.910406)
assert(model2.intercept ~== interceptR2 relTol 1E-3)
assert(model2.weights ~= weightsR2 relTol 1E-3)
model1.transform(dataset).select("features", "prediction").collect().foreach {
case Row(features: DenseVector, prediction1: Double) =>
val prediction2 =
features(0) * model1.weights(0) + features(1) * model1.weights(1) + model1.intercept
assert(prediction1 ~== prediction2 relTol 1E-5)
}
}
test("linear regression without intercept with L2 regularization") {
val trainer1 = (new LinearRegression).setElasticNetParam(0.0).setRegParam(2.3)
.setFitIntercept(false)
val trainer2 = (new LinearRegression).setElasticNetParam(0.0).setRegParam(2.3)
.setFitIntercept(false).setStandardization(false)
val model1 = trainer1.fit(dataset)
val model2 = trainer2.fit(dataset)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0.0, lambda = 2.3,
intercept = FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) .
as.numeric.data.V2. 5.522875
as.numeric.data.V3. 4.214502
*/
val interceptR1 = 0.0
val weightsR1 = Vectors.dense(5.522875, 4.214502)
assert(model1.intercept ~== interceptR1 absTol 1E-3)
assert(model1.weights ~= weightsR1 relTol 1E-3)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0.0, lambda = 2.3,
intercept = FALSE, standardize=FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) .
as.numeric.data.V2. 5.263704
as.numeric.data.V3. 4.187419
*/
val interceptR2 = 0.0
val weightsR2 = Vectors.dense(5.263704, 4.187419)
assert(model2.intercept ~== interceptR2 absTol 1E-3)
assert(model2.weights ~= weightsR2 relTol 1E-3)
model1.transform(dataset).select("features", "prediction").collect().foreach {
case Row(features: DenseVector, prediction1: Double) =>
val prediction2 =
features(0) * model1.weights(0) + features(1) * model1.weights(1) + model1.intercept
assert(prediction1 ~== prediction2 relTol 1E-5)
}
}
test("linear regression with intercept with ElasticNet regularization") {
val trainer1 = (new LinearRegression).setElasticNetParam(0.3).setRegParam(1.6)
val trainer2 = (new LinearRegression).setElasticNetParam(0.3).setRegParam(1.6)
.setStandardization(false)
val model1 = trainer1.fit(dataset)
val model2 = trainer2.fit(dataset)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0.3, lambda = 1.6))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) 6.324108
as.numeric.data.V2. 3.168435
as.numeric.data.V3. 5.200403
*/
val interceptR1 = 5.696056
val weightsR1 = Vectors.dense(3.670489, 6.001122)
assert(model1.intercept ~== interceptR1 relTol 1E-3)
assert(model1.weights ~= weightsR1 relTol 1E-3)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0.3, lambda = 1.6,
standardize=FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) 6.114723
as.numeric.data.V2. 3.409937
as.numeric.data.V3. 6.146531
*/
val interceptR2 = 6.114723
val weightsR2 = Vectors.dense(3.409937, 6.146531)
assert(model2.intercept ~== interceptR2 relTol 1E-3)
assert(model2.weights ~= weightsR2 relTol 1E-3)
model1.transform(dataset).select("features", "prediction").collect().foreach {
case Row(features: DenseVector, prediction1: Double) =>
val prediction2 =
features(0) * model1.weights(0) + features(1) * model1.weights(1) + model1.intercept
assert(prediction1 ~== prediction2 relTol 1E-5)
}
}
test("linear regression without intercept with ElasticNet regularization") {
val trainer1 = (new LinearRegression).setElasticNetParam(0.3).setRegParam(1.6)
.setFitIntercept(false)
val trainer2 = (new LinearRegression).setElasticNetParam(0.3).setRegParam(1.6)
.setFitIntercept(false).setStandardization(false)
val model1 = trainer1.fit(dataset)
val model2 = trainer2.fit(dataset)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0.3, lambda = 1.6,
intercept=FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) .
as.numeric.dataM.V2. 5.673348
as.numeric.dataM.V3. 4.322251
*/
val interceptR1 = 0.0
val weightsR1 = Vectors.dense(5.673348, 4.322251)
assert(model1.intercept ~== interceptR1 absTol 1E-3)
assert(model1.weights ~= weightsR1 relTol 1E-3)
/*
weights <- coef(glmnet(features, label, family="gaussian", alpha = 0.3, lambda = 1.6,
intercept=FALSE, standardize=FALSE))
> weights
3 x 1 sparse Matrix of class "dgCMatrix"
s0
(Intercept) .
as.numeric.data.V2. 5.477988
as.numeric.data.V3. 4.297622
*/
val interceptR2 = 0.0
val weightsR2 = Vectors.dense(5.477988, 4.297622)
assert(model2.intercept ~== interceptR2 absTol 1E-3)
assert(model2.weights ~= weightsR2 relTol 1E-3)
model1.transform(dataset).select("features", "prediction").collect().foreach {
case Row(features: DenseVector, prediction1: Double) =>
val prediction2 =
features(0) * model1.weights(0) + features(1) * model1.weights(1) + model1.intercept
assert(prediction1 ~== prediction2 relTol 1E-5)
}
}
test("linear regression model training summary") {
val trainer = new LinearRegression
val model = trainer.fit(dataset)
// Training results for the model should be available
assert(model.hasSummary)
// Residuals in [[LinearRegressionResults]] should equal those manually computed
val expectedResiduals = dataset.select("features", "label")
.map { case Row(features: DenseVector, label: Double) =>
val prediction =
features(0) * model.weights(0) + features(1) * model.weights(1) + model.intercept
label - prediction
}
.zip(model.summary.residuals.map(_.getDouble(0)))
.collect()
.foreach { case (manualResidual: Double, resultResidual: Double) =>
assert(manualResidual ~== resultResidual relTol 1E-5)
}
/*
Use the following R code to generate model training results.
predictions <- predict(fit, newx=features)
residuals <- label - predictions
> mean(residuals^2) # MSE
[1] 0.009720325
> mean(abs(residuals)) # MAD
[1] 0.07863206
> cor(predictions, label)^2# r^2
[,1]
s0 0.9998749
*/
assert(model.summary.meanSquaredError ~== 0.00972035 relTol 1E-5)
assert(model.summary.meanAbsoluteError ~== 0.07863206 relTol 1E-5)
assert(model.summary.r2 ~== 0.9998749 relTol 1E-5)
// Objective function should be monotonically decreasing for linear regression
assert(
model.summary
.objectiveHistory
.sliding(2)
.forall(x => x(0) >= x(1)))
}
test("linear regression model testset evaluation summary") {
val trainer = new LinearRegression
val model = trainer.fit(dataset)
// Evaluating on training dataset should yield results summary equal to training summary
val testSummary = model.evaluate(dataset)
assert(model.summary.meanSquaredError ~== testSummary.meanSquaredError relTol 1E-5)
assert(model.summary.r2 ~== testSummary.r2 relTol 1E-5)
model.summary.residuals.select("residuals").collect()
.zip(testSummary.residuals.select("residuals").collect())
.forall { case (Row(r1: Double), Row(r2: Double)) => r1 ~== r2 relTol 1E-5 }
}
}
| practice-vishnoi/dev-spark-1 | mllib/src/test/scala/org/apache/spark/ml/regression/LinearRegressionSuite.scala | Scala | apache-2.0 | 19,572 |
package keehive
object AppController:
val welcomeBanner = raw"""
|********************************************
| _ _ _
| | | | | (_)
| | | _____ ___| |__ ___ _____
| | |/ / _ \\/ _ \\ '_ \\| \\ \\ / / _ \\
| | < __/ __/ | | | |\\ V / __/
| |_|\\_\\___|\\___|_| |_|_| \\_/ \\___|
|
| Welcome to keehive version ${Main.version}
| ${Main.GitHubUrl}
|
| Type ? and press Enter for help.
| Press TAB for completion.
| Press Ctrl+L to clear screen.
|
| Built with Scala 3 using stdlib ${util.Properties.versionNumberString} running on:
| ${System.getProperty("java.vm.name")} ${System.getProperty("java.version")}
|
|********************************************
""".stripMargin
val helpText = s"""
|keehive is a terminal password manager
|
|Press TAB for completion of these commands (alphabetical order):
|
|add add a new record, enter each field after prompt
|add id add a new record with id, enter each field after prompt
|
|copy s copy password of record with id starting with s
| example: c<TAB> s<TAB> copy password of id starting with s
|copy s f copy field f of record with id starting with s
| example: c<TAB> someId url copy the url field of someId
|
|del 42 delete the record at index 42
|del id delete the record with id
|
|edit 42 edit the record at index 42
|edit id edit the record with id
|edit i f edit/add the field f of record with id/index i
|
export export all records to clipboard as plain text
|
|help show this message; also ?
|
|import import records from clipboard
|
|list list summary of all records, hide password
|list 42 list fields of record with index 42, hide password
|list s list fields of record with id that starts with s, hide password
|
|print prints all records including password
|
|quit quit keehive; also Ctrl+D
|
|show list summary of all records, show password
|show 42 list fields of record with index 42, show password
|show s list fields of record with id that starts with s, show password
|
|update check for new versions of keehive, download and install
""".stripMargin
val cmdPrompt = "\\nkeehive> "
val mpwPrompt = "Enter master password: "
def readMasterPassword(msg: String = mpwPrompt): String = Terminal.getSecret("\\n" + msg)
def abortIfUnableToVerifyMasterPassword(): Unit =
val verifyMpw = readMasterPassword(" Enter password again: ")
if verifyMpw != lastEnteredMasterPassword
then Main.abort("Entered passwords do not match.")
else ()
// ----------------- mutable attributes ------------------------------
private var vault: Vault = scala.compiletime.uninitialized
private var userName: String = scala.compiletime.uninitialized
private var enteredMasterPassword: String = scala.compiletime.uninitialized
def lastEnteredMasterPassword = enteredMasterPassword
// --------------- Command Control ----------------------
def start(): Unit =
Terminal.put(welcomeBanner)
Terminal.put(s"Vault directory: $canonicalPath")
Settings.load()
userName =
Terminal.get("\\nEnter username: ",
default = Settings("defaultUser").getOrElse(System.getProperty("user.name")))
if userName == Terminal.CtrlD then Main.quit()
enteredMasterPassword = readMasterPassword()
if enteredMasterPassword == Terminal.CtrlD then Main.quit()
val Vault.Result(vaultOpt, isCreated) =
Vault.open(user = userName, masterPassword = enteredMasterPassword, path = Main.path)
if vaultOpt.isDefined then
vault = vaultOpt.get
if isCreated then notifyMpwCreated() else notifyMpwGood()
setCompletions()
notifyIfUpdateAvailable()
cmdLoop()
else abortMpwBad()
@annotation.tailrec
final def cmdLoop(): Unit =
val cmdLine = Terminal.get(cmdPrompt)
if cmdLine == Terminal.CtrlD then Main.quit()
val (cmd, arg) = cmdArg(cmdLine)
doCmd(cmd, arg)
cmdLoop()
def cmdArg(cmdLine: String): (String, String) =
val (cmd, arg) = cmdLine.span(_ != ' ')
(cmd, arg.trim)
case class Cmd(cmd: String, exec: String => Unit, helpMsg: String= "")
val commands = Vector(
Cmd("add", addRecord),
Cmd("delete", deleteRecord),
Cmd("edit", editRecord),
Cmd("genpw", _ => copyNewPasswordToClipboard()),
Cmd("list", listRecords(_, isShowAll = false)),
Cmd("show", listRecords(_, isShowAll = true)),
Cmd("print", _ => println(showAllRecordsAndFields)),
Cmd("copy", copyRecord),
Cmd("export", _ => exportAllToClipboard()),
Cmd("import", _ => importFromClipboard()),
Cmd("update", _ => checkForUpdateAndInstall()),
Cmd("help", help),
Cmd("quit", _ => Main.quit())
)
lazy val helpLines: Seq[String] = helpText.split('\\n').toSeq
def helpCmd(cmd: String): String =
val initDropped = helpLines.dropWhile(line => !line.startsWith(cmd))
initDropped.takeWhile(line => line.startsWith(cmd)).mkString("\\n")
def splitArg(arg: String): Seq[String] =
arg.split(' ').toVector.map(_.trim).filterNot(_.isEmpty)
def help(arg: String = ""): Unit =
if arg == "" then Terminal.put(helpText)
else splitArg(arg).foreach(arg => Terminal.put(helpCmd(arg)))
def doCmd(cmd: String, arg: String): Unit =
if cmd == "?" then help(arg)
else if cmd.nonEmpty then
val firstFoundCmdOpt = commands.find(_.cmd == cmd)
if firstFoundCmdOpt.isDefined then firstFoundCmdOpt.get.exec(arg)
else Terminal.put(s"Unknown command: $cmd\\nTry ? for help")
// -------------- Constants to access fields in Secrets ------------
final val Id = "id"
final val Pw = "pw"
final val OldPw = "oldpw"
final val SecretFields = Vector(Pw,"oldpw")
final val EnterFields = Vector("url", "user", Pw, "info")
final val FieldsInOrder = Vector(Id, Pw, "url", "info", "user", "oldpw")
final val MaxFieldLength = FieldsInOrder.map(_.length).max
final val MaxLineLength = 50
// --------------- Notifications to user ----------------------------
def notifyMpwCreated(): Unit = Terminal.put("New master password file created.")
def notifyMpwGood(): Unit = Terminal.put("Your vault is open! Make sure no one is peeking!")
def notifySaveVault(n: Int): Unit = Terminal.put(s"Saving $n secrets in vault.")
def abortMpwBad(): Unit = Main.abort("Bad master password :( ACCESS DENIED!")
def notifyIdExists(): Unit = Terminal.put(s"That $Id already exists; pick another.")
def notifyRecordNotFound(): Unit = Terminal.put(s"That record does not exist.")
def notifyIndexNotFound(): Unit = Terminal.put(s"Index out of bounds.")
def notifyIdMustBeOneWord(): Unit = Terminal.put(s"$Id must be one word.")
def notifyIdCannotBeInteger(): Unit = Terminal.put(s"$Id cannot be integer.")
// ----------------- utilities --------------------------------------
def canonicalPath: String = new java.io.File(Main.path).getCanonicalPath
def randomStr(n: Int = 8): String = java.util.UUID.randomUUID().toString.take(n)
def randomId(): String =
var rid = randomStr()
while vault.isExisting(field = Id, value = rid) do rid = randomStr()
rid
def indentString(s: String, indent: Int = 2): String =
val pad: String = " " * indent
s.split("\\n").mkString(pad, "\\n" + pad, "")
def toIntOpt(s: String): Option[Int] = scala.util.Try(s.toInt).toOption
def isInt(xs: String*): Boolean = xs.forall(s => toIntOpt(s).isDefined)
def showAllFields(r: Secret): String = r.showLines(FieldsInOrder,Seq())
def showAllRecordsAndFields: String =
vault.toVector.map(showAllFields).mkString("","\\n\\n","\\n")
def showRecordById(id: String, fieldsToExclude: Seq[String]): Unit =
val i = vault.indexStartsWith(field = Id, valueStartsWith = id)
if i >= 0 then Terminal.put(vault(i).showLines(FieldsInOrder, fieldsToExclude))
else notifyRecordNotFound()
def showRecordByIndex(ix: Int, fieldsToExclude: Seq[String]): Unit =
if ix >= 0 && ix < vault.size then
Terminal.put(vault(ix).showLines(FieldsInOrder, fieldsToExclude))
else notifyIndexNotFound()
def listRange(fromIndex: Int, untilIndex: Int, fieldsToExclude: Seq[String]): Unit =
for i <- fromIndex until untilIndex do
val maybeTooLongString = vault(i).show(FieldsInOrder, fieldsToExclude)
val showString = maybeTooLongString.take(MaxLineLength)
val continued = if maybeTooLongString.length > MaxLineLength then "..." else ""
Terminal.put(s"[$i] $showString$continued")
def setCompletions(): Unit =
val cs = commands.map(_.cmd)
val _: Boolean = //return value ignored
Terminal.setCompletions(cs, vault.valuesOf(Id).filterNot(_.isEmpty))
def userInput(fields: Seq[String], default: Map[String, String] = Map()): Map[String, String] =
fields.map { field =>
val pad = " " * (MaxFieldLength - field.length + 1)
val prompt = s"$field:$pad"
val input =
if SecretFields contains field then Terminal.getSecret(prompt)
else Terminal.get(prompt, default.getOrElse(field,""))
val value = if input == Terminal.CtrlD then "" else
if field == Pw && input == "" then
if Terminal.isOk("Generate new password? ENTER=no ") then
generatePassword()
else default.getOrElse(Pw,"")
else input
(field, value)
}.toMap
def copyToClipboardAndNotify(s: String): Unit =
Clipboard.put(s)
Terminal.put(s"${s.length} characters copied to clipboard! Paste with Ctrl+V")
def fixLine(line: String): String =
if line contains ':' then line
else
val r = randomStr()
Terminal.put(s"\\n*** [warn] random field name ?$r added as colon is missing in line:\\n$line\\n")
s"?$r: $line"
def fixPair(s: String): (String, String) =
val indexOfColon = s.indexOf(':')
val (k, v) = s.splitAt(indexOfColon + 1)
(k.dropRight(1), v) //remove colon (guaranteed to exits by fixLine)
def parseFields(lines: String): Option[Secret] =
val xs = lines.split('\\n').filterNot(_.isEmpty).map(fixLine)
val kvs: Map[String, String] = xs.map(fixPair).collect {
case (k,v) if !v.isEmpty => (k.trim,v.trim)
}.toMap
def kvsWithId = if !kvs.isDefinedAt(Id) then kvs + (Id -> randomStr()) else kvs
if kvs.nonEmpty then Some(Secret(kvsWithId)) else None
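// Sketch of the plain-text record format consumed above (and produced by exportAllToClipboard):
// one "field: value" line per field, records separated by a blank line; a random id is generated
// when the id field is missing. The field values below are only an example.
//
//   id: github
//   user: alice
//   pw: s3cret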
def notifyIfUpdateAvailable(): Unit =
def isFirstVersionGreater(vs: Seq[(String, String)]): Boolean =
if vs.nonEmpty then
val (first, second) = (toIntOpt(vs.head._1).getOrElse(0), toIntOpt(vs.head._2).getOrElse(0))
if first == second then isFirstVersionGreater(vs.drop(1))
else first > second
else false
if Main.latestVersion.nonEmpty then
val vs = Main.latestVersion.split('.') zip Main.version.split('.')
if isFirstVersionGreater(vs.toIndexedSeq) then
Terminal.put(s"Keehive version ${Main.latestVersion} is available. Type 'update' to install.")
def generatePassword(): String =
val length = Settings.asInt("generatePasswordLength").getOrElse(20)
val chars = Settings("generatePasswordChars").getOrElse("0-9 A-Z a-z")
//Terminal.put(s"Generating $length characters that may include: $chars")
//Terminal.put(s"Password generation settings in file: ${Settings.fileName}")
Crypto.Password.generate(length, chars)
// ----------------- commands ---------------------------------------
def addRecord(arg: String): Unit =
val args = splitArg(arg)
val idMaybe = if args.nonEmpty then args.head else Terminal.get(Id + ": ")
val id = idMaybe.takeWhile(_ != ' ')
if id != idMaybe then notifyIdMustBeOneWord()
else if isInt(id) then notifyIdCannotBeInteger()
else
if !vault.isExisting(field = Id, value = id) then
val xs = userInput(EnterFields) + (Id -> id)
val n = vault.add(Secret(xs))
Terminal.put(s"New secret at last index: ${n - 1}")
setCompletions()
else notifyIdExists()
def deleteRecord(arg: String): Unit =
splitArg(arg) match
case Seq() => Terminal.put(s"Missing argument: index or id")
case Seq(ix) if isInt(ix) =>
val i = ix.toInt
if i >= 0 && i < vault.size then
if Terminal.isOk(s"Are you sure that you want to delete [$i]") then
vault.remove(i)
setCompletions()
Terminal.put(s"Record at old index [$i] removed.")
else Terminal.put(s"Delete aborted.")
else notifyIndexNotFound()
case Seq(ix1, ix2) if isInt(ix1) && isInt(ix2) =>
val (start, end) = (ix1.toInt, ix2.toInt)
if start >= 0 && start < vault.size && end > start && end < vault.size then
val n = end - start + 1
if Terminal.isOk(s"Are you sure that you want to delete $n records at [$start-$end]") then
vault.remove(start, n)
setCompletions()
Terminal.put(s"Record at old indices [$start-$end] removed.")
else Terminal.put(s"Delete aborted.")
else notifyIndexNotFound()
case Seq(id) =>
val i = vault.indexWhere(field = Id, value = id)
if i < 0 then notifyRecordNotFound()
else if Terminal.isOk(s"Are you sure that you want to delete id:$id") then
vault.remove(i)
setCompletions()
Terminal.put(s"Record at old index [$i] with id:$id removed.")
else Terminal.put(s"Delete aborted.")
case _ => Terminal.put(s"Too many arguments: $arg")
def editRecord(arg: String): Unit =
splitArg(arg) match
case Seq() => Terminal.put(s"Give index or id as argument!")
case args if args.size <= 2 =>
val i = if isInt(args.head) then args.head.toInt
else vault.indexWhere(field = Id, value = args.head)
if i >= 0 && i < vault.size then
val id = vault(i).get(Id)
Terminal.put(s"Edit record with id:$id\\n")
val default = vault(i).data
val fieldsToEdit: Seq[String] =
if args.size > 1 then args.drop(1)
else (vault(i).data.keySet - OldPw - Id).toSeq
val edited = userInput(fieldsToEdit, default) + (Id -> id)
val (enteredPw, existingPw) = (edited.getOrElse(Pw, ""), vault(i).get(Pw))
val appendOldPwMap: Map[String, String] =
if enteredPw != existingPw then Map(OldPw -> existingPw) else Map()
vault(i) = Secret(vault(i).data ++ edited ++ appendOldPwMap)
Terminal.put(s"\\nEdited record with id:$id")
listRecords(i.toString, isShowAll = false)
else notifyRecordNotFound()
case _ => Terminal.put(s"Too many arguments: $arg")
def copyNewPasswordToClipboard(): Unit = copyToClipboardAndNotify(generatePassword())
def listRecords(arg: String, isShowAll: Boolean): Unit =
val fieldsToExclude = if isShowAll then Seq() else SecretFields
splitArg(arg) match
case Seq() => listRange(0, vault.size, fieldsToExclude)
case Seq(ix) if isInt(ix) => showRecordByIndex(ix.toInt, fieldsToExclude)
case Seq(id) => showRecordById(id, fieldsToExclude)
case Seq(ix1, ix2) if isInt(ix1) && isInt(ix2) =>
val last = vault.size - 1
val (a, b) = (ix1.toInt min last max 0, ix2.toInt min last max 0)
listRange(fromIndex = a, untilIndex = b + 1, fieldsToExclude)
case _ => Terminal.put(s"too many arguments: $arg")
def copyRecord(arg: String): Unit =
splitArg(arg) match
case Seq() => Terminal.put(s"Give index or id as argument!")
case args if args.size <= 2 =>
val fieldToCopy = args.lift(1).getOrElse(Pw)
val i = if isInt(args.head) then args.head.toInt
else vault.indexStartsWith(field = Id, valueStartsWith = args.head)
if i >= 0 && i < vault.size then copyToClipboardAndNotify(vault(i).get(fieldToCopy))
else notifyRecordNotFound()
case _ => Terminal.put(s"Too many arguments: $arg")
def exportAllToClipboard(): Unit =
Clipboard.put(showAllRecordsAndFields)
Terminal.put(s"${vault.size} records copied to clipboard.")
def checkForDuplicates(fields: Seq[Secret] ): Seq[Secret] =
val newIds = fields.toSet[Secret].map(s => s.get(Id))
val existingIds = vault.toSet.map(s => s.get(Id))
val duplicates = newIds intersect existingIds
if duplicates.nonEmpty then
Terminal.put("\\n *** WARNING! Duplicate ids detected: " + duplicates.mkString(", "))
if Terminal.isOk("Do you want to remove all these ids in vault before importing?") then
vault.removeValuesOfField(duplicates.toSeq, Id)
else Terminal.put("Duplicates kept in vault.")
val pairs = fields.map(s => (s.get(Id), s))
val distinctPairs = pairs.toMap.toSeq
if pairs.size != distinctPairs.size then
if Terminal.isOk("Duplicates among import detected. Keep last in sequence?") then
distinctPairs.map(_._2)
else fields
else fields
def importFromClipboard(): Unit =
val items = Clipboard.get().split("\\n\\n").toSeq
val fields = items.filterNot(_.isEmpty).flatMap(parseFields)
val n = fields.size
Terminal.put(fields.map(_.get("id")).mkString(", "))
if Terminal.isOk(s"Do you want to append the $n records to your vault?") then
val fieldsToAppend = checkForDuplicates(fields)
vault.add(fieldsToAppend:_*)
setCompletions()
def checkForUpdateAndInstall(): Unit =
if Main.latestVersion.nonEmpty then
if Main.latestVersion != Main.version then
if Terminal.isOk(s"Version ${Main.latestVersion} is available. Download and install?") then
Main.install()
else Terminal.put("Installation aborted.")
else Terminal.put(s"Already up to date! Current version of keehive is ${Main.version}")
else Terminal.put("No information on latest version available.")
| bjornregnell/keehive | src/main/scala/keehive/AppController.scala | Scala | gpl-3.0 | 18,069 |
package latis.ops
import latis.dm._
import latis.dm.Scalar
import latis.dm.Text
import latis.util.iterator.MappingIterator
import latis.util.LatisServiceException
class TextAppender(name: String, suffix: String) extends Operation() {
/**
* Append 'suffix' to any Text Variable named 'name'.
*/
override def applyToScalar(s: Scalar): Option[Variable] = s.hasName(name) match {
case true => {
val t = s match {
case Text(str) => str + suffix
case _ => throw new LatisServiceException("Can only append to a Text Variable.")
}
val md = s.getMetadata + ("length", t.length.toString)
Some(Text(md, t))
}
case false => Some(s)
}
/**
* Override to apply Operation to domain as well as range.
*/
override def applyToSample(sample: Sample): Option[Sample] = {
val od = applyToVariable(sample.domain)
val or = applyToVariable(sample.range)
(od, or) match {
case (Some(d), Some(r)) => Some(Sample(d, r))
case _ => None //this will never happen
}
}
}
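// A small usage sketch (arguments are hypothetical; assumes the Operation base class applies the
// instance to a dataset):
//
//   // val appendUnits = TextAppender("wavelength", " nm")
//   // val withUnits = appendUnits(dataset)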
object TextAppender extends OperationFactory {
override def apply(args: Seq[String]): TextAppender = new TextAppender(args(0), args(1))
def apply(name: String, suffix: String): TextAppender = new TextAppender(name, suffix)
}
| dlindhol/LaTiS | src/main/scala/latis/ops/TextAppender.scala | Scala | epl-1.0 | 1,285 |
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.frame.internal.ops.statistics.descriptives
import org.trustedanalytics.sparktk.frame.DataTypes
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.scalatest.Matchers
import org.trustedanalytics.sparktk.testutils.TestingSparkContextWordSpec
/**
* Exercises the column statistics functions. Primarily checks that correct column indices and options are piped
* through to the underlying statistics engines. Thorough evaluation of the statistical operations is done by the
* tests for the respective statistics engines.
*/
class ColumnStatisticsITest extends TestingSparkContextWordSpec with Matchers {
trait ColumnStatisticsTest {
val epsilon = 0.000000001
// Input data
val row0 = Row("A", 1, 2.0f, 2, 3, 1.0f, 0, 0)
val row1 = Row("B", 1, 2.0f, 1, 3, 2.0f, 0, 0)
val row2 = Row("C", 1, 2.0f, 3, 2, 0.0f, 10, 0)
val row3 = Row("D", 1, 2.0f, 6, 1, 1.0f, 0, 0)
val row4 = Row("E", 1, 2.0f, 7, 1, 2.0f, 0, 0)
val rowRDD: RDD[Row] = sparkContext.parallelize(List(row0, row1, row2, row3, row4))
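    // Column roles in the tests below: columns 1, 3, 4 and 6 act as integer
    // weights, columns 2 and 5 as float data, column 4 also as integer data, and
    // column 7 is an all-zero weight used for the "no net weight" cases.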
}
"ColumnStatistics.columnMode" should {
"with no net weight, return none as json" in new ColumnStatisticsTest() {
val testMode = ColumnStatistics.columnMode(0, DataTypes.string, Some(7, DataTypes.int32), None, rowRDD)
testMode.modes shouldBe Array.empty[String]
}
"support weighted mode" in new ColumnStatisticsTest() {
val testMode = ColumnStatistics.columnMode(0, DataTypes.string, Some(3, DataTypes.int32), None, rowRDD)
testMode.modes shouldBe Array("E")
}
}
"ColumnStatistics.columnSummaryStatistics" should {
"support unweighted summary statistics" in new ColumnStatisticsTest() {
val stats: ColumnSummaryStatisticsReturn = ColumnStatistics.columnSummaryStatistics(2,
DataTypes.float32,
None,
None,
rowRDD,
usePopulationVariance = false)
Math.abs(stats.mean - 2.0) should be < epsilon
}
"support weighted summary statistics" in new ColumnStatisticsTest() {
val stats: ColumnSummaryStatisticsReturn =
ColumnStatistics.columnSummaryStatistics(5, DataTypes.float32, Some(4), Some(DataTypes.int32), rowRDD, usePopulationVariance = false)
Math.abs(stats.mean - 1.2) should be < epsilon
}
}
"ColumnStatistics.columnMedian" should {
"support unweighted float median" in new ColumnStatisticsTest() {
val median = ColumnStatistics.columnMedian(2, DataTypes.float32, None, rowRDD)
median shouldBe Some(2.0)
}
"support weighted float median" in new ColumnStatisticsTest() {
val median =
ColumnStatistics.columnMedian(5, DataTypes.float32, Some(6, DataTypes.int32), rowRDD)
median shouldBe Some(0.0)
}
"support unweighted integer median" in new ColumnStatisticsTest() {
val median =
ColumnStatistics.columnMedian(4, DataTypes.int32, None, rowRDD)
median shouldBe Some(2)
}
"support weighted integer median" in new ColumnStatisticsTest() {
val median =
ColumnStatistics.columnMedian(4, DataTypes.int32, Some(1, DataTypes.int32), rowRDD)
median shouldBe Some(2)
}
"with no net weights should return none" in new ColumnStatisticsTest() {
val median = ColumnStatistics.columnMedian(0, DataTypes.string, Some(7, DataTypes.int32), rowRDD)
median shouldBe None
}
}
}
| ashaarunkumar/spark-tk | sparktk-core/src/test/scala/org/trustedanalytics/sparktk/frame/internal/ops/statistics/descriptives/ColumnStatisticsITest.scala | Scala | apache-2.0 | 4,130 |
/*
* Copyright (c) 2012. Alexandre Martins. All rights reserved.
*/
package pt.cnbc.wikimodels.client.snippet
import net.liftweb.common._
import net.liftweb.http._
import S._
import net.liftweb.util._
import Helpers._
import scala.xml._
import pt.cnbc.wikimodels.client.record._
import alexmsmartins.log.LoggerWrapper
import pt.cnbc.wikimodels.sbmlVisitors.SBMLStrictValidator
import pt.cnbc.wikimodels.dataModel._
import visitor.SBMLFromRecord
//implicits
import pt.cnbc.wikimodels.client.record.visitor.SBMLFromRecord._
package object screenUtil extends LoggerWrapper{
def genWarnsForSBMLModel() {
val mr = screenUtil.loadSBMLModelFromPathParam
SBMLStrictValidator.visitModel(mr).map(err => S.warning(err))
}
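  // Each load*FromPathParam helper below follows the same pattern: read the
  // record's metaId from the request path, fetch the record over the REST API,
  // and on failure show an error and redirect to "/".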
def loadSBMLModelFromPathParam:SBMLModelRecord = {
var mm:Box[SBMLModelRecord] = Empty
tryo(
SBMLModelRecord.readRestRec(debug("The modelMetaId in session after calling /model/modemetaid is: {}", S.param("modelMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(Text(msg))
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
def loadCompartmentFromPathParam:CompartmentRecord = {
var mm:Box[CompartmentRecord] = Empty
tryo(
CompartmentRecord.readRestRec(debug("The compartmentMetaId in session after calling /model/modemetaid/compartment/compartmentMetaId is: {}", S.param("compartmentMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(msg)
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
def loadSpeciesFromPathParam:SpeciesRecord = {
var mm:Box[SpeciesRecord] = Empty
tryo(
SpeciesRecord.readRestRec(debug("The speciesMetaId in session after calling /model/modemetaid/species/speciesMetaId is: {}", S.param("speciesMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(msg)
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
def loadParameterFromPathParam:ParameterRecord = {
var mm:Box[ParameterRecord] = Empty
tryo(
ParameterRecord.readRestRec(debug("The parameterMetaId in session after calling /model/modemetaid/parameter/speciesMetaId is: {}", S.param("parameterMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(msg)
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
def loadFunctionDefinitionFromPathParam:FunctionDefinitionRecord = {
var mm:Box[FunctionDefinitionRecord] = Empty
tryo(
FunctionDefinitionRecord.readRestRec(debug("The functionDefinitionMetaId in session after calling /model/modemetaid/functionDefinition/functionDMetaId is: {}", S.param("functionDefinitionMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(msg)
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
def loadReactionFromPathParam:ReactionRecord = {
var mm:Box[ReactionRecord] = Empty
tryo(
ReactionRecord.readRestRec(debug("The reactionMetaId in session after calling /model/modemetaid/reaction/reactionMetaId is: {}", S.param("reactionMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(msg)
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
def loadReactantFromPathParam:ReactantRecord = {
var mm:Box[ReactantRecord] = Empty
tryo(
{
val boxReactantRec = ReactantRecord.readRestRec(debug("The reactantMetaId in session after calling /model/modemetaid/reaction/reactionmetaid/reactant/reactantMetaId is: {}", S.param("reactantMetaId")
.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
val reactantRec = boxReactantRec.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
reactantRec.parent = Full(screenUtil.loadReactionFromPathParam)
Full(reactantRec)
}
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(msg)
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
def loadProductFromPathParam:ProductRecord = {
var mm:Box[ProductRecord] = Empty
tryo(
{
val boxProductRec = ProductRecord.readRestRec(debug("The productMetaId in session after calling /model/modemetaid/reaction/reactionmetaid/product/productMetaId is: {}", S.param("productMetaId")
.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
val productRec = boxProductRec.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
productRec.parent = Full(screenUtil.loadReactionFromPathParam)
Full(productRec)
}
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(msg)
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
def loadModifierFromPathParam:ModifierRecord = {
var mm:Box[ModifierRecord] = Empty
tryo(
{
val boxModifierRec = ModifierRecord.readRestRec(debug("The modifierMetaId in session after calling /model/modemetaid/reaction/reactionmetaid/modifier/modifierMetaId is: {}", S.param("modifierMetaId")
.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")))
val modifierRec = boxModifierRec.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
modifierRec.parent = Full(screenUtil.loadReactionFromPathParam)
Full(modifierRec)
}
) match {
case Full(m) => {mm = m}
case Failure(msg,_,_) => {
S.error(msg)
S.redirectTo("/")
}
case _ => {
S.error("This should not have happened!")
error("This should not have happened!")
S.redirectTo("/")
}
}
mm.openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 20/11/12
* Time: 5:01 PM */
class CreateModelScreen extends LiftScreen with LoggerWrapper {
object model extends ScreenVar(SBMLModelRecord.createRecord)
addFields(() => model.get.idO)
addFields(() => model.get.nameO)
addFields(() => model.get.notesO)
protected def finish() = {
trace("CreateModelScreen.finish() started executing!")
model.get.createRestRec()
S.notice("Model " + model.metaIdO.get + " was created successfully!")
for(warnings <- SBMLStrictValidator.visitModel(model.get))
S.warning(warnings)
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 26/11/12
* Time: 5:01 PM */
class EditModelScreen extends LiftScreen with LoggerWrapper {
object model extends ScreenVar(screenUtil.loadSBMLModelFromPathParam)
addFields(() => model.get.idO)
addFields(() => model.get.nameO)
addFields(() => model.get.notesO)
protected def finish() {
trace("EditModelScreen.finish() started executing!")
model.get.updateRestRec()
S.notice("Model " + model.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitModel(model.get))
S.warning(warnings)
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 20/11/12
* Time: 5:01 PM */
class CreateCompartmentScreen extends LiftScreen with LoggerWrapper {
object compartment extends ScreenVar(CompartmentRecord.createRecord)
compartment.parent = Full(screenUtil.loadSBMLModelFromPathParam)
addFields(() => compartment.get.idO)
addFields(() => compartment.get.nameO)
addFields(() => compartment.get.spatialDimensions0)
addFields(() => compartment.get.constantO)
addFields(() => compartment.get.sizeO )
addFields(() => compartment.get.outsideO)
addFields(() => compartment.get.notesO)
protected def finish() = {
trace("CreateCompartmentScreen.finish() started executing!")
compartment.get.createRestRec()
S.notice("Compartment " + compartment.get.metaIdO.get + " was created successfully!")
for(warnings <- SBMLStrictValidator.visitCompartment(compartment.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 27/11/12
* Time: 14:37 PM */
class EditCompartmentScreen extends LiftScreen with LoggerWrapper {
object compartment extends ScreenVar(screenUtil.loadCompartmentFromPathParam)
compartment.parent = Full(screenUtil.loadSBMLModelFromPathParam)
addFields(() => compartment.get.idO)
addFields(() => compartment.get.nameO)
addFields(() => compartment.get.spatialDimensions0)
addFields(() => compartment.get.constantO)
addFields(() => compartment.get.sizeO )
addFields(() => compartment.get.outsideO)
addFields(() => compartment.get.notesO)
protected def finish() {
trace("EditCompartmentScreen.finish() started executing!")
debug("Compartment from screen is " + compartment.get.toXML)
compartment.get.updateRestRec()
S.notice("Compartment " + compartment.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitCompartment(compartment.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 20/11/12
* Time: 5:01 PM */
class CreateSpeciesScreen extends LiftScreen with LoggerWrapper {
object species extends ScreenVar(SpeciesRecord.createRecord)
species.parent = Full(screenUtil.loadSBMLModelFromPathParam)
addFields(() => species.get.idO)
addFields(() => species.get.nameO)
addFields(() => species.get.compartmentO)
addFields(() => species.get.constantO)
addFields(() => species.get.initialAmountO )
addFields(() => species.get.initialConcentrationO)
addFields(() => species.get.boundaryConditionO)
addFields(() => species.get.notesO)
protected def finish() = {
trace("CreateSpeciesScreen.finish() started executing!")
species.get.createRestRec()
S.notice("Species " + species.get.metaIdO.get + " was created successfully!")
for(warnings <- SBMLStrictValidator.visitSpecies(species.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 27/11/12
* Time: 14:37 PM */
class EditSpeciesScreen extends LiftScreen with LoggerWrapper {
object species extends ScreenVar(screenUtil.loadSpeciesFromPathParam)
species.parent = Full(screenUtil.loadSBMLModelFromPathParam)
addFields(() => species.get.idO)
addFields(() => species.get.nameO)
addFields(() => species.get.compartmentO)
addFields(() => species.get.constantO)
addFields(() => species.get.initialAmountO )
addFields(() => species.get.initialConcentrationO)
addFields(() => species.get.boundaryConditionO)
addFields(() => species.get.notesO)
protected def finish() {
trace("EditSpeciesScreen.finish() started executing!")
species.get.updateRestRec()
S.notice("Species " + species.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitSpecies(species.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 20/11/12
* Time: 5:01 PM */
class CreateParameterScreen extends LiftScreen with LoggerWrapper {
object parameter extends ScreenVar(ParameterRecord.createRecord)
parameter.parent = Full(screenUtil.loadSBMLModelFromPathParam)
addFields(() => parameter.get.idO)
addFields(() => parameter.get.nameO)
addFields(() => parameter.get.valueO)
addFields(() => parameter.get.constantO)
addFields(() => parameter.get.notesO)
protected def finish() = {
trace("CreateParameterScreen.finish() started executing!")
parameter.get.createRestRec()
S.notice("Parameter " + parameter.get.metaIdO.get + " was created successfully!")
for(warnings <- SBMLStrictValidator.visitParameter(parameter.get))
S.warning(warnings)
S.redirectTo( parameter.relativeURL )
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 27/11/12
* Time: 14:37 PM */
class EditParameterScreen extends LiftScreen with LoggerWrapper {
object parameter extends ScreenVar(screenUtil.loadParameterFromPathParam)
parameter.parent = Full(screenUtil.loadSBMLModelFromPathParam)
addFields(() => parameter.get.idO)
addFields(() => parameter.get.nameO)
addFields(() => parameter.get.valueO)
addFields(() => parameter.get.constantO)
addFields(() => parameter.get.notesO)
protected def finish() {
trace("EditParameterScreen.finish() started executing!")
parameter.get.updateRestRec()
S.notice("Parameter " + parameter.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitParameter(parameter.get))
S.warning(warnings)
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 25/10/12
* Time: 7:01 PM */
class CreateFunctionDefinitionScreen extends LiftScreen with LoggerWrapper{
object function extends ScreenVar(FunctionDefinitionRecord.createRecord)
function.parent = Full(screenUtil.loadSBMLModelFromPathParam)
//override def screenTop = <b>A single screen with some input validation</b>
addFields(() => function.get.idO)
addFields(() => function.get.nameO)
addFields(() => function.get.mathO)
addFields(() => function.get.notesO)
def finish() = {
trace("CreateFunctionDefinitionScreen.finish() started executing!")
function.get.createRestRec()
S.notice("Function definition " + function.get.metaIdO.get + " was created successfully!")
for(warnings <- SBMLStrictValidator.visitFunctionDefinition(function.get))
S.warning(warnings)
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 20/11/12
* Time: 4:45 PM */
class EditFunctionDefinitionScreen extends LiftScreen with LoggerWrapper{
object function extends ScreenVar(screenUtil.loadFunctionDefinitionFromPathParam)
function.parent = Full(screenUtil.loadSBMLModelFromPathParam)
//override def screenTop = <b>A single screen with some input validation</b>
addFields(() => function.get.idO)
addFields(() => function.get.nameO)
addFields(() => function.get.mathO)
addFields(() => function.get.notesO)
protected def finish() {
trace("EditParameterScreen.finish() started executing!")
function.get.updateRestRec()
S.notice("Function definition " + function.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitFunctionDefinition(function.get))
S.warning(warnings)
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 25/10/12
* Time: 7:01 PM */
class CreateReactionScreen extends LiftScreen with LoggerWrapper{
object reaction extends ScreenVar(ReactionRecord.createRecord)
reaction.parent = Full(screenUtil.loadSBMLModelFromPathParam)
//override def screenTop = <b>A single screen with some input validation</b>
addFields(() => reaction.get.idO)
addFields(() => reaction.get.nameO)
addFields(() => reaction.get.notesO)
def finish() = {
trace("CreateReactionScreen.finish() started executing!")
reaction.get.createRestRec()
S.notice("Reaction " + reaction.get.metaIdO.get + " was created successfully!")
for(warnings <- SBMLStrictValidator.visitReaction(reaction.get))
S.warning(warnings)
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 20/11/12
* Time: 4:45 PM */
class EditReactionScreen extends LiftScreen with LoggerWrapper{
object reaction extends ScreenVar(
ReactionRecord.readRestRec(S.param("reactionMetaId").openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")).openOrThrowException("TODO: replacement for usage of deprecated openTheBox method")
)
//override def screenTop = <b>A single screen with some input validation</b>
addFields(() => reaction.get.idO)
addFields(() => reaction.get.nameO)
addFields(() => reaction.get.notesO)
protected def finish() {
trace("EditReactionScreen.finish() started executing!")
reaction.get.updateRestRec()
S.notice("Reaction " + reaction.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitReaction(reaction.get))
S.warning(warnings)
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 14/11/13
* Time: 17.40 PM */
class CreateReactantScreen extends LiftScreen with LoggerWrapper {
object reactant extends ScreenVar(ReactantRecord.createRecord)
reactant.parent = Full(screenUtil.loadReactionFromPathParam)
addFields(() => reactant.get.idO)
addFields(() => reactant.get.nameO)
addFields(() => reactant.get.notesO)
protected def finish() = {
trace("CreateReactantScreen.finish() started executing!")
reactant.get.createRestRec()
S.notice("Reactant " + reactant.get.metaIdO.get + " was created successfully!")
for(warnings <- SBMLStrictValidator.visitSpeciesReference(reactant.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 14/11/13
* Time: 17.42 PM */
class EditReactantScreen extends LiftScreen with LoggerWrapper {
object reactant extends ScreenVar(screenUtil.loadReactantFromPathParam)
reactant.parent = Full(screenUtil.loadReactionFromPathParam)
addFields(() => reactant.get.idO)
addFields(() => reactant.get.nameO)
addFields(() => reactant.get.notesO)
protected def finish() {
trace("EditReactantScreen.finish() started executing!")
debug("Reactant from screen is " + reactant.get.toXML)
reactant.get.updateRestRec()
S.notice("Reactant " + reactant.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitSpeciesReference(reactant.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
class CreateProductScreen extends LiftScreen with LoggerWrapper {
  object product extends ScreenVar(ProductRecord.createRecord)
  product.parent = Full(screenUtil.loadReactionFromPathParam)
  addFields(() => product.get.idO)
  addFields(() => product.get.nameO)
  addFields(() => product.get.notesO)
  protected def finish() = {
    trace("CreateProductScreen.finish() started executing!")
    product.get.createRestRec()
    S.notice("Product " + product.get.metaIdO.get + " was created successfully!")
    for(warnings <- SBMLStrictValidator.visitSpeciesReference(product.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 27/11/12
* Time: 14:37 PM */
class EditProductScreen extends LiftScreen with LoggerWrapper {
object product extends ScreenVar(screenUtil.loadProductFromPathParam)
product.parent = Full(screenUtil.loadReactionFromPathParam)
addFields(() => product.get.idO)
addFields(() => product.get.nameO)
addFields(() => product.get.notesO)
protected def finish() {
trace("EditProductScreen.finish() started executing!")
debug("Product from screen is " + product.get.toXML)
product.get.updateRestRec()
S.notice("Product " + product.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitSpeciesReference(product.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
class CreateModifierScreen extends LiftScreen with LoggerWrapper {
object modifier extends ScreenVar(ModifierRecord.createRecord)
modifier.parent = Full(screenUtil.loadReactionFromPathParam)
addFields(() => modifier.get.idO)
addFields(() => modifier.get.nameO)
addFields(() => modifier.get.notesO)
protected def finish() = {
trace("CreateModifierScreen.finish() started executing!")
modifier.get.createRestRec()
S.notice("Modifier " + modifier.get.metaIdO.get + " was created successfully!")
for(warnings <- SBMLStrictValidator.visitModifierSpeciesReference(modifier.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
/** TODO: Please document.
* @author Alexandre Martins
* Date: 27/11/12
* Time: 14:37 PM */
class EditModifierScreen extends LiftScreen with LoggerWrapper {
object modifier extends ScreenVar(screenUtil.loadModifierFromPathParam)
modifier.parent = Full(screenUtil.loadReactionFromPathParam)
addFields(() => modifier.get.idO)
addFields(() => modifier.get.nameO)
addFields(() => modifier.get.notesO)
protected def finish() {
trace("EditModifierScreen.finish() started executing!")
debug("Modifier from screen is " + modifier.get.toXML)
modifier.get.updateRestRec()
S.notice("Modifier " + modifier.get.metaIdO.get + " was saved successfully!")
for(warnings <- SBMLStrictValidator.visitModifierSpeciesReference(modifier.get))
S.warning(warnings)
screenUtil.genWarnsForSBMLModel()
}
}
| alexmsmartins/WikiModels | wm_web_client/src/main/scala/pt/cnbc/wikimodels/client/snippet/SBMLScreens.scala | Scala | mit | 23,428 |
package adawg.minecraftbot.util
object CodeGenerators {
val ControlInputPrinterStrings = {
val labels = Seq("back", "forward", "left", "right", "jump", "sneak",
"mouse1", "mouse2", "mouse3")
val toggleNames = labels map {_ + "Toggle"}
    val output1 = for (label <- labels) yield "val " + label + "Toggle = new scala.swing.CheckBox(\"" + label + "\")"
val output2 = for (label <- labels) yield label + "Toggle.selected = input." + label + " getOrElse false"
    val toggleList = toggleNames.mkString(", ")
val output3 = "guiComponent = Seq(" + toggleList + ")"
output1 ++ output2 ++ Seq(output3)
}
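  // For example, for the "back" label the generated lines are:
  //   val backToggle = new scala.swing.CheckBox("back")
  //   backToggle.selected = input.back getOrElse false
  // and the final line collects every toggle:
  //   guiComponent = Seq(backToggle, forwardToggle, ..., mouse3Toggle)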
} | yanich/BTBot | forge/mcp/src/minecraft/adawg/minecraftbot/util/CodeGenerators.scala | Scala | bsd-3-clause | 724 |
package edu.gemini.pit.ui.util
import java.awt.event.ActionEvent
import javax.swing.{AbstractAction, Action, KeyStroke}
import edu.gemini.shared.Platform
import edu.gemini.ui.workspace.scala._
abstract class ShellAction[A](shell: RichShell[A], caption: String, key: Option[Int] = None, mask: Int = 0) extends AbstractAction(caption) with (() => Unit) {
key.foreach { k => putValue(Action.ACCELERATOR_KEY, KeyStroke.getKeyStroke(k, Platform.MENU_ACTION_MASK | mask)) }
final def actionPerformed(e: ActionEvent) {
apply()
}
def apply() {
println("TODO: implement " + getValue(Action.NAME))
}
def enabledWhen(f: => Boolean) {
shell.listen {
setEnabled(f)
}
}
}
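/*
 * Hypothetical usage sketch; the Model type, menu key and body are placeholders:
 *
 *   class SaveAction(shell: RichShell[Model]) extends ShellAction[Model](shell, "Save", Some(java.awt.event.KeyEvent.VK_S)) {
 *     override def apply(): Unit = println("save the current model here")
 *     enabledWhen(true)  // replace with a real predicate over the shell state
 *   }
 */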
| spakzad/ocs | bundle/edu.gemini.pit/src/main/scala/edu/gemini/pit/ui/util/ShellAction.scala | Scala | bsd-3-clause | 702 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.hbase.tools.status
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand.{HBaseParams, ToggleRemoteFilterParam}
import org.locationtech.geomesa.hbase.tools.status.HBaseGetTypeNamesCommand.GetTypeNamesParams
import org.locationtech.geomesa.tools.status.GetTypeNamesCommand
class HBaseGetTypeNamesCommand extends GetTypeNamesCommand[HBaseDataStore] with HBaseDataStoreCommand {
override val params = new GetTypeNamesParams
}
object HBaseGetTypeNamesCommand {
@Parameters(commandDescription = "List the feature types for a given catalog")
class GetTypeNamesParams extends HBaseParams with ToggleRemoteFilterParam
}
| locationtech/geomesa | geomesa-hbase/geomesa-hbase-tools/src/main/scala/org/locationtech/geomesa/hbase/tools/status/HBaseGetTypeNamesCommand.scala | Scala | apache-2.0 | 1,294 |
package com.dt.scala.forexpression
/**
* @author Wang Jialin
* Date 015/8/15
* Contact Information:
* WeChat: 18610086859
* QQ: 1740415547
* Email: [email protected]
* Tel: 18610086859
*/
object ForInAction {
def main(args: Array[String]) {
val lauren = Person("Lauren", false)
val rocky = Person("Rocky", true)
val vivian = Person("Vivian", false, lauren, rocky)
val persons = List(lauren, rocky, vivian)
val forResult = for {person <- persons; name = person.name; if !person.isMale; child <- person.children}
yield (person.name, child.name)
println(forResult)
    val content = for (x <- List(1,2,3); y <- List("Hadoop","Spark","Flink")) yield (x,y)
println(content)
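    // Expected output, assuming Person(name, isMale, children*) as defined
    // elsewhere in this project:
    //   List((Vivian,Lauren), (Vivian,Rocky))
    //   List((1,Hadoop), (1,Spark), (1,Flink), (2,Hadoop), (2,Spark), (2,Flink), (3,Hadoop), (3,Spark), (3,Flink))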
}
} | slieer/scala-tutorials | src/main/scala/com/dt/scala/forexpression/ForInAction.scala | Scala | apache-2.0 | 756 |
/*
* Copyright (c) 2014 Oculus Info Inc.
* http://www.oculusinfo.com/
*
* Released under the MIT License.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
* of the Software, and to permit persons to whom the Software is furnished to do
* so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oculusinfo.tilegen.datasets
import java.util.{List => JavaList}
import scala.collection.JavaConverters._
import com.oculusinfo.binning.impl.{AOITilePyramid, WebMercatorTilePyramid}
import com.oculusinfo.binning.{TilePyramid, TilePyramidFactory}
import com.oculusinfo.factory.ConfigurableFactory
import com.oculusinfo.factory.properties.BooleanProperty
/**
* A small wrapper class for a tile pyramid that allows for auto-bounds when appropriate
*
* This small bit of indirection allows us to defer the auto-bounds decision until such a
* time as we are actually capable of calculating the auto-bounds.
* @param base The base tile pyramid to be provided when auto-bounds is not appropriate
* @param autoBounds Whether using auto-bounds is appropriate, when possible
*/
class DeferredTilePyramid (base: TilePyramid, autoBounds: Boolean) {
def getTilePyramid (boundsFcn: () => (Double, Double, Double, Double)): TilePyramid = {
if (autoBounds && base.isInstanceOf[AOITilePyramid]) {
val (minX, maxX, minY, maxY) = boundsFcn()
return new AOITilePyramid(minX, minY, maxX, maxY)
} else {
base
}
}
}
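/*
 * Usage sketch (illustrative only): with auto-bounds enabled and an AOI base
 * pyramid, the bounds function supplies the extent; in every other case the
 * base pyramid is returned unchanged.
 *
 *   val deferred = new DeferredTilePyramid(new AOITilePyramid(0.0, 0.0, 1.0, 1.0), autoBounds = true)
 *   val pyramid  = deferred.getTilePyramid(() => (minX, maxX, minY, maxY)) // bounds computed from the data
 */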
/**
* A factory for deferred tile pyramids
*/
object DeferredTilePyramidFactory {
var AUTOBOUNDS_PROPERTY = new BooleanProperty("autobounds",
"If true, calculate tile pyramid bounds automatically; if false, use values given by properties",
true)
}
class DeferredTilePyramidFactory (parent: ConfigurableFactory[_], path: JavaList[String])
extends ConfigurableFactory[DeferredTilePyramid](classOf[DeferredTilePyramid], parent, path)
{
import DeferredTilePyramidFactory._
addProperty(AUTOBOUNDS_PROPERTY)
addChildFactory(new TilePyramidFactory(this, Seq[String]().asJava))
override protected def create: DeferredTilePyramid = {
new DeferredTilePyramid(produce(classOf[TilePyramid]), getPropertyValue(AUTOBOUNDS_PROPERTY))
}
}
| unchartedsoftware/aperture-tiles | tile-generation/src/main/scala/com/oculusinfo/tilegen/datasets/DeferredTilePyramid.scala | Scala | mit | 3,150 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.storage
import java.io.{Externalizable, ObjectInput, ObjectOutput}
import org.apache.spark.rpc.RpcEndpointRef
import org.apache.spark.util.Utils
private[spark] object BlockManagerMessages {
//////////////////////////////////////////////////////////////////////////////////
// Messages from the master to slaves.
//////////////////////////////////////////////////////////////////////////////////
sealed trait ToBlockManagerSlave
// Remove a block from the slaves that have it. This can only be used to remove
// blocks that the master knows about.
case class RemoveBlock(blockId: BlockId) extends ToBlockManagerSlave
// Replicate blocks that were lost due to executor failure
case class ReplicateBlock(blockId: BlockId, replicas: Seq[BlockManagerId], maxReplicas: Int)
extends ToBlockManagerSlave
// Remove all blocks belonging to a specific RDD.
case class RemoveRdd(rddId: Int) extends ToBlockManagerSlave
// Remove all blocks belonging to a specific shuffle.
case class RemoveShuffle(shuffleId: Int) extends ToBlockManagerSlave
// Remove all blocks belonging to a specific broadcast.
case class RemoveBroadcast(broadcastId: Long, removeFromDriver: Boolean = true)
extends ToBlockManagerSlave
/**
* Driver to Executor message to trigger a thread dump.
*/
case object TriggerThreadDump extends ToBlockManagerSlave
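  // Illustrative sketch (simplified placeholder, not the actual endpoint code):
  // an executor-side RPC endpoint would typically pattern match on these messages:
  //
  //   def handle(msg: ToBlockManagerSlave): Unit = msg match {
  //     case RemoveBlock(blockId) => ... // drop the block locally
  //     case RemoveRdd(rddId)     => ... // drop all blocks belonging to that RDD
  //     case TriggerThreadDump    => ... // reply with a thread dump
  //     case _                    => ...
  //   }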
//////////////////////////////////////////////////////////////////////////////////
// Messages from slaves to the master.
//////////////////////////////////////////////////////////////////////////////////
sealed trait ToBlockManagerMaster
case class RegisterBlockManager(
blockManagerId: BlockManagerId,
maxOnHeapMemSize: Long,
maxOffHeapMemSize: Long,
sender: RpcEndpointRef)
extends ToBlockManagerMaster
case class UpdateBlockInfo(
var blockManagerId: BlockManagerId,
var blockId: BlockId,
var storageLevel: StorageLevel,
var memSize: Long,
var diskSize: Long)
extends ToBlockManagerMaster
with Externalizable {
def this() = this(null, null, null, 0, 0) // For deserialization only
override def writeExternal(out: ObjectOutput): Unit = Utils.tryOrIOException {
blockManagerId.writeExternal(out)
out.writeUTF(blockId.name)
storageLevel.writeExternal(out)
out.writeLong(memSize)
out.writeLong(diskSize)
}
override def readExternal(in: ObjectInput): Unit = Utils.tryOrIOException {
blockManagerId = BlockManagerId(in)
blockId = BlockId(in.readUTF())
storageLevel = StorageLevel(in)
memSize = in.readLong()
diskSize = in.readLong()
}
}
case class GetLocations(blockId: BlockId) extends ToBlockManagerMaster
case class GetLocationsAndStatus(blockId: BlockId) extends ToBlockManagerMaster
// The response message of `GetLocationsAndStatus` request.
case class BlockLocationsAndStatus(locations: Seq[BlockManagerId], status: BlockStatus) {
assert(locations.nonEmpty)
}
case class GetLocationsMultipleBlockIds(blockIds: Array[BlockId]) extends ToBlockManagerMaster
case class GetPeers(blockManagerId: BlockManagerId) extends ToBlockManagerMaster
case class GetExecutorEndpointRef(executorId: String) extends ToBlockManagerMaster
case class RemoveExecutor(execId: String) extends ToBlockManagerMaster
case object StopBlockManagerMaster extends ToBlockManagerMaster
case object GetMemoryStatus extends ToBlockManagerMaster
case object GetStorageStatus extends ToBlockManagerMaster
case class GetBlockStatus(blockId: BlockId, askSlaves: Boolean = true)
extends ToBlockManagerMaster
case class GetMatchingBlockIds(filter: BlockId => Boolean, askSlaves: Boolean = true)
extends ToBlockManagerMaster
case class BlockManagerHeartbeat(blockManagerId: BlockManagerId) extends ToBlockManagerMaster
case class HasExclusiveCachedBlocks(executorId: String) extends ToBlockManagerMaster
case class IsExecutorAlive(executorId: String) extends ToBlockManagerMaster
}
| LantaoJin/spark | core/src/main/scala/org/apache/spark/storage/BlockManagerMessages.scala | Scala | apache-2.0 | 4,872 |
import org.specs2.matcher.ShouldMatchers
import play.api.Application
import play.api.http.HeaderNames
import play.api.inject.bind
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.mvc.{ AnyContent, Cookies, Request }
import play.api.test.{ FakeRequest, Injecting, PlaySpecification, WithApplication }
import service.CustomProviders
class ApplicationSpec extends PlaySpecification with ShouldMatchers {
def app: Application = new GuiceApplicationBuilder()
.bindings(bind[CustomProviders].to(CustomProviders(Seq(new NaiveIdentityProvider))))
.build()
"Access secured index " in new WithApplication(app) with Injecting {
val controller = inject[controllers.Application]
// same thing we do in ApplicationScenario
val allCookies: Cookies =
cookies(route(app, FakeRequest(POST, "/auth/authenticate/naive").withTextBody("user")).get)
val authCookie = allCookies("id")
val req: Request[AnyContent] = FakeRequest().
withHeaders((HeaderNames.CONTENT_TYPE, "application/x-www-form-urlencoded")).
withCookies(authCookie)
val result = controller.index(req)
val actual: Int = status(result)
actual must be equalTo OK
}
} | jaliss/securesocial | samples/scala/demo/test/ApplicationSpec.scala | Scala | apache-2.0 | 1,194 |
package sample.blog.monix
import java.net.InetAddress
import com.datastax.driver.core._
import java.lang.{ Long ⇒ JLong }
import java.time._
import java.util.{ TimeZone, UUID }
import java.util.concurrent.CountDownLatch
import akka.persistence.query.Offset
import com.datastax.driver.core.utils.UUIDs
import com.datastax.driver.extras.codecs.jdk8.ZonedDateTimeCodec
import scala.concurrent.{ Await, ExecutionContext, Future }
import monix.eval.Task
import monix.execution.Ack
import monix.reactive.Observable
import monix.execution.Scheduler.Implicits.global
import scala.util.{ Failure, Success }
//https://www.beyondthelines.net/databases/querying-cassandra-from-scala/
//https://monix.io/blog/2018/03/19/monix-v3.0.0-RC1.html
object PsJournal {
def execute1(statement: Future[PreparedStatement], pId: String)(
implicit
executionContext: ExecutionContext, session: Session
): Future[ResultSet] = {
statement
.map(_.bind(pId).setFetchSize(1 << 5))
.flatMap(session.executeAsync(_))
}
def execute2(statement: Future[PreparedStatement], pId: String, pNum: JLong)(
implicit
executionContext: ExecutionContext, session: Session
): Future[ResultSet] = {
println(s"fetch $pId - $pNum")
statement
.map(_.bind(pId, pNum).setFetchSize(1 << 5))
.flatMap(session.executeAsync(_))
}
def query(cql: Future[PreparedStatement], pId: String, pNum: JLong)(
implicit
executionContext: ExecutionContext, cassandraSession: Session
): Observable[Row] = {
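    // Page through the partition asynchronously: fromAsyncStateAction keeps the
    // Future of the next ResultSet as its state, emits the current page and
    // eagerly prefetches the following one via fetchMoreResults.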
val obsPerPartition = Observable.fromAsyncStateAction[Future[ResultSet], ResultSet]({ nextRsF ⇒
Task.fromFuture(nextRsF).flatMap { rs ⇒
println("**** page ****")
Task((rs, rs.fetchMoreResults))
}
})(execute2(cql, pId, pNum))
obsPerPartition
.takeWhile(!_.isExhausted)
.flatMap { rs ⇒
val available = rs.getAvailableWithoutFetching
println(available)
Observable.fromIterable(
new Iterable[Row]() {
val iterator: Iterator[Row] =
Iterator.fill(available)(rs.one)
}
)
//import scala.collection.JavaConverters._
//Observable.fromIterator(rs.iterator().asScala)
}
}
def queryF(cqlF: Future[PreparedStatement], pId: String /*, offset: String*/ )(
implicit
ec: ExecutionContext, c: Session
): Unit = {
//val tz = TimeZone.getDefault.toZoneId
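    // Recursively fetch pages: each completed ResultSet is drained without extra
    // fetching, its rows are printed, and the next page is requested until the
    // result set is exhausted.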
def loop(partitionData: () ⇒ Future[ResultSet]): Unit = {
partitionData().onComplete {
case Success(rs) ⇒
//println(Thread.currentThread.getName)
if (rs.isExhausted) println("done")
else {
val available = rs.getAvailableWithoutFetching
val page = IndexedSeq.fill(available)(rs.one)
//TimeZone.getTimeZone()
//val utc = ZoneOffset.UTC
//val plus4 = ZoneId.of("Europe/Moscow")
page.foreach { r ⇒
val dt = r.getTupleValue("when")
val ts = dt.getTimestamp(0)
val tz = TimeZone.getTimeZone(dt.getString(1))
val zoneDT = ts.toInstant.atZone(tz.toZoneId)
//val a = OffsetDateTime.ofInstant(ts.toInstant, utc)
//val zoneDT = ZonedDateTime.ofInstant(ts.toInstant, tz.toZoneId)
println(s"$ts - ${zoneDT}")
}
/*page.foreach { r =>
val whenUuid = r.getUUID("when")
val when = Instant.ofEpochMilli(UUIDs.unixTimestamp(whenUuid)).atZone(tz)
println(s"$when")
}*/
/*page.foreach { r =>
val timeUuid = r.getUUID("when")
//in UTC
//val inst = Instant.ofEpochMilli(UUIDs.unixTimestamp(timeUuid))
val inst = Instant.ofEpochMilli(UUIDs.unixTimestamp(timeUuid)).atZone(tz)
println(s"$timeUuid - $inst")
}*/
println(page(page.size - 1))
loop(() ⇒ asScalaFuture(rs.fetchMoreResults))
}
case Failure(ex) ⇒
ex.printStackTrace()
}
}
loop(
() ⇒ cqlF
.map(_.bind(pId /*UUID.fromString(offset)*/ ).setFetchSize(1 << 5))
.flatMap(c.executeAsync(_))
)
}
def main(args: Array[String]): Unit = {
val l = new CountDownLatch(1)
import monix.execution.Scheduler.Implicits.global
//https://docs.datastax.com/en/developer/java-driver/3.4/manual/query_timestamps/
val cluster = new Cluster.Builder()
.addContactPoints(InetAddress.getByName("192.168.77.42"))
.withTimestampGenerator(new AtomicMonotonicTimestampGenerator()) //give us RYW consistency, latest versions of driver does it
.withPort(9042)
.build()
//import com.datastax.driver.extras.codecs.jdk8.InstantCodec
//cluster.getConfiguration().getCodecRegistry().register(InstantCodec.instance)
/*
One problem with timestamp is that it does not store time zones.
ZonedDateTimeCodec addresses that, by mapping a ZonedDateTime to a tuple<timestamp,varchar>
CREATE TABLE blogs.timelineTs (tl_name text, when tuple<timestamp,varchar>, login text, message text, PRIMARY KEY (tl_name, when))
WITH CLUSTERING ORDER BY (when DESC);
*/
val tupleType = cluster.getMetadata.newTupleType(DataType.timestamp(), DataType.varchar())
cluster.getConfiguration().getCodecRegistry().register(new ZonedDateTimeCodec(tupleType))
implicit val session = cluster.connect
    session.execute(
      "INSERT INTO blogs.timelineTs (tl_name, when, login, message) VALUES (?, ?, ?, ?)",
      "tw", ZonedDateTime.parse("2010-06-30T01:20:47.999+01:00"), "haghard", "bla-bla1")
/*queryF(
cql"SELECT persistence_id, sequence_nr FROM blogs.blogs_journal where persistence_id = ? and partition_nr = ?",
"7-Patrik", 0l)*/
// creates an observable of row
//executeH("twitter", 1)(statement.map(_.bind(_)))
/*
val f = cql"INSERT INTO blogs.timelineTs (tl_name, when, login, message) VALUES (?, ?, ?, ?)"
.map(_.bind("tw", ZonedDateTime.now(), "haghard", "bla-bla1"))
.flatMap(session.executeAsync(_))
val f2 = cql"INSERT INTO blogs.timelineTs (tl_name, when, login, message) VALUES (?, ?, ?, ?)"
.map(_.bind("tw", ZonedDateTime.now(ZoneOffset.ofHours(2)), "haghard", "bla-bla1"))
.flatMap(session.executeAsync(_))
import scala.concurrent.duration._
Await.result(f, 3.seconds)
Await.result(f2, 3.seconds)
*/
queryF(cql"SELECT tl_name, when FROM blogs.timelineTs WHERE tl_name = ?", "tw")
//queryF(cql"SELECT tl_name, when FROM blogs.timeline WHERE tl_name = ?", "twitter")
//Instant.ofEpochMilli(UUIDs.unixTimestamp(timeUuid))
/*
queryF(cql"SELECT login, message, when FROM blogs.timeline WHERE tl_name = ? AND when > ?",
"twitter", "612b0650-9016-11e8-a994-6d2c86545d91")
*/
/*val obs = query(
cql"SELECT persistence_id, sequence_nr, timestamp FROM blogs.blogs_journal where persistence_id = ? and partition_nr = ?",
"7-Patrik", 0l)
obs.subscribe({ row =>
println(Offset.timeBasedUUID(row.getUUID("timestamp")))
//println(row.getLong("sequence_nr"))
Ack.Continue
}, { e: Throwable =>
e.printStackTrace
}, { () =>
println("done")
l.countDown
})*/
l.await(5L, java.util.concurrent.TimeUnit.SECONDS)
println("exit")
System.exit(0)
}
} | haghard/akka-pq | src/main/scala/sample/blog/monix/PsJournal.scala | Scala | apache-2.0 | 7,421 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.io.{File, IOException}
import java.net.SocketTimeoutException
import java.util
import java.util.concurrent._
import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger}
import com.yammer.metrics.core.Gauge
import kafka.api.KAFKA_0_9_0
import kafka.cluster.Broker
import kafka.common.{GenerateBrokerIdException, InconsistentBrokerIdException}
import kafka.controller.KafkaController
import kafka.coordinator.group.GroupCoordinator
import kafka.coordinator.transaction.TransactionCoordinator
import kafka.log.{LogConfig, LogManager}
import kafka.metrics.{KafkaMetricsGroup, KafkaMetricsReporter}
import kafka.network.{BlockingChannel, SocketServer}
import kafka.security.CredentialProvider
import kafka.security.auth.Authorizer
import kafka.utils._
import org.apache.kafka.clients.{ApiVersions, ManualMetadataUpdater, NetworkClient, NetworkClientUtils}
import org.apache.kafka.common.internals.ClusterResourceListeners
import org.apache.kafka.common.metrics.{JmxReporter, Metrics, _}
import org.apache.kafka.common.network._
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.requests.{ControlledShutdownRequest, ControlledShutdownResponse}
import org.apache.kafka.common.security.{JaasContext, JaasUtils}
import org.apache.kafka.common.utils.{AppInfoParser, Time}
import org.apache.kafka.common.{ClusterResource, Node}
import scala.collection.JavaConverters._
import scala.collection.{Map, mutable}
object KafkaServer {
// Copy the subset of properties that are relevant to Logs
// I'm listing out individual properties here since the names are slightly different in each Config class...
private[kafka] def copyKafkaConfigToLog(kafkaConfig: KafkaConfig): java.util.Map[String, Object] = {
val logProps = new util.HashMap[String, Object]()
logProps.put(LogConfig.SegmentBytesProp, kafkaConfig.logSegmentBytes)
logProps.put(LogConfig.SegmentMsProp, kafkaConfig.logRollTimeMillis)
logProps.put(LogConfig.SegmentJitterMsProp, kafkaConfig.logRollTimeJitterMillis)
logProps.put(LogConfig.SegmentIndexBytesProp, kafkaConfig.logIndexSizeMaxBytes)
logProps.put(LogConfig.FlushMessagesProp, kafkaConfig.logFlushIntervalMessages)
logProps.put(LogConfig.FlushMsProp, kafkaConfig.logFlushIntervalMs)
logProps.put(LogConfig.RetentionBytesProp, kafkaConfig.logRetentionBytes)
logProps.put(LogConfig.RetentionMsProp, kafkaConfig.logRetentionTimeMillis: java.lang.Long)
logProps.put(LogConfig.MaxMessageBytesProp, kafkaConfig.messageMaxBytes)
logProps.put(LogConfig.IndexIntervalBytesProp, kafkaConfig.logIndexIntervalBytes)
logProps.put(LogConfig.DeleteRetentionMsProp, kafkaConfig.logCleanerDeleteRetentionMs)
logProps.put(LogConfig.MinCompactionLagMsProp, kafkaConfig.logCleanerMinCompactionLagMs)
logProps.put(LogConfig.FileDeleteDelayMsProp, kafkaConfig.logDeleteDelayMs)
logProps.put(LogConfig.MinCleanableDirtyRatioProp, kafkaConfig.logCleanerMinCleanRatio)
logProps.put(LogConfig.CleanupPolicyProp, kafkaConfig.logCleanupPolicy)
logProps.put(LogConfig.MinInSyncReplicasProp, kafkaConfig.minInSyncReplicas)
logProps.put(LogConfig.CompressionTypeProp, kafkaConfig.compressionType)
logProps.put(LogConfig.UncleanLeaderElectionEnableProp, kafkaConfig.uncleanLeaderElectionEnable)
logProps.put(LogConfig.PreAllocateEnableProp, kafkaConfig.logPreAllocateEnable)
logProps.put(LogConfig.MessageFormatVersionProp, kafkaConfig.logMessageFormatVersion.version)
logProps.put(LogConfig.MessageTimestampTypeProp, kafkaConfig.logMessageTimestampType.name)
logProps.put(LogConfig.MessageTimestampDifferenceMaxMsProp, kafkaConfig.logMessageTimestampDifferenceMaxMs: java.lang.Long)
logProps
}
private[server] def metricConfig(kafkaConfig: KafkaConfig): MetricConfig = {
new MetricConfig()
      .samples(kafkaConfig.metricNumSamples) // number of samples kept per metric
      .recordLevel(Sensor.RecordingLevel.forName(kafkaConfig.metricRecordingLevel)) // metrics recording level
.timeWindow(kafkaConfig.metricSampleWindowMs, TimeUnit.MILLISECONDS)
}
}
/**
* Represents the lifecycle of a single Kafka broker. Handles all functionality required
* to start up and shutdown a single Kafka node.
*/
class KafkaServer(val config: KafkaConfig, time: Time = Time.SYSTEM, threadNamePrefix: Option[String] = None, kafkaMetricsReporters: Seq[KafkaMetricsReporter] = List()) extends Logging with KafkaMetricsGroup {
private val startupComplete = new AtomicBoolean(false)
private val isShuttingDown = new AtomicBoolean(false)
private val isStartingUp = new AtomicBoolean(false)
private var shutdownLatch = new CountDownLatch(1)
private val jmxPrefix: String = "kafka.server"
var metrics: Metrics = null
val brokerState: BrokerState = new BrokerState
var apis: KafkaApis = null
var authorizer: Option[Authorizer] = None
var socketServer: SocketServer = null
var requestHandlerPool: KafkaRequestHandlerPool = null
var logManager: LogManager = null
var replicaManager: ReplicaManager = null
var adminManager: AdminManager = null
var dynamicConfigHandlers: Map[String, ConfigHandler] = null
var dynamicConfigManager: DynamicConfigManager = null
var credentialProvider: CredentialProvider = null
var groupCoordinator: GroupCoordinator = null
var transactionCoordinator: TransactionCoordinator = null
var kafkaController: KafkaController = null
val kafkaScheduler = new KafkaScheduler(config.backgroundThreads)
var kafkaHealthcheck: KafkaHealthcheck = null
var metadataCache: MetadataCache = null
var quotaManagers: QuotaFactory.QuotaManagers = null
var zkUtils: ZkUtils = null
val correlationId: AtomicInteger = new AtomicInteger(0)
val brokerMetaPropsFile = "meta.properties"
val brokerMetadataCheckpoints = config.logDirs.map(logDir => (logDir, new BrokerMetadataCheckpoint(new File(logDir + File.separator +brokerMetaPropsFile)))).toMap
private var _clusterId: String = null
private var _brokerTopicStats: BrokerTopicStats = null
def clusterId: String = _clusterId
private[kafka] def brokerTopicStats = _brokerTopicStats
newGauge(
"BrokerState",
new Gauge[Int] {
def value = brokerState.currentState
}
)
newGauge(
"ClusterId",
new Gauge[String] {
def value = clusterId
}
)
newGauge(
"yammer-metrics-count",
new Gauge[Int] {
def value = {
com.yammer.metrics.Metrics.defaultRegistry().allMetrics().size()
}
}
)
/**
* Start up API for bringing up a single instance of the Kafka server.
* Instantiates the LogManager, the SocketServer and the request handlers - KafkaRequestHandlers
*/
def startup() {
try {
info("starting")
if(isShuttingDown.get)
throw new IllegalStateException("Kafka server is still shutting down, cannot re-start!")
if(startupComplete.get)
return
val canStartup = isStartingUp.compareAndSet(false, true)
if (canStartup) {
brokerState.newState(Starting)
/* start scheduler */
kafkaScheduler.startup()
/* setup zookeeper */
zkUtils = initZk()
/* Get or create cluster_id */
_clusterId = getOrGenerateClusterId(zkUtils)
info(s"Cluster ID = $clusterId")
/* generate brokerId */
config.brokerId = getBrokerId
this.logIdent = "[Kafka Server " + config.brokerId + "], "
/* create and configure metrics */
val reporters = config.getConfiguredInstances(KafkaConfig.MetricReporterClassesProp, classOf[MetricsReporter],
Map[String, AnyRef](KafkaConfig.BrokerIdProp -> (config.brokerId.toString)).asJava)
reporters.add(new JmxReporter(jmxPrefix))
val metricConfig = KafkaServer.metricConfig(config)
metrics = new Metrics(metricConfig, reporters, time, true)
/* register broker metrics */
_brokerTopicStats = new BrokerTopicStats
quotaManagers = QuotaFactory.instantiate(config, metrics, time)
notifyClusterListeners(kafkaMetricsReporters ++ reporters.asScala)
/* start log manager */
logManager = LogManager(config, zkUtils, brokerState, kafkaScheduler, time, brokerTopicStats)
logManager.startup()
metadataCache = new MetadataCache(config.brokerId)
credentialProvider = new CredentialProvider(config.saslEnabledMechanisms)
socketServer = new SocketServer(config, metrics, time, credentialProvider)
socketServer.startup()
/* start replica manager */
replicaManager = createReplicaManager(isShuttingDown)
replicaManager.startup()
/* start kafka controller */
kafkaController = new KafkaController(config, zkUtils, time, metrics, threadNamePrefix)
kafkaController.startup()
adminManager = new AdminManager(config, metrics, metadataCache, zkUtils)
/* start group coordinator */
// Hardcode Time.SYSTEM for now as some Streams tests fail otherwise, it would be good to fix the underlying issue
groupCoordinator = GroupCoordinator(config, zkUtils, replicaManager, Time.SYSTEM)
groupCoordinator.startup()
/* start transaction coordinator, with a separate background thread scheduler for transaction expiration and log loading */
// Hardcode Time.SYSTEM for now as some Streams tests fail otherwise, it would be good to fix the underlying issue
transactionCoordinator = TransactionCoordinator(config, replicaManager, new KafkaScheduler(threads = 1, threadNamePrefix = "transaction-log-manager-"), zkUtils, metrics, metadataCache, Time.SYSTEM)
transactionCoordinator.startup()
/* Get the authorizer and initialize it if one is specified.*/
        // At startup the authorizer.class config entry is read and the configured
        // Authorizer implementation is created via reflection; the resulting
        // authorizer is then passed to KafkaApis, which invokes it when authorizing requests.
authorizer = Option(config.authorizerClassName).filter(_.nonEmpty).map { authorizerClassName =>
val authZ = CoreUtils.createObject[Authorizer](authorizerClassName)
authZ.configure(config.originals())
authZ
}
/* start processing requests */
apis = new KafkaApis(socketServer.requestChannel, replicaManager, adminManager, groupCoordinator, transactionCoordinator,
kafkaController, zkUtils, config.brokerId, config, metadataCache, metrics, authorizer, quotaManagers,
brokerTopicStats, clusterId, time)
requestHandlerPool = new KafkaRequestHandlerPool(config.brokerId, socketServer.requestChannel, apis, time,
config.numIoThreads)
Mx4jLoader.maybeLoad()
/* start dynamic config manager */
dynamicConfigHandlers = Map[String, ConfigHandler](ConfigType.Topic -> new TopicConfigHandler(logManager, config, quotaManagers),
ConfigType.Client -> new ClientIdConfigHandler(quotaManagers),
ConfigType.User -> new UserConfigHandler(quotaManagers, credentialProvider),
ConfigType.Broker -> new BrokerConfigHandler(config, quotaManagers))
// Create the config manager. start listening to notifications
dynamicConfigManager = new DynamicConfigManager(zkUtils, dynamicConfigHandlers)
dynamicConfigManager.startup()
/* tell everyone we are alive */
val listeners = config.advertisedListeners.map { endpoint =>
if (endpoint.port == 0)
endpoint.copy(port = socketServer.boundPort(endpoint.listenerName))
else
endpoint
}
kafkaHealthcheck = new KafkaHealthcheck(config.brokerId, listeners, zkUtils, config.rack,
config.interBrokerProtocolVersion)
kafkaHealthcheck.startup()
// Now that the broker id is successfully registered via KafkaHealthcheck, checkpoint it
checkpointBrokerId(config.brokerId)
brokerState.newState(RunningAsBroker)
shutdownLatch = new CountDownLatch(1)
startupComplete.set(true)
isStartingUp.set(false)
AppInfoParser.registerAppInfo(jmxPrefix, config.brokerId.toString)
info("started")
}
}
catch {
case e: Throwable =>
fatal("Fatal error during KafkaServer startup. Prepare to shutdown", e)
isStartingUp.set(false)
shutdown()
throw e
}
}
private def notifyClusterListeners(clusterListeners: Seq[AnyRef]): Unit = {
val clusterResourceListeners = new ClusterResourceListeners
clusterResourceListeners.maybeAddAll(clusterListeners.asJava)
clusterResourceListeners.onUpdate(new ClusterResource(clusterId))
}
protected def createReplicaManager(isShuttingDown: AtomicBoolean): ReplicaManager =
new ReplicaManager(config, metrics, time, zkUtils, kafkaScheduler, logManager, isShuttingDown, quotaManagers.follower,
brokerTopicStats, metadataCache)
private def initZk(): ZkUtils = {
info(s"Connecting to zookeeper on ${config.zkConnect}")
val chrootIndex = config.zkConnect.indexOf("/")
val chrootOption = {
if (chrootIndex > 0) Some(config.zkConnect.substring(chrootIndex))
else None
}
val secureAclsEnabled = config.zkEnableSecureAcls
val isZkSecurityEnabled = JaasUtils.isZkSecurityEnabled()
if (secureAclsEnabled && !isZkSecurityEnabled)
throw new java.lang.SecurityException(s"${KafkaConfig.ZkEnableSecureAclsProp} is true, but the verification of the JAAS login file failed.")
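    // If a chroot is configured, first connect without it and make sure the
    // chroot path exists before creating the final ZkUtils below.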
chrootOption.foreach { chroot =>
val zkConnForChrootCreation = config.zkConnect.substring(0, chrootIndex)
val zkClientForChrootCreation = ZkUtils(zkConnForChrootCreation,
sessionTimeout = config.zkSessionTimeoutMs,
connectionTimeout = config.zkConnectionTimeoutMs,
secureAclsEnabled)
zkClientForChrootCreation.makeSurePersistentPathExists(chroot)
info(s"Created zookeeper path $chroot")
zkClientForChrootCreation.zkClient.close()
}
val zkUtils = ZkUtils(config.zkConnect,
sessionTimeout = config.zkSessionTimeoutMs,
connectionTimeout = config.zkConnectionTimeoutMs,
secureAclsEnabled)
zkUtils.setupCommonPaths()
zkUtils
}
def getOrGenerateClusterId(zkUtils: ZkUtils): String = {
zkUtils.getClusterId.getOrElse(zkUtils.createOrGetClusterId(CoreUtils.generateUuidAsBase64))
}
/**
* Performs controlled shutdown
*/
private def controlledShutdown() {
def node(broker: Broker): Node = {
val brokerEndPoint = broker.getBrokerEndPoint(config.interBrokerListenerName)
new Node(brokerEndPoint.id, brokerEndPoint.host, brokerEndPoint.port)
}
val socketTimeoutMs = config.controllerSocketTimeoutMs
def networkClientControlledShutdown(retries: Int): Boolean = {
val metadataUpdater = new ManualMetadataUpdater()
val networkClient = {
val channelBuilder = ChannelBuilders.clientChannelBuilder(
config.interBrokerSecurityProtocol,
JaasContext.Type.SERVER,
config,
config.interBrokerListenerName,
config.saslMechanismInterBrokerProtocol,
config.saslInterBrokerHandshakeRequestEnable)
val selector = new Selector(
NetworkReceive.UNLIMITED,
config.connectionsMaxIdleMs,
metrics,
time,
"kafka-server-controlled-shutdown",
Map.empty.asJava,
false,
channelBuilder
)
new NetworkClient(
selector,
metadataUpdater,
config.brokerId.toString,
1,
0,
0,
Selectable.USE_DEFAULT_BUFFER_SIZE,
Selectable.USE_DEFAULT_BUFFER_SIZE,
config.requestTimeoutMs,
time,
false,
new ApiVersions)
}
var shutdownSucceeded: Boolean = false
try {
var remainingRetries = retries
var prevController: Broker = null
var ioException = false
while (!shutdownSucceeded && remainingRetries > 0) {
remainingRetries = remainingRetries - 1
// 1. Find the controller and establish a connection to it.
// Get the current controller info. This is to ensure we use the most recent info to issue the
// controlled shutdown request
val controllerId = zkUtils.getController()
zkUtils.getBrokerInfo(controllerId) match {
case Some(broker) =>
// if this is the first attempt, if the controller has changed or if an exception was thrown in a previous
// attempt, connect to the most recent controller
if (ioException || broker != prevController) {
ioException = false
if (prevController != null)
networkClient.close(node(prevController).idString)
prevController = broker
metadataUpdater.setNodes(Seq(node(prevController)).asJava)
}
case None => //ignore and try again
}
// 2. issue a controlled shutdown to the controller
if (prevController != null) {
try {
if (!NetworkClientUtils.awaitReady(networkClient, node(prevController), time, socketTimeoutMs))
throw new SocketTimeoutException(s"Failed to connect within $socketTimeoutMs ms")
// send the controlled shutdown request
val controlledShutdownRequest = new ControlledShutdownRequest.Builder(config.brokerId)
val request = networkClient.newClientRequest(node(prevController).idString, controlledShutdownRequest,
time.milliseconds(), true)
val clientResponse = NetworkClientUtils.sendAndReceive(networkClient, request, time)
val shutdownResponse = clientResponse.responseBody.asInstanceOf[ControlledShutdownResponse]
if (shutdownResponse.error == Errors.NONE && shutdownResponse.partitionsRemaining.isEmpty) {
shutdownSucceeded = true
info("Controlled shutdown succeeded")
}
else {
info("Remaining partitions to move: %s".format(shutdownResponse.partitionsRemaining.asScala.mkString(",")))
info("Error code from controller: %d".format(shutdownResponse.error.code))
}
}
catch {
case ioe: IOException =>
ioException = true
warn("Error during controlled shutdown, possibly because leader movement took longer than the configured controller.socket.timeout.ms and/or request.timeout.ms: %s".format(ioe.getMessage))
// ignore and try again
}
}
if (!shutdownSucceeded) {
Thread.sleep(config.controlledShutdownRetryBackoffMs)
warn("Retrying controlled shutdown after the previous attempt failed...")
}
}
}
finally
networkClient.close()
shutdownSucceeded
}
def blockingChannelControlledShutdown(retries: Int): Boolean = {
var remainingRetries = retries
var channel: BlockingChannel = null
var prevController: Broker = null
var shutdownSucceeded: Boolean = false
try {
while (!shutdownSucceeded && remainingRetries > 0) {
remainingRetries = remainingRetries - 1
// 1. Find the controller and establish a connection to it.
// Get the current controller info. This is to ensure we use the most recent info to issue the
// controlled shutdown request
val controllerId = zkUtils.getController()
zkUtils.getBrokerInfo(controllerId) match {
case Some(broker) =>
if (channel == null || prevController == null || !prevController.equals(broker)) {
// if this is the first attempt or if the controller has changed, create a channel to the most recent
// controller
if (channel != null)
channel.disconnect()
val brokerEndPoint = broker.getBrokerEndPoint(config.interBrokerListenerName)
channel = new BlockingChannel(brokerEndPoint.host,
brokerEndPoint.port,
BlockingChannel.UseDefaultBufferSize,
BlockingChannel.UseDefaultBufferSize,
config.controllerSocketTimeoutMs)
channel.connect()
prevController = broker
}
case None => //ignore and try again
}
// 2. issue a controlled shutdown to the controller
if (channel != null) {
var response: NetworkReceive = null
try {
// send the controlled shutdown request
val request = new kafka.api.ControlledShutdownRequest(0, correlationId.getAndIncrement, None, config.brokerId)
channel.send(request)
response = channel.receive()
val shutdownResponse = kafka.api.ControlledShutdownResponse.readFrom(response.payload())
if (shutdownResponse.error == Errors.NONE && shutdownResponse.partitionsRemaining != null &&
shutdownResponse.partitionsRemaining.isEmpty) {
shutdownSucceeded = true
info ("Controlled shutdown succeeded")
}
else {
info("Remaining partitions to move: %s".format(shutdownResponse.partitionsRemaining.mkString(",")))
info("Error code from controller: %d".format(shutdownResponse.error.code))
}
}
catch {
case ioe: java.io.IOException =>
channel.disconnect()
channel = null
warn("Error during controlled shutdown, possibly because leader movement took longer than the configured controller.socket.timeout.ms and/or request.timeout.ms: %s".format(ioe.getMessage))
// ignore and try again
}
}
if (!shutdownSucceeded) {
Thread.sleep(config.controlledShutdownRetryBackoffMs)
warn("Retrying controlled shutdown after the previous attempt failed...")
}
}
}
finally {
if (channel != null) {
channel.disconnect()
channel = null
}
}
shutdownSucceeded
}
if (startupComplete.get() && config.controlledShutdownEnable) {
      // We request the controller to do a controlled shutdown. On failure, we back off for a configured period
      // of time and try again for a configured number of retries. If all the attempts fail, we simply force
      // the shutdown.
info("Starting controlled shutdown")
brokerState.newState(PendingControlledShutdown)
val shutdownSucceeded =
// Before 0.9.0.0, `ControlledShutdownRequest` did not contain `client_id` and it's a mandatory field in
// `RequestHeader`, which is used by `NetworkClient`
if (config.interBrokerProtocolVersion >= KAFKA_0_9_0)
networkClientControlledShutdown(config.controlledShutdownMaxRetries.intValue)
else blockingChannelControlledShutdown(config.controlledShutdownMaxRetries.intValue)
if (!shutdownSucceeded)
warn("Proceeding to do an unclean shutdown as all the controlled shutdown attempts failed")
}
}
/**
* Shutdown API for shutting down a single instance of the Kafka server.
* Shuts down the LogManager, the SocketServer and the log cleaner scheduler thread
*/
def shutdown() {
try {
info("shutting down")
if (isStartingUp.get)
throw new IllegalStateException("Kafka server is still starting up, cannot shut down!")
// To ensure correct behavior under concurrent calls, we need to check `shutdownLatch` first since it gets updated
// last in the `if` block. If the order is reversed, we could shutdown twice or leave `isShuttingDown` set to
// `true` at the end of this method.
if (shutdownLatch.getCount > 0 && isShuttingDown.compareAndSet(false, true)) {
CoreUtils.swallow(controlledShutdown())
brokerState.newState(BrokerShuttingDown)
if (socketServer != null)
CoreUtils.swallow(socketServer.shutdown())
if (requestHandlerPool != null)
CoreUtils.swallow(requestHandlerPool.shutdown())
CoreUtils.swallow(kafkaScheduler.shutdown())
if (apis != null)
CoreUtils.swallow(apis.close())
CoreUtils.swallow(authorizer.foreach(_.close()))
if (adminManager != null)
CoreUtils.swallow(adminManager.shutdown())
if (transactionCoordinator != null)
CoreUtils.swallow(transactionCoordinator.shutdown())
if (groupCoordinator != null)
CoreUtils.swallow(groupCoordinator.shutdown())
if (replicaManager != null)
CoreUtils.swallow(replicaManager.shutdown())
if (logManager != null)
CoreUtils.swallow(logManager.shutdown())
if (kafkaController != null)
CoreUtils.swallow(kafkaController.shutdown())
if (zkUtils != null)
CoreUtils.swallow(zkUtils.close())
if (metrics != null)
CoreUtils.swallow(metrics.close())
if (brokerTopicStats != null)
CoreUtils.swallow(brokerTopicStats.close())
brokerState.newState(NotRunning)
startupComplete.set(false)
isShuttingDown.set(false)
CoreUtils.swallow(AppInfoParser.unregisterAppInfo(jmxPrefix, config.brokerId.toString))
shutdownLatch.countDown()
info("shut down completed")
}
}
catch {
case e: Throwable =>
fatal("Fatal error during KafkaServer shutdown.", e)
isShuttingDown.set(false)
throw e
}
}
/**
* After calling shutdown(), use this API to wait until the shutdown is complete
*/
def awaitShutdown(): Unit = shutdownLatch.await()
def getLogManager(): LogManager = logManager
def boundPort(listenerName: ListenerName): Int = socketServer.boundPort(listenerName)
  /**
   * Generates a new broker.id if enabled, or reads it from meta.properties, based on the following conditions:
   * <ol>
   * <li> if the config provides no broker.id and broker id generation is enabled, generate a broker.id from Zookeeper's sequence
   * <li> if the broker.id stored in meta.properties doesn't match across all the log.dirs, throw InconsistentBrokerIdException
   * <li> if the config has a broker.id and meta.properties contains one that doesn't match it, throw InconsistentBrokerIdException
   * <li> if the config has a broker.id and there is no meta.properties file, create a new meta.properties and store the broker.id
   * </ol>
   *
   * @return A brokerId.
   */
private def getBrokerId: Int = {
var brokerId = config.brokerId
val brokerIdSet = mutable.HashSet[Int]()
for (logDir <- config.logDirs) {
val brokerMetadataOpt = brokerMetadataCheckpoints(logDir).read()
brokerMetadataOpt.foreach { brokerMetadata =>
brokerIdSet.add(brokerMetadata.brokerId)
}
}
if(brokerIdSet.size > 1)
throw new InconsistentBrokerIdException(
s"Failed to match broker.id across log.dirs. This could happen if multiple brokers shared a log directory (log.dirs) " +
s"or partial data was manually copied from another broker. Found $brokerIdSet")
else if(brokerId >= 0 && brokerIdSet.size == 1 && brokerIdSet.last != brokerId)
throw new InconsistentBrokerIdException(
s"Configured broker.id $brokerId doesn't match stored broker.id ${brokerIdSet.last} in meta.properties. " +
s"If you moved your data, make sure your configured broker.id matches. " +
s"If you intend to create a new broker, you should remove all data in your data directories (log.dirs).")
else if(brokerIdSet.isEmpty && brokerId < 0 && config.brokerIdGenerationEnable) // generate a new brokerId from Zookeeper
brokerId = generateBrokerId
else if(brokerIdSet.size == 1) // pick broker.id from meta.properties
brokerId = brokerIdSet.last
brokerId
}
private def checkpointBrokerId(brokerId: Int) {
var logDirsWithoutMetaProps: List[String] = List()
for (logDir <- config.logDirs) {
val brokerMetadataOpt = brokerMetadataCheckpoints(logDir).read()
if(brokerMetadataOpt.isEmpty)
logDirsWithoutMetaProps ++= List(logDir)
}
for(logDir <- logDirsWithoutMetaProps) {
val checkpoint = brokerMetadataCheckpoints(logDir)
checkpoint.write(BrokerMetadata(brokerId))
}
}
private def generateBrokerId: Int = {
try {
zkUtils.getBrokerSequenceId(config.maxReservedBrokerId)
} catch {
case e: Exception =>
error("Failed to generate broker.id due to ", e)
throw new GenerateBrokerIdException("Failed to generate broker.id", e)
}
}
}
| YMCoding/kafka-0.11.0.0-src-with-comment | core/src/main/scala/kafka/server/KafkaServer.scala | Scala | apache-2.0 | 30,128 |
package com.nelly.core.datastructures
import scala.collection.mutable.{ArrayBuffer, HashMap}
/**
 * Problem: the standard PriorityQueue implementation does not reorder or maintain priority when
 * the items stored within it are mutated.
 *
 * This data structure is a hash-map-backed max heap.
 * The hash map enables O(1) item lookup and O(log n) rearrangement of a mutated object. In contrast,
 * if the current PriorityQueue implementation supported this ability, it would cost O(n).
 *
 * Use cases include:
 * 1. Counting things while also being able to find the item with the highest count efficiently
 *    (see the illustrative usage sketch after the class definition).
 *
 * @param ordering the ordering that defines heap priority
 * @tparam T the element type
 */
class HashMapPriorityQueue[T](implicit ordering: Ordering[T]) {
val nodes = new ArrayBuffer[T]()
val indexedMap = new HashMap[T, Int]()
def size(): Int = nodes.length
def orderingId() : String = ordering.toString
/**
   * O(1)
* @param t
* @return
*/
def contains(t: T) : Boolean = indexedMap.get(t) match {
case Some(e) => true
case _ => false
}
/**
   * O(1)
   * Gets the stored version of this element if it exists, otherwise None.
* @param t
* @return
*/
def getStoredVersion(t: T) : Option[T] = indexedMap.get(t) match {
case Some(e) => Option(nodes(e))
case _ => None
}
/**
   * O(1)
* @return Some(T) if nodes has at least one element otherwise None
*/
def peek(): Option[T] = size() match {
case 0 => None
case _ => Option(nodes.head)
}
  /**
   * O(n) worst case, when all elements are equal, since we are not removing elements
   * (which would necessitate calling percolateDown/trickleUp).
   *
   * In the case of max values with duplicates:
   *
   *                  <ordKey:45, id:aa>
   *                 /                  \
   *   <ordKey:45, id:aab>      <ordKey:45, id:aac>   =>  [<ordKey:45, id:aa>, <ordKey:45, id:aab>, <ordKey:45, id:aac>]
   *      /             \
   *  <ordKey:4, id:caa>  <ordKey:5, id:baa>
   */
def peekValues(): Seq[T] = {
val items = size()
items match {
case 0 => Seq.empty
case _ => peekValues(0, Seq(),items)
}
}
private[this] def peekValues(start: Int, acc: Seq[T], items: Int): Seq[T] = if( 0 <= start && start < items) {
val compare = ordering.compare(nodes(0), nodes(start))
if( 0 == compare){
val nAcc = acc ++ Seq(nodes(start))
nAcc ++ peekValues(left(start), Seq(), items) ++ peekValues(right(start), Seq(), items)
} else acc
} else {
acc
}
/**
   * O(n)
* @return true if the structure is a valid heap; false otherwise.
*/
def isMaxHeap: Boolean = {
def isValidOrder(index: Int, root: T) :Boolean ={
val child = nodes(index)
val compare = ordering.compare(root, child)
if(compare < 0) false else true
}
val items = size()
    Range(0, items).forall(
      index => {
        val root = nodes(index)
        val leftChildIndex = left(index)
        val rightChildIndex = right(index)
        // validate the ordering against both children whenever they exist
        val leftOk = if (leftChildIndex < items) isValidOrder(leftChildIndex, root) else true
        val rightOk = if (rightChildIndex < items) isValidOrder(rightChildIndex, root) else true
        leftOk && rightOk
      }
    )
}
def isEmpty(): Boolean = size == 0
/**
* removes max item from list and maintains max heap invariant
* @return max item
*/
def poll() :Option[T] = synchronized {
val items = size()
items match {
case 0 => None
      case _ => {
        val root = nodes(0)
        indexedMap.remove(root)
        if(items != 1){
          val last = nodes.last
          this.set(0, last)
        }
        // physically shrink the buffer: the trailing slot is now stale (its element was either
        // moved to the root above, or is the polled root itself when only one item remains)
        nodes.remove(items - 1)
        if(items != 1){
          percolateDown(0, items - 1)
        }
        Option(root)
      }
}
}
/**
* Inserts/updates the element into the heap
* @param elm the element to be inserted/updated
*/
def put(elm: T) :Unit= {
val items = size()
indexedMap.get(elm) match {
      case Some(i) => { // found in map: overwrite the stored element and repair the heap around it
        set(i, elm)
        repairHeap(i)
      }
case None => { // not found
indexedMap.put(elm, items)
nodes += elm
trickleUp(items)
}
}
}
  /**
   * When a stored object has been mutated, call this method to rearrange the heap around it
   * (see the sketch after this method).
   * @param culprit the mutated element
   * @return true if the element was found and the heap repaired, false otherwise
   */
def repairHeap(culprit: T) :Boolean = {
indexedMap.get(culprit) match {
case Some(index) => { repairHeap(index); true }
case None => false
}
}
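  // Illustrative sketch, not part of the original source. Assuming T is a mutable counter such as
  //   class Counter(val key: String, var count: Int)
  // whose equals/hashCode are based on key and whose Ordering compares count, a typical update is:
  //   queue.getStoredVersion(probe).foreach { c => c.count += 1; queue.repairHeap(c) }
  // which re-orders the heap in O(log n) instead of rebuilding it.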
private[this] def repairHeap(i: Int){
percolateDown(i, size())
trickleUp(i)
}
private[this] def trickleUp(index: Int) :Unit = index match {
case 0 =>
case _ if index > 0 => {
val parentIndex = parent(index)
val currentNode = nodes(index)
val parentNode = nodes(parentIndex)
val compareTo = ordering.compare(parentNode, currentNode)
if(compareTo < 0){
swap(index, parentIndex)
trickleUp(parentIndex)
}
}
case _ =>
}
private[this] def percolateDown(index: Int, items: Int) :Unit = {
val leftChildIndex = left(index)
val rightChildIndex = right(index)
val root = nodes(index)
var largestIndex = index
var currentLargest = root
if(leftChildIndex < items){
val leftChild = nodes(leftChildIndex)
if(ordering.compare(leftChild, currentLargest) > 0){
currentLargest = leftChild
largestIndex = leftChildIndex
}
}
if(rightChildIndex < items){
val rightChild = nodes(rightChildIndex)
if(ordering.compare(rightChild, currentLargest) > 0){
currentLargest = rightChild
largestIndex = rightChildIndex
}
}
if(largestIndex != index){
swap(index, largestIndex)
percolateDown(largestIndex, items)
}
}
private[this] def parent(index: Int) :Int = ((index+1) / 2)-1
private[this] def left(index: Int) :Int = (2*(index+1))-1
private[this] def right(index: Int) :Int = 2*(index+1)
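  // Standard 0-based array-heap index arithmetic: e.g. index 0 has children at 1 and 2,
  // index 1 has children at 3 and 4, and both 3 and 4 map back to parent 1.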
private[this] def set(index: Int , e: T) :Unit = synchronized{
nodes.update(index, e)
indexedMap.put(e, index)
}
private[this] def swap( a: Int, b: Int) :Unit = synchronized {
val o = nodes(a)
this.set(a,nodes(b))
this.set(b,o)
}
}
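// Minimal usage sketch (illustrative only, not part of the original source); assumes an
// Ordering[(String, Int)] that ranks pairs by their count component:
//   val byCount: Ordering[(String, Int)] = Ordering.by(_._2)
//   val counts = new HashMapPriorityQueue[(String, Int)]()(byCount)
//   counts.put(("GET /index", 3))
//   counts.put(("GET /about", 7))
//   counts.peek()        // Some(("GET /about", 7))
//   counts.peekValues()  // every entry tied with the current maximum
//   counts.poll()        // removes and returns the maximum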
| ezinelony/http-log-monitor | core/src/main/scala/com/nelly/core/datastructures/HashMapPriorityQueue.scala | Scala | mit | 6,385 |
package org.jetbrains.plugins.scala
package debugger.evaluation
import com.intellij.debugger.codeinsight.RuntimeTypeEvaluator
import com.intellij.debugger.engine.ContextUtil
import com.intellij.debugger.engine.evaluation.expression.ExpressionEvaluator
import com.intellij.debugger.engine.evaluation.{CodeFragmentKind, EvaluationContextImpl, TextWithImportsImpl}
import com.intellij.debugger.impl.DebuggerContextImpl
import com.intellij.debugger.{DebuggerBundle, DebuggerInvocationUtil, EvaluatingComputable}
import com.intellij.openapi.application.{AccessToken, ReadAction}
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.progress.ProgressIndicator
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.Key
import com.intellij.psi._
import com.intellij.psi.impl.source.PsiImmediateClassType
import com.intellij.psi.search.GlobalSearchScope
import com.sun.jdi.{ClassType, Type, Value}
import org.jetbrains.annotations.Nullable
import org.jetbrains.plugins.scala.debugger.evaluation.ScalaRuntimeTypeEvaluator._
import org.jetbrains.plugins.scala.debugger.evaluation.util.DebuggerUtil
import org.jetbrains.plugins.scala.extensions.inReadAction
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScModifierListOwner
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScObject
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiManager
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.api.{ExtractClass, TypeSystem}
/**
* Nikolay.Tropin
* 8/8/13
*/
abstract class ScalaRuntimeTypeEvaluator(@Nullable editor: Editor, expression: PsiElement, context: DebuggerContextImpl, indicator: ProgressIndicator)
extends RuntimeTypeEvaluator(editor, expression, context, indicator) {
override def evaluate(evaluationContext: EvaluationContextImpl): PsiType = {
val project: Project = evaluationContext.getProject
val evaluator: ExpressionEvaluator = DebuggerInvocationUtil.commitAndRunReadAction(project, new EvaluatingComputable[ExpressionEvaluator] {
def compute: ExpressionEvaluator = {
val textWithImports = new TextWithImportsImpl(CodeFragmentKind.CODE_BLOCK, expression.getText)
val codeFragment = new ScalaCodeFragmentFactory().createCodeFragment(textWithImports, expression, project)
ScalaEvaluatorBuilder.build(codeFragment, ContextUtil.getSourcePosition(evaluationContext))
}
})
val value: Value = evaluator.evaluate(evaluationContext)
if (value != null) {
inReadAction {
Option(getCastableRuntimeType(project, value)).map(new PsiImmediateClassType(_, PsiSubstitutor.EMPTY)).orNull
}
} else throw EvaluationException(DebuggerBundle.message("evaluation.error.surrounded.expression.null"))
}
}
object ScalaRuntimeTypeEvaluator {
val KEY: Key[ScExpression => ScType] = Key.create("SCALA_RUNTIME_TYPE_EVALUATOR")
def getCastableRuntimeType(project: Project, value: Value): PsiClass = {
val unwrapped = DebuggerUtil.unwrapScalaRuntimeObjectRef(value)
val jdiType: Type = unwrapped.asInstanceOf[Value].`type`
var psiClass: PsiClass = findPsiClass(project, jdiType)
if (psiClass != null) {
return psiClass
}
jdiType match {
case classType: ClassType =>
val superclass: ClassType = classType.superclass
val stdTypeNames = Seq("java.lang.Object", "scala.Any", "scala.AnyRef", "scala.AnyVal")
if (superclass != null && !stdTypeNames.contains(superclass.name)) {
psiClass = findPsiClass(project, superclass)
if (psiClass != null) {
return psiClass
}
}
import scala.collection.JavaConversions._
classType.interfaces.map(findPsiClass(project, _)).find(_ != null).orNull
case _ => null
}
}
private def findPsiClass(project: Project, jdiType: Type): PsiClass = {
val token: AccessToken = ReadAction.start
try {
ScalaPsiManager.instance(project).getCachedClass(GlobalSearchScope.allScope(project), jdiType.name()).orNull
}
finally {
token.finish()
}
}
def isSubtypeable(scType: ScType)
(implicit typeSystem: TypeSystem): Boolean = {
scType match {
case ExtractClass(psiClass) =>
psiClass match {
case _: ScObject => false
case owner: ScModifierListOwner => !owner.hasFinalModifier
case _ if scType.isInstanceOf[PsiPrimitiveType] => false
case _ => !psiClass.hasModifierProperty(PsiModifier.FINAL)
}
case _ => false
}
}
}
| whorbowicz/intellij-scala | src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaRuntimeTypeEvaluator.scala | Scala | apache-2.0 | 4,673 |
package com.github.gdefacci.briscola.presentation.player
import com.github.gdefacci.briscola.service.player.PlayerService
import com.github.gdefacci.briscola.web.util.ToPresentation
import com.github.gdefacci.briscola.player._
import com.github.gdefacci.briscola.presentation
import com.github.gdefacci.briscola.web.util.WebSocketChannel
import com.github.gdefacci.briscola.presentation.RoutesServletConfig
object PlayersModule {
def plan(
routesServletConfig: RoutesServletConfig,
playerRoutes: PlayerRoutes,
playerService: PlayerService,
toPresentation: ToPresentation[PlayerError],
playerPresentationAdapter:PlayerPresentationAdapter): PlayersPlan = {
import playerPresentationAdapter._
new PlayersPlan(routesServletConfig.players, playerRoutes, playerService, toPresentation)
}
def stateChangeFilter(playerPresentationAdapter:PlayerPresentationAdapter) = {
import playerPresentationAdapter._
new PlayersStateChangeFilter
}
def channel(playerService: PlayerService,
playersStateChangeFilter: PlayersStateChangeFilter)(
implicit enc: argonaut.EncodeJson[presentation.EventAndState[PlayerEvent, Iterable[Player]]]): WebSocketChannel[PlayerId] =
WebSocketChannel(playerService.changes, playersStateChangeFilter)(enc)
} | gdefacci/briscola | ddd-briscola-web/src/main/scala/com/github/gdefacci/briscola/presentation/player/PlayersModule.scala | Scala | bsd-3-clause | 1,286 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import java.util.TimeZone
import org.scalatest.Matchers
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.{Cross, Inner}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.physical.{HashPartitioning, Partitioning,
RangePartitioning, RoundRobinPartitioning}
import org.apache.spark.sql.types._
class AnalysisSuite extends AnalysisTest with Matchers {
import org.apache.spark.sql.catalyst.analysis.TestRelations._
test("union project *") {
val plan = (1 to 120)
.map(_ => testRelation)
.fold[LogicalPlan](testRelation) { (a, b) =>
a.select(UnresolvedStar(None)).select('a).union(b.select(UnresolvedStar(None)))
}
assertAnalysisSuccess(plan)
}
test("check project's resolved") {
assert(Project(testRelation.output, testRelation).resolved)
assert(!Project(Seq(UnresolvedAttribute("a")), testRelation).resolved)
val explode = Explode(AttributeReference("a", IntegerType, nullable = true)())
assert(!Project(Seq(Alias(explode, "explode")()), testRelation).resolved)
assert(!Project(Seq(Alias(count(Literal(1)), "count")()), testRelation).resolved)
}
test("analyze project") {
checkAnalysis(
Project(Seq(UnresolvedAttribute("a")), testRelation),
Project(testRelation.output, testRelation))
checkAnalysis(
Project(Seq(UnresolvedAttribute("TbL.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation))
assertAnalysisError(
Project(Seq(UnresolvedAttribute("tBl.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Seq("cannot resolve"))
checkAnalysis(
Project(Seq(UnresolvedAttribute("TbL.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation),
caseSensitive = false)
checkAnalysis(
Project(Seq(UnresolvedAttribute("tBl.a")),
SubqueryAlias("TbL", UnresolvedRelation(TableIdentifier("TaBlE")))),
Project(testRelation.output, testRelation),
caseSensitive = false)
}
test("resolve sort references - filter/limit") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
// Case 1: one missing attribute is in the leaf node and another is in the unary node
val plan1 = testRelation2
.where('a > "str").select('a, 'b)
.where('b > "str").select('a)
.sortBy('b.asc, 'c.desc)
val expected1 = testRelation2
.where(a > "str").select(a, b, c)
.where(b > "str").select(a, b, c)
.sortBy(b.asc, c.desc)
.select(a)
checkAnalysis(plan1, expected1)
// Case 2: all the missing attributes are in the leaf node
val plan2 = testRelation2
.where('a > "str").select('a)
.where('a > "str").select('a)
.sortBy('b.asc, 'c.desc)
val expected2 = testRelation2
.where(a > "str").select(a, b, c)
.where(a > "str").select(a, b, c)
.sortBy(b.asc, c.desc)
.select(a)
checkAnalysis(plan2, expected2)
}
test("resolve sort references - join") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
val h = testRelation3.output(3)
// Case: join itself can resolve all the missing attributes
val plan = testRelation2.join(testRelation3)
.where('a > "str").select('a, 'b)
.sortBy('c.desc, 'h.asc)
val expected = testRelation2.join(testRelation3)
.where(a > "str").select(a, b, c, h)
.sortBy(c.desc, h.asc)
.select(a, b)
checkAnalysis(plan, expected)
}
test("resolve sort references - aggregate") {
val a = testRelation2.output(0)
val b = testRelation2.output(1)
val c = testRelation2.output(2)
val alias_a3 = count(a).as("a3")
val alias_b = b.as("aggOrder")
// Case 1: when the child of Sort is not Aggregate,
// the sort reference is handled by the rule ResolveSortReferences
val plan1 = testRelation2
.groupBy('a, 'c, 'b)('a, 'c, count('a).as("a3"))
.select('a, 'c, 'a3)
.orderBy('b.asc)
val expected1 = testRelation2
.groupBy(a, c, b)(a, c, alias_a3, b)
.select(a, c, alias_a3.toAttribute, b)
.orderBy(b.asc)
.select(a, c, alias_a3.toAttribute)
checkAnalysis(plan1, expected1)
// Case 2: when the child of Sort is Aggregate,
// the sort reference is handled by the rule ResolveAggregateFunctions
val plan2 = testRelation2
.groupBy('a, 'c, 'b)('a, 'c, count('a).as("a3"))
.orderBy('b.asc)
val expected2 = testRelation2
.groupBy(a, c, b)(a, c, alias_a3, alias_b)
.orderBy(alias_b.toAttribute.asc)
.select(a, c, alias_a3.toAttribute)
checkAnalysis(plan2, expected2)
}
test("resolve relations") {
assertAnalysisError(UnresolvedRelation(TableIdentifier("tAbLe")), Seq())
checkAnalysis(UnresolvedRelation(TableIdentifier("TaBlE")), testRelation)
checkAnalysis(
UnresolvedRelation(TableIdentifier("tAbLe")), testRelation, caseSensitive = false)
checkAnalysis(
UnresolvedRelation(TableIdentifier("TaBlE")), testRelation, caseSensitive = false)
}
test("divide should be casted into fractional types") {
val plan = caseInsensitiveAnalyzer.execute(
testRelation2.select(
'a / Literal(2) as 'div1,
'a / 'b as 'div2,
'a / 'c as 'div3,
'a / 'd as 'div4,
'e / 'e as 'div5))
val pl = plan.asInstanceOf[Project].projectList
assert(pl(0).dataType == DoubleType)
assert(pl(1).dataType == DoubleType)
assert(pl(2).dataType == DoubleType)
assert(pl(3).dataType == DoubleType)
assert(pl(4).dataType == DoubleType)
}
test("pull out nondeterministic expressions from RepartitionByExpression") {
val plan = RepartitionByExpression(Seq(Rand(33)), testRelation, numPartitions = 10)
val projected = Alias(Rand(33), "_nondeterministic")()
val expected =
Project(testRelation.output,
RepartitionByExpression(Seq(projected.toAttribute),
Project(testRelation.output :+ projected, testRelation),
numPartitions = 10))
checkAnalysis(plan, expected)
}
test("pull out nondeterministic expressions from Sort") {
val plan = Sort(Seq(SortOrder(Rand(33), Ascending)), false, testRelation)
val projected = Alias(Rand(33), "_nondeterministic")()
val expected =
Project(testRelation.output,
Sort(Seq(SortOrder(projected.toAttribute, Ascending)), false,
Project(testRelation.output :+ projected, testRelation)))
checkAnalysis(plan, expected)
}
test("SPARK-9634: cleanup unnecessary Aliases in LogicalPlan") {
val a = testRelation.output.head
var plan = testRelation.select(((a + 1).as("a+1") + 2).as("col"))
var expected = testRelation.select((a + 1 + 2).as("col"))
checkAnalysis(plan, expected)
plan = testRelation.groupBy(a.as("a1").as("a2"))((min(a).as("min_a") + 1).as("col"))
expected = testRelation.groupBy(a)((min(a) + 1).as("col"))
checkAnalysis(plan, expected)
    // CreateStruct is a special case for which we should not trim the Alias.
plan = testRelation.select(CreateStruct(Seq(a, (a + 1).as("a+1"))).as("col"))
expected = testRelation.select(CreateNamedStruct(Seq(
Literal(a.name), a,
Literal("a+1"), (a + 1))).as("col"))
checkAnalysis(plan, expected)
}
  test("Analysis may leave unnecessary aliases") {
val att1 = testRelation.output.head
var plan = testRelation.select(
CreateStruct(Seq(att1, ((att1.as("aa")) + 1).as("a_plus_1"))).as("col"),
att1
)
val prevPlan = getAnalyzer(true).execute(plan)
plan = prevPlan.select(CreateArray(Seq(
CreateStruct(Seq(att1, (att1 + 1).as("a_plus_1"))).as("col1"),
/** alias should be eliminated by [[CleanupAliases]] */
"col".attr.as("col2")
)).as("arr"))
plan = getAnalyzer(true).execute(plan)
val expectedPlan = prevPlan.select(
CreateArray(Seq(
CreateNamedStruct(Seq(
Literal(att1.name), att1,
Literal("a_plus_1"), (att1 + 1))),
'col.struct(prevPlan.output(0).dataType.asInstanceOf[StructType]).notNull
)).as("arr")
)
checkAnalysis(plan, expectedPlan)
}
test("SPARK-10534: resolve attribute references in order by clause") {
val a = testRelation2.output(0)
val c = testRelation2.output(2)
val plan = testRelation2.select('c).orderBy(Floor('a).asc)
val expected = testRelation2.select(c, a)
.orderBy(Floor(Cast(a, DoubleType, Option(TimeZone.getDefault().getID))).asc).select(c)
checkAnalysis(plan, expected)
}
test("self intersect should resolve duplicate expression IDs") {
val plan = testRelation.intersect(testRelation)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: invalid CAST in NULL IN(...) expression") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(1), Literal(2))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: different types in inlist but can be converted to a common type") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(1), Literal(1.2345))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisSuccess(plan)
}
test("SPARK-8654: check type compatibility error") {
val plan = Project(Alias(In(Literal(null), Seq(Literal(true), Literal(1))), "a")() :: Nil,
LocalRelation()
)
assertAnalysisError(plan, Seq("data type mismatch: Arguments must be same type"))
}
test("SPARK-11725: correctly handle null inputs for ScalaUDF") {
val string = testRelation2.output(0)
val double = testRelation2.output(2)
val short = testRelation2.output(4)
val nullResult = Literal.create(null, StringType)
def checkUDF(udf: Expression, transformed: Expression): Unit = {
checkAnalysis(
Project(Alias(udf, "")() :: Nil, testRelation2),
Project(Alias(transformed, "")() :: Nil, testRelation2)
)
}
// non-primitive parameters do not need special null handling
val udf1 = ScalaUDF((s: String) => "x", StringType, string :: Nil)
val expected1 = udf1
checkUDF(udf1, expected1)
// only primitive parameter needs special null handling
val udf2 = ScalaUDF((s: String, d: Double) => "x", StringType, string :: double :: Nil)
val expected2 = If(IsNull(double), nullResult, udf2)
checkUDF(udf2, expected2)
// special null handling should apply to all primitive parameters
val udf3 = ScalaUDF((s: Short, d: Double) => "x", StringType, short :: double :: Nil)
val expected3 = If(
IsNull(short) || IsNull(double),
nullResult,
udf3)
checkUDF(udf3, expected3)
// we can skip special null handling for primitive parameters that are not nullable
// TODO: this is disabled for now as we can not completely trust `nullable`.
val udf4 = ScalaUDF(
(s: Short, d: Double) => "x",
StringType,
short :: double.withNullability(false) :: Nil)
val expected4 = If(
IsNull(short),
nullResult,
udf4)
// checkUDF(udf4, expected4)
}
test("SPARK-11863 mixture of aliases and real columns in order by clause - tpcds 19,55,71") {
val a = testRelation2.output(0)
val c = testRelation2.output(2)
val alias1 = a.as("a1")
val alias2 = c.as("a2")
val alias3 = count(a).as("a3")
val plan = testRelation2
.groupBy('a, 'c)('a.as("a1"), 'c.as("a2"), count('a).as("a3"))
.orderBy('a1.asc, 'c.asc)
val expected = testRelation2
.groupBy(a, c)(alias1, alias2, alias3)
.orderBy(alias1.toAttribute.asc, alias2.toAttribute.asc)
.select(alias1.toAttribute, alias2.toAttribute, alias3.toAttribute)
checkAnalysis(plan, expected)
}
test("Eliminate the unnecessary union") {
val plan = Union(testRelation :: Nil)
val expected = testRelation
checkAnalysis(plan, expected)
}
test("SPARK-12102: Ignore nullablity when comparing two sides of case") {
val relation = LocalRelation('a.struct('x.int), 'b.struct('x.int.withNullability(false)))
val plan = relation.select(CaseWhen(Seq((Literal(true), 'a.attr)), 'b).as("val"))
assertAnalysisSuccess(plan)
}
test("Keep attribute qualifiers after dedup") {
val input = LocalRelation('key.int, 'value.string)
val query =
Project(Seq($"x.key", $"y.key"),
Join(
Project(Seq($"x.key"), SubqueryAlias("x", input)),
Project(Seq($"y.key"), SubqueryAlias("y", input)),
Cross, None))
assertAnalysisSuccess(query)
}
private def assertExpressionType(
expression: Expression,
expectedDataType: DataType): Unit = {
val afterAnalyze =
Project(Seq(Alias(expression, "a")()), OneRowRelation()).analyze.expressions.head
if (!afterAnalyze.dataType.equals(expectedDataType)) {
fail(
s"""
|data type of expression $expression doesn't match expected:
|Actual data type:
|${afterAnalyze.dataType}
|
|Expected data type:
|${expectedDataType}
""".stripMargin)
}
}
test("SPARK-15776: test whether Divide expression's data type can be deduced correctly by " +
"analyzer") {
assertExpressionType(sum(Divide(1, 2)), DoubleType)
assertExpressionType(sum(Divide(1.0, 2)), DoubleType)
assertExpressionType(sum(Divide(1, 2.0)), DoubleType)
assertExpressionType(sum(Divide(1.0, 2.0)), DoubleType)
assertExpressionType(sum(Divide(1, 2.0f)), DoubleType)
assertExpressionType(sum(Divide(1.0f, 2)), DoubleType)
assertExpressionType(sum(Divide(1, Decimal(2))), DecimalType(31, 11))
assertExpressionType(sum(Divide(Decimal(1), 2)), DecimalType(31, 11))
assertExpressionType(sum(Divide(Decimal(1), 2.0)), DoubleType)
assertExpressionType(sum(Divide(1.0, Decimal(2.0))), DoubleType)
}
test("SPARK-18058: union and set operations shall not care about the nullability" +
" when comparing column types") {
val firstTable = LocalRelation(
AttributeReference("a",
StructType(Seq(StructField("a", IntegerType, nullable = true))), nullable = false)())
val secondTable = LocalRelation(
AttributeReference("a",
StructType(Seq(StructField("a", IntegerType, nullable = false))), nullable = false)())
val unionPlan = Union(firstTable, secondTable)
assertAnalysisSuccess(unionPlan)
val r1 = Except(firstTable, secondTable)
val r2 = Intersect(firstTable, secondTable)
assertAnalysisSuccess(r1)
assertAnalysisSuccess(r2)
}
test("resolve as with an already existed alias") {
checkAnalysis(
Project(Seq(UnresolvedAttribute("tbl2.a")),
SubqueryAlias("tbl", testRelation).as("tbl2")),
Project(testRelation.output, testRelation),
caseSensitive = false)
checkAnalysis(SubqueryAlias("tbl", testRelation).as("tbl2"), testRelation)
}
test("SPARK-20311 range(N) as alias") {
def rangeWithAliases(args: Seq[Int], outputNames: Seq[String]): LogicalPlan = {
SubqueryAlias("t", UnresolvedTableValuedFunction("range", args.map(Literal(_)), outputNames))
.select(star())
}
assertAnalysisSuccess(rangeWithAliases(3 :: Nil, "a" :: Nil))
assertAnalysisSuccess(rangeWithAliases(1 :: 4 :: Nil, "b" :: Nil))
assertAnalysisSuccess(rangeWithAliases(2 :: 6 :: 2 :: Nil, "c" :: Nil))
assertAnalysisError(
rangeWithAliases(3 :: Nil, "a" :: "b" :: Nil),
Seq("Number of given aliases does not match number of output columns. "
+ "Function name: range; number of aliases: 2; number of output columns: 1."))
}
test("SPARK-20841 Support table column aliases in FROM clause") {
def tableColumnsWithAliases(outputNames: Seq[String]): LogicalPlan = {
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias("t", UnresolvedRelation(TableIdentifier("TaBlE3")))
).select(star())
}
assertAnalysisSuccess(tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
tableColumnsWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-20962 Support subquery column aliases in FROM clause") {
def tableColumnsWithAliases(outputNames: Seq[String]): LogicalPlan = {
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias(
"t",
UnresolvedRelation(TableIdentifier("TaBlE3")))
).select(star())
}
assertAnalysisSuccess(tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
tableColumnsWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
tableColumnsWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-20963 Support aliases for join relations in FROM clause") {
def joinRelationWithAliases(outputNames: Seq[String]): LogicalPlan = {
val src1 = LocalRelation('id.int, 'v1.string).as("s1")
val src2 = LocalRelation('id.int, 'v2.string).as("s2")
UnresolvedSubqueryColumnAliases(
outputNames,
SubqueryAlias(
"dst",
src1.join(src2, Inner, Option(Symbol("s1.id") === Symbol("s2.id"))))
).select(star())
}
assertAnalysisSuccess(joinRelationWithAliases("col1" :: "col2" :: "col3" :: "col4" :: Nil))
assertAnalysisError(
joinRelationWithAliases("col1" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 1; number of columns: 4."))
assertAnalysisError(
joinRelationWithAliases("col1" :: "col2" :: "col3" :: "col4" :: "col5" :: Nil),
Seq("Number of column aliases does not match number of columns. " +
"Number of column aliases: 5; number of columns: 4."))
}
test("SPARK-22614 RepartitionByExpression partitioning") {
def checkPartitioning[T <: Partitioning](numPartitions: Int, exprs: Expression*): Unit = {
val partitioning = RepartitionByExpression(exprs, testRelation2, numPartitions).partitioning
assert(partitioning.isInstanceOf[T])
}
checkPartitioning[HashPartitioning](numPartitions = 10, exprs = Literal(20))
checkPartitioning[HashPartitioning](numPartitions = 10, exprs = 'a.attr, 'b.attr)
checkPartitioning[RangePartitioning](numPartitions = 10,
exprs = SortOrder(Literal(10), Ascending))
checkPartitioning[RangePartitioning](numPartitions = 10,
exprs = SortOrder('a.attr, Ascending), SortOrder('b.attr, Descending))
checkPartitioning[RoundRobinPartitioning](numPartitions = 10, exprs = Seq.empty: _*)
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = 0, exprs = Literal(20))
}
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = -1, exprs = Literal(20))
}
intercept[IllegalArgumentException] {
checkPartitioning(numPartitions = 10, exprs = SortOrder('a.attr, Ascending), 'b.attr)
}
}
test("SPARK-20392: analysis barrier") {
// [[AnalysisBarrier]] will be removed after analysis
checkAnalysis(
Project(Seq(UnresolvedAttribute("tbl.a")),
AnalysisBarrier(SubqueryAlias("tbl", testRelation))),
Project(testRelation.output, SubqueryAlias("tbl", testRelation)))
// Verify we won't go through a plan wrapped in a barrier.
// Since we wrap an unresolved plan and analyzer won't go through it. It remains unresolved.
val barrier = AnalysisBarrier(Project(Seq(UnresolvedAttribute("tbl.b")),
SubqueryAlias("tbl", testRelation)))
assertAnalysisError(barrier, Seq("cannot resolve '`tbl.b`'"))
}
}
| ron8hu/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala | Scala | apache-2.0 | 21,431 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import java.util.UUID
import java.util.concurrent.TimeUnit._
import scala.collection.JavaConverters._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.GenerateUnsafeProjection
import org.apache.spark.sql.catalyst.plans.logical.EventTimeWatermark
import org.apache.spark.sql.catalyst.plans.physical.{AllTuples, ClusteredDistribution, Distribution, Partitioning}
import org.apache.spark.sql.catalyst.streaming.InternalOutputModes._
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.execution.streaming.state._
import org.apache.spark.sql.streaming.{OutputMode, StateOperatorProgress}
import org.apache.spark.sql.types._
import org.apache.spark.util.{CompletionIterator, NextIterator, Utils}
/** Used to identify the state store for a given operator. */
case class StatefulOperatorStateInfo(
checkpointLocation: String,
queryRunId: UUID,
operatorId: Long,
storeVersion: Long,
numPartitions: Int) {
override def toString(): String = {
s"state info [ checkpoint = $checkpointLocation, runId = $queryRunId, " +
s"opId = $operatorId, ver = $storeVersion, numPartitions = $numPartitions]"
}
}
/**
* An operator that reads or writes state from the [[StateStore]].
* The [[StatefulOperatorStateInfo]] should be filled in by `prepareForExecution` in
* [[IncrementalExecution]].
*/
trait StatefulOperator extends SparkPlan {
def stateInfo: Option[StatefulOperatorStateInfo]
protected def getStateInfo: StatefulOperatorStateInfo = {
stateInfo.getOrElse {
throw new IllegalStateException("State location not present for execution")
}
}
}
/** An operator that reads from a StateStore. */
trait StateStoreReader extends StatefulOperator {
override lazy val metrics = Map(
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
}
/** An operator that writes to a StateStore. */
trait StateStoreWriter extends StatefulOperator { self: SparkPlan =>
override lazy val metrics = Map(
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
"numRowsDroppedByWatermark" -> SQLMetrics.createMetric(sparkContext,
"number of rows which are dropped by watermark"),
"numTotalStateRows" -> SQLMetrics.createMetric(sparkContext, "number of total state rows"),
"numUpdatedStateRows" -> SQLMetrics.createMetric(sparkContext, "number of updated state rows"),
"allUpdatesTimeMs" -> SQLMetrics.createTimingMetric(sparkContext, "time to update"),
"allRemovalsTimeMs" -> SQLMetrics.createTimingMetric(sparkContext, "time to remove"),
"commitTimeMs" -> SQLMetrics.createTimingMetric(sparkContext, "time to commit changes"),
"stateMemory" -> SQLMetrics.createSizeMetric(sparkContext, "memory used by state")
) ++ stateStoreCustomMetrics
/**
* Get the progress made by this stateful operator after execution. This should be called in
* the driver after this SparkPlan has been executed and metrics have been updated.
*/
def getProgress(): StateOperatorProgress = {
val customMetrics = stateStoreCustomMetrics
.map(entry => entry._1 -> longMetric(entry._1).value)
val javaConvertedCustomMetrics: java.util.HashMap[String, java.lang.Long] =
new java.util.HashMap(customMetrics.mapValues(long2Long).toMap.asJava)
new StateOperatorProgress(
numRowsTotal = longMetric("numTotalStateRows").value,
numRowsUpdated = longMetric("numUpdatedStateRows").value,
memoryUsedBytes = longMetric("stateMemory").value,
numRowsDroppedByWatermark = longMetric("numRowsDroppedByWatermark").value,
javaConvertedCustomMetrics
)
}
/** Records the duration of running `body` for the next query progress update. */
protected def timeTakenMs(body: => Unit): Long = Utils.timeTakenMs(body)._2
/**
* Set the SQL metrics related to the state store.
* This should be called in that task after the store has been updated.
*/
protected def setStoreMetrics(store: StateStore): Unit = {
val storeMetrics = store.metrics
longMetric("numTotalStateRows") += storeMetrics.numKeys
longMetric("stateMemory") += storeMetrics.memoryUsedBytes
storeMetrics.customMetrics.foreach { case (metric, value) =>
longMetric(metric.name) += value
}
}
private def stateStoreCustomMetrics: Map[String, SQLMetric] = {
val provider = StateStoreProvider.create(conf.stateStoreProviderClass)
provider.supportedCustomMetrics.map {
metric => (metric.name, metric.createSQLMetric(sparkContext))
}.toMap
}
protected def applyRemovingRowsOlderThanWatermark(
iter: Iterator[InternalRow],
predicateDropRowByWatermark: BasePredicate): Iterator[InternalRow] = {
iter.filterNot { row =>
val shouldDrop = predicateDropRowByWatermark.eval(row)
if (shouldDrop) longMetric("numRowsDroppedByWatermark") += 1
shouldDrop
}
}
/**
* Should the MicroBatchExecution run another batch based on this stateful operator and the
* current updated metadata.
*/
def shouldRunAnotherBatch(newMetadata: OffsetSeqMetadata): Boolean = false
}
/** An operator that supports watermark. */
trait WatermarkSupport extends UnaryExecNode {
/** The keys that may have a watermark attribute. */
def keyExpressions: Seq[Attribute]
/** The watermark value. */
def eventTimeWatermark: Option[Long]
/** Generate an expression that matches data older than the watermark */
lazy val watermarkExpression: Option[Expression] = {
WatermarkSupport.watermarkExpression(
child.output.find(_.metadata.contains(EventTimeWatermark.delayKey)),
eventTimeWatermark)
}
/** Predicate based on keys that matches data older than the watermark */
lazy val watermarkPredicateForKeys: Option[BasePredicate] = watermarkExpression.flatMap { e =>
if (keyExpressions.exists(_.metadata.contains(EventTimeWatermark.delayKey))) {
Some(Predicate.create(e, keyExpressions))
} else {
None
}
}
/** Predicate based on the child output that matches data older than the watermark. */
lazy val watermarkPredicateForData: Option[BasePredicate] =
watermarkExpression.map(Predicate.create(_, child.output))
protected def removeKeysOlderThanWatermark(store: StateStore): Unit = {
if (watermarkPredicateForKeys.nonEmpty) {
store.getRange(None, None).foreach { rowPair =>
if (watermarkPredicateForKeys.get.eval(rowPair.key)) {
store.remove(rowPair.key)
}
}
}
}
protected def removeKeysOlderThanWatermark(
storeManager: StreamingAggregationStateManager,
store: StateStore): Unit = {
if (watermarkPredicateForKeys.nonEmpty) {
storeManager.keys(store).foreach { keyRow =>
if (watermarkPredicateForKeys.get.eval(keyRow)) {
storeManager.remove(store, keyRow)
}
}
}
}
}
object WatermarkSupport {
/** Generate an expression on given attributes that matches data older than the watermark */
def watermarkExpression(
optionalWatermarkExpression: Option[Expression],
optionalWatermarkMs: Option[Long]): Option[Expression] = {
if (optionalWatermarkExpression.isEmpty || optionalWatermarkMs.isEmpty) return None
val watermarkAttribute = optionalWatermarkExpression.get
// If we are evicting based on a window, use the end of the window. Otherwise just
// use the attribute itself.
val evictionExpression =
if (watermarkAttribute.dataType.isInstanceOf[StructType]) {
LessThanOrEqual(
GetStructField(watermarkAttribute, 1),
Literal(optionalWatermarkMs.get * 1000))
} else {
LessThanOrEqual(
watermarkAttribute,
Literal(optionalWatermarkMs.get * 1000))
}
Some(evictionExpression)
}
}
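// Illustrative note, not part of the original source: for a plain event-time column the helper
// above produces roughly LessThanOrEqual(eventTime, Literal(watermarkMs * 1000)), and for a window
// struct it compares the window's end field instead (GetStructField(window, 1)); the * 1000 is
// assumed to convert the millisecond watermark to the microsecond precision used by TimestampType.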
/**
* For each input tuple, the key is calculated and the value from the [[StateStore]] is added
* to the stream (in addition to the input tuple) if present.
*/
case class StateStoreRestoreExec(
keyExpressions: Seq[Attribute],
stateInfo: Option[StatefulOperatorStateInfo],
stateFormatVersion: Int,
child: SparkPlan)
extends UnaryExecNode with StateStoreReader {
private[sql] val stateManager = StreamingAggregationStateManager.createStateManager(
keyExpressions, child.output, stateFormatVersion)
override protected def doExecute(): RDD[InternalRow] = {
val numOutputRows = longMetric("numOutputRows")
child.execute().mapPartitionsWithReadStateStore(
getStateInfo,
keyExpressions.toStructType,
stateManager.getStateValueSchema,
indexOrdinal = None,
session.sessionState,
Some(session.streams.stateStoreCoordinator)) { case (store, iter) =>
val hasInput = iter.hasNext
if (!hasInput && keyExpressions.isEmpty) {
// If our `keyExpressions` are empty, we're getting a global aggregation. In that case
// the `HashAggregateExec` will output a 0 value for the partial merge. We need to
// restore the value, so that we don't overwrite our state with a 0 value, but rather
// merge the 0 with existing state.
store.iterator().map(_.value)
} else {
iter.flatMap { row =>
val key = stateManager.getKey(row.asInstanceOf[UnsafeRow])
val restoredRow = stateManager.get(store, key)
val outputRows = Option(restoredRow).toSeq :+ row
numOutputRows += outputRows.size
outputRows
}
}
}
}
override def output: Seq[Attribute] = child.output
override def outputPartitioning: Partitioning = child.outputPartitioning
override def requiredChildDistribution: Seq[Distribution] = {
if (keyExpressions.isEmpty) {
AllTuples :: Nil
} else {
ClusteredDistribution(keyExpressions, stateInfo.map(_.numPartitions)) :: Nil
}
}
override protected def withNewChildInternal(newChild: SparkPlan): StateStoreRestoreExec =
copy(child = newChild)
}
/**
* For each input tuple, the key is calculated and the tuple is `put` into the [[StateStore]].
*/
case class StateStoreSaveExec(
keyExpressions: Seq[Attribute],
stateInfo: Option[StatefulOperatorStateInfo] = None,
outputMode: Option[OutputMode] = None,
eventTimeWatermark: Option[Long] = None,
stateFormatVersion: Int,
child: SparkPlan)
extends UnaryExecNode with StateStoreWriter with WatermarkSupport {
private[sql] val stateManager = StreamingAggregationStateManager.createStateManager(
keyExpressions, child.output, stateFormatVersion)
override protected def doExecute(): RDD[InternalRow] = {
metrics // force lazy init at driver
assert(outputMode.nonEmpty,
"Incorrect planning in IncrementalExecution, outputMode has not been set")
child.execute().mapPartitionsWithStateStore(
getStateInfo,
keyExpressions.toStructType,
stateManager.getStateValueSchema,
indexOrdinal = None,
session.sessionState,
Some(session.streams.stateStoreCoordinator)) { (store, iter) =>
val numOutputRows = longMetric("numOutputRows")
val numUpdatedStateRows = longMetric("numUpdatedStateRows")
val allUpdatesTimeMs = longMetric("allUpdatesTimeMs")
val allRemovalsTimeMs = longMetric("allRemovalsTimeMs")
val commitTimeMs = longMetric("commitTimeMs")
outputMode match {
// Update and output all rows in the StateStore.
case Some(Complete) =>
allUpdatesTimeMs += timeTakenMs {
while (iter.hasNext) {
val row = iter.next().asInstanceOf[UnsafeRow]
stateManager.put(store, row)
numUpdatedStateRows += 1
}
}
allRemovalsTimeMs += 0
commitTimeMs += timeTakenMs {
stateManager.commit(store)
}
setStoreMetrics(store)
stateManager.values(store).map { valueRow =>
numOutputRows += 1
valueRow
}
// Update and output only rows being evicted from the StateStore
// Assumption: watermark predicates must be non-empty if append mode is allowed
case Some(Append) =>
allUpdatesTimeMs += timeTakenMs {
val filteredIter = applyRemovingRowsOlderThanWatermark(iter,
watermarkPredicateForData.get)
while (filteredIter.hasNext) {
val row = filteredIter.next().asInstanceOf[UnsafeRow]
stateManager.put(store, row)
numUpdatedStateRows += 1
}
}
val removalStartTimeNs = System.nanoTime
val rangeIter = stateManager.iterator(store)
new NextIterator[InternalRow] {
override protected def getNext(): InternalRow = {
var removedValueRow: InternalRow = null
while(rangeIter.hasNext && removedValueRow == null) {
val rowPair = rangeIter.next()
if (watermarkPredicateForKeys.get.eval(rowPair.key)) {
stateManager.remove(store, rowPair.key)
removedValueRow = rowPair.value
}
}
if (removedValueRow == null) {
finished = true
null
} else {
numOutputRows += 1
removedValueRow
}
}
override protected def close(): Unit = {
allRemovalsTimeMs += NANOSECONDS.toMillis(System.nanoTime - removalStartTimeNs)
commitTimeMs += timeTakenMs { stateManager.commit(store) }
setStoreMetrics(store)
}
}
// Update and output modified rows from the StateStore.
case Some(Update) =>
new NextIterator[InternalRow] {
            // Filter late data using watermark if specified
private[this] val baseIterator = watermarkPredicateForData match {
case Some(predicate) => applyRemovingRowsOlderThanWatermark(iter, predicate)
case None => iter
}
private val updatesStartTimeNs = System.nanoTime
override protected def getNext(): InternalRow = {
if (baseIterator.hasNext) {
val row = baseIterator.next().asInstanceOf[UnsafeRow]
stateManager.put(store, row)
numOutputRows += 1
numUpdatedStateRows += 1
row
} else {
finished = true
null
}
}
override protected def close(): Unit = {
allUpdatesTimeMs += NANOSECONDS.toMillis(System.nanoTime - updatesStartTimeNs)
// Remove old aggregates if watermark specified
allRemovalsTimeMs += timeTakenMs {
removeKeysOlderThanWatermark(stateManager, store)
}
commitTimeMs += timeTakenMs { stateManager.commit(store) }
setStoreMetrics(store)
}
}
case _ => throw QueryExecutionErrors.invalidStreamingOutputModeError(outputMode)
}
}
}
override def output: Seq[Attribute] = child.output
override def outputPartitioning: Partitioning = child.outputPartitioning
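  // A global aggregation (no grouping keys) must see all input rows in a single partition;
  // otherwise rows are clustered by the grouping keys, matching the state store's
  // partition count when it is known.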
override def requiredChildDistribution: Seq[Distribution] = {
if (keyExpressions.isEmpty) {
AllTuples :: Nil
} else {
ClusteredDistribution(keyExpressions, stateInfo.map(_.numPartitions)) :: Nil
}
}
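  // Schedule another (no-data) batch when the watermark has advanced past the recorded
  // event-time watermark, so that Append/Update mode can evict and emit finalized state.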
override def shouldRunAnotherBatch(newMetadata: OffsetSeqMetadata): Boolean = {
(outputMode.contains(Append) || outputMode.contains(Update)) &&
eventTimeWatermark.isDefined &&
newMetadata.batchWatermarkMs > eventTimeWatermark.get
}
override protected def withNewChildInternal(newChild: SparkPlan): StateStoreSaveExec =
copy(child = newChild)
}
/** Physical operator for executing streaming Deduplicate. */
case class StreamingDeduplicateExec(
keyExpressions: Seq[Attribute],
child: SparkPlan,
stateInfo: Option[StatefulOperatorStateInfo] = None,
eventTimeWatermark: Option[Long] = None)
extends UnaryExecNode with StateStoreWriter with WatermarkSupport {
/** Distribute by grouping attributes */
override def requiredChildDistribution: Seq[Distribution] =
ClusteredDistribution(keyExpressions, stateInfo.map(_.numPartitions)) :: Nil
override protected def doExecute(): RDD[InternalRow] = {
metrics // force lazy init at driver
child.execute().mapPartitionsWithStateStore(
getStateInfo,
keyExpressions.toStructType,
child.output.toStructType,
indexOrdinal = None,
session.sessionState,
Some(session.streams.stateStoreCoordinator),
      // We won't validate the value row in the state store, since the stored value
      // (StreamingDeduplicateExec.EMPTY_ROW) is unrelated to the output schema.
Map(StateStoreConf.FORMAT_VALIDATION_CHECK_VALUE_CONFIG -> "false")) { (store, iter) =>
val getKey = GenerateUnsafeProjection.generate(keyExpressions, child.output)
val numOutputRows = longMetric("numOutputRows")
val numTotalStateRows = longMetric("numTotalStateRows")
val numUpdatedStateRows = longMetric("numUpdatedStateRows")
val allUpdatesTimeMs = longMetric("allUpdatesTimeMs")
val allRemovalsTimeMs = longMetric("allRemovalsTimeMs")
val commitTimeMs = longMetric("commitTimeMs")
val baseIterator = watermarkPredicateForData match {
case Some(predicate) => applyRemovingRowsOlderThanWatermark(iter, predicate)
case None => iter
}
val updatesStartTimeNs = System.nanoTime
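      // Keep only the first occurrence of each key: unseen keys are recorded in the state
      // store (with a placeholder value) and emitted; keys already present are dropped.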
val result = baseIterator.filter { r =>
val row = r.asInstanceOf[UnsafeRow]
val key = getKey(row)
val value = store.get(key)
if (value == null) {
store.put(key, StreamingDeduplicateExec.EMPTY_ROW)
numUpdatedStateRows += 1
numOutputRows += 1
true
} else {
// Drop duplicated rows
false
}
}
CompletionIterator[InternalRow, Iterator[InternalRow]](result, {
allUpdatesTimeMs += NANOSECONDS.toMillis(System.nanoTime - updatesStartTimeNs)
allRemovalsTimeMs += timeTakenMs { removeKeysOlderThanWatermark(store) }
commitTimeMs += timeTakenMs { store.commit() }
setStoreMetrics(store)
})
}
}
override def output: Seq[Attribute] = child.output
override def outputPartitioning: Partitioning = child.outputPartitioning
override def shouldRunAnotherBatch(newMetadata: OffsetSeqMetadata): Boolean = {
eventTimeWatermark.isDefined && newMetadata.batchWatermarkMs > eventTimeWatermark.get
}
override protected def withNewChildInternal(newChild: SparkPlan): StreamingDeduplicateExec =
copy(child = newChild)
}
object StreamingDeduplicateExec {
private val EMPTY_ROW =
UnsafeProjection.create(Array[DataType](NullType)).apply(InternalRow.apply(null))
}
| maropu/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/statefulOperators.scala | Scala | apache-2.0 | 20,261 |
package org.scalacheck.time
/** Stub trait since ScalaJs does not have native support for java.time types. */
private[scalacheck] trait JavaTimeShrink
| rickynils/scalacheck | js/src/main/scala/org/scalacheck/time/JavaTimeShrink.scala | Scala | bsd-3-clause | 152 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* Beineng Ma <[email protected]>
*/
package com.thenetcircle.event_bus.story.builder
import com.thenetcircle.event_bus.TestBase
import com.thenetcircle.event_bus.story.tasks.operators.{FilterOperatorBuilder, FilterOperatorSettings}
class FilterOperatorBuilderTest extends TestBase {
behavior of "FilterOperatorBuilder"
val builder = new FilterOperatorBuilder
it should "build proper FilterOperator with empty config" in {
val task = storyBuilder.buildTaskWithBuilder("""{}""".stripMargin)(builder)
val settings: FilterOperatorSettings = task.settings
settings.eventNameWhiteList shouldEqual Seq.empty[String]
settings.eventNameBlackList shouldEqual Seq.empty[String]
settings.channelWhiteList shouldEqual Seq.empty[String]
settings.channelBlackList shouldEqual Seq.empty[String]
settings.allowedTransportModes shouldEqual Seq("ASYNC", "BOTH", "NONE")
settings.onlyExtras shouldEqual Map.empty[String, String]
}
it should "build proper FilterOperator with proper config" in {
val task = storyBuilder.buildTaskWithBuilder("""{
| "event-name-white-list": ["user\\\\..*", "wio\\\\..*"],
| "event-name-black-list": ["image\\\\..*"],
| "channel-white-list": ["membership", "forum"],
| "channel-black-list": ["quick\\\\-.*"],
| "allowed-transport-modes": ["SYNC-PLUS", "SYNC", "BOTH"],
| "only-extras": {
| "actorId": "1234",
| "generatorId": "tnc-event-dispatcher"
| }
|}""".stripMargin)(builder)
val settings: FilterOperatorSettings = task.settings
settings.eventNameWhiteList shouldEqual Seq("user\\\\..*", "wio\\\\..*")
settings.eventNameBlackList shouldEqual Seq("image\\\\..*")
settings.channelWhiteList shouldEqual Seq("membership", "forum")
settings.channelBlackList shouldEqual Seq("quick\\\\-.*")
settings.allowedTransportModes shouldEqual Seq("SYNC-PLUS", "SYNC", "BOTH")
settings.onlyExtras shouldEqual Map("actorId" -> "1234", "generatorId" -> "tnc-event-dispatcher")
}
}
| thenetcircle/event-bus | core/src/test/scala/com/thenetcircle/event_bus/story/builder/FilterOperatorBuilderTest.scala | Scala | apache-2.0 | 2,630 |
package mr.merc.map.generator
import mr.merc.map.hex.TerrainHexField
import scala.util.Random
trait MapGenerator {
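  /** Builds a terrain hex field of the given width and height; callers may pass an explicit random seed. */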
def generateMap(width: Int, height: Int, seed: Int = Random.nextInt()): TerrainHexField
} | RenualdMarch/merc | src/main/scala/mr/merc/map/generator/MapGenerator.scala | Scala | gpl-3.0 | 208 |
/* Copyright (C) 2008-2010 Univ of Massachusetts Amherst, Computer Science Dept
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://code.google.com/p/factorie/
This software is provided under the terms of the Eclipse Public License 1.0
as published by http://www.opensource.org. For further information,
see the file `LICENSE.txt' included with this distribution. */
package cc.factorie.util
import scala.collection.mutable.ArrayBuffer
/** Lightweight subscribe/publish mechanism, implemented as a
    simple list of 0-argument functions that can be registered and then called.
Useful for providing notifications from deep within some processing.
@author Andrew McCallum
@see Hooks1
@see Hooks2
@see Hooks3
*/
class Hooks0 extends ArrayBuffer[()=>Unit] {
def apply : Unit = this.foreach(_.apply)
}
/** Lightweight subscribe/publish mechanism, implemented as a
    simple list of 1-argument functions that can be registered and then called.
Useful for providing notifications from deep within some processing.
@author Andrew McCallum
@see Hooks0
@see Hooks2
@see Hooks3
*/
class Hooks1[A] extends ArrayBuffer[A=>Unit] {
def apply(a:A) : Unit = this.foreach(_.apply(a))
}
/** Lightweight subscribe/publish mechanism, implemented as a
    simple list of 2-argument functions that can be registered and then called.
Useful for providing notifications from deep within some processing.
@author Andrew McCallum
@see Hooks0
@see Hooks1
@see Hooks3
*/
class Hooks2[A,B] extends ArrayBuffer[(A,B)=>Unit] {
def apply(a:A,b:B) : Unit = this.foreach(_.apply(a,b))
}
/** Lightweight subscribe/publish mechanism, implemented as a
    simple list of 3-argument functions that can be registered and then called.
Useful for providing notifications from deep within some processing.
@author Andrew McCallum
@see Hooks0
@see Hooks1
@see Hooks2
*/
class Hooks3[A,B,C] extends ArrayBuffer[(A,B,C)=>Unit] {
def apply(a:A,b:B,c:C) : Unit = this.foreach(_.apply(a,b,c))
}
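// Illustrative usage sketch (not part of the original sources): hooks are plain
// function buffers, so subscribers register with `+=` and publishers fire them
// with `apply`. The names below are hypothetical.
private[util] object HooksUsageSketch {
  val onStart = new Hooks0
  val onProgress = new Hooks1[Double]

  onStart += (() => println("started"))
  onProgress += (p => println("progress: " + p))

  def run(): Unit = {
    onStart.apply   // fires every registered 0-argument hook
    onProgress(0.5) // fires every registered 1-argument hook with 0.5
  }
}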
| andrewmilkowski/factorie | src/main/scala/cc/factorie/util/Hooks.scala | Scala | epl-1.0 | 2,113 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fuberlin.wiwiss.silk.plugins.distance.numeric
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FlatSpec
import org.scalatest.matchers.ShouldMatchers
import de.fuberlin.wiwiss.silk.testutil.approximatelyEqualTo
@RunWith(classOf[JUnitRunner])
class GeographicDistanceMetricTest extends FlatSpec with ShouldMatchers {
"GeographicDistanceMetric" should "return 0.0 if the coordinates are equal" in {
val metric = new GeographicDistanceMetric()
metric.evaluate("37.807981 -122.264609", "37.807981 -122.264609") should be(approximatelyEqualTo(0.0))
metric.evaluate("POINT(-0.124722 51.5081)", "POINT(-0.124722 51.5081)") should be(approximatelyEqualTo(0.0))
}
"GeographicDistanceMetric" should "return the distance of London and Berlin in kilometers" in {
val metric = new GeographicDistanceMetric("km")
metric.evaluate("POINT(-0.1167 51.5000)", "POINT(13.4000 52.5167)") should be(approximatelyEqualTo(930.60))
}
"GeographicDistanceMetric" should "return the distance of London and Berlin in meters" in {
val metric = new GeographicDistanceMetric("m")
metric.evaluate("POINT(-0.1167 51.5000)", "POINT(13.4000 52.5167)") should be(approximatelyEqualTo(930600.26))
}
} | fusepoolP3/p3-silk | silk-core/src/test/scala/de/fuberlin/wiwiss/silk/plugins/distance/numeric/GeographicDistanceMetricTest.scala | Scala | apache-2.0 | 1,828 |
/* __ *\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
package scala
package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
import WrapAsScala._
import WrapAsJava._
/** Adapters for Java/Scala collections API. */
private[collection] trait Wrappers {
trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
val underlying: Iterable[A]
def size = underlying.size
override def iterator = IteratorWrapper(underlying.iterator)
override def isEmpty = underlying.isEmpty
}
case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
def hasNext = underlying.hasNext
def next() = underlying.next()
def hasMoreElements = underlying.hasNext
def nextElement() = underlying.next()
override def remove() = throw new UnsupportedOperationException
}
class ToIteratorWrapper[A](underlying : Iterator[A]) {
def asJava = new IteratorWrapper(underlying)
}
case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] {
def hasNext = underlying.hasNext
def next() = underlying.next
}
case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] {
def hasNext = underlying.hasMoreElements
def next() = underlying.nextElement
}
case class IterableWrapper[A](underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] { }
case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] {
def iterator = underlying.iterator
def newBuilder[B] = new mutable.ArrayBuffer[B]
}
case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] {
def iterator = underlying.iterator
override def size = underlying.size
override def isEmpty = underlying.isEmpty
def newBuilder[B] = new mutable.ArrayBuffer[B]
}
case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
def get(i: Int) = underlying(i)
}
case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
def get(i: Int) = underlying(i)
override def set(i: Int, elem: A) = {
val p = underlying(i)
underlying(i) = elem
p
}
}
case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
def get(i: Int) = underlying(i)
override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p }
override def add(elem: A) = { underlying append elem; true }
override def remove(i: Int) = underlying remove i
}
case class JListWrapper[A](underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
def length = underlying.size
override def isEmpty = underlying.isEmpty
override def iterator: Iterator[A] = underlying.iterator
def apply(i: Int) = underlying.get(i)
def update(i: Int, elem: A) = underlying.set(i, elem)
def +=:(elem: A) = { underlying.subList(0, 0) add elem; this }
def +=(elem: A): this.type = { underlying add elem; this }
def insertAll(i: Int, elems: Traversable[A]) = {
val ins = underlying.subList(0, i)
elems.seq.foreach(ins.add(_))
}
def remove(i: Int) = underlying.remove(i)
def clear() = underlying.clear()
def result = this
// Note: Clone cannot just call underlying.clone because in Java, only specific collections
// expose clone methods. Generically, they're protected.
override def clone(): JListWrapper[A] = JListWrapper(new ju.ArrayList[A](underlying))
}
@SerialVersionUID(1L)
class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] with Serializable { self =>
// Note various overrides to avoid performance gotchas.
override def contains(o: Object): Boolean = {
try { underlying.contains(o.asInstanceOf[A]) }
catch { case cce: ClassCastException => false }
}
override def isEmpty = underlying.isEmpty
def size = underlying.size
def iterator = new ju.Iterator[A] {
val ui = underlying.iterator
var prev: Option[A] = None
def hasNext = ui.hasNext
def next = { val e = ui.next(); prev = Some(e); e }
override def remove() = prev match {
case Some(e) =>
underlying match {
case ms: mutable.Set[a] =>
ms remove e
prev = None
case _ =>
throw new UnsupportedOperationException("remove")
}
case _ =>
throw new IllegalStateException("next must be called at least once before remove")
}
}
}
case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) {
override def add(elem: A) = {
val sz = underlying.size
underlying += elem
sz < underlying.size
}
override def remove(elem: AnyRef) =
try underlying remove elem.asInstanceOf[A]
catch { case ex: ClassCastException => false }
override def clear() = underlying.clear()
}
case class JSetWrapper[A](underlying: ju.Set[A]) extends mutable.AbstractSet[A] with mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] {
override def size = underlying.size
def iterator = underlying.iterator
def contains(elem: A): Boolean = underlying.contains(elem)
def +=(elem: A): this.type = { underlying add elem; this }
def -=(elem: A): this.type = { underlying remove elem; this }
override def add(elem: A): Boolean = underlying add elem
override def remove(elem: A): Boolean = underlying remove elem
override def clear() = underlying.clear()
override def empty = JSetWrapper(new ju.HashSet[A])
// Note: Clone cannot just call underlying.clone because in Java, only specific collections
// expose clone methods. Generically, they're protected.
override def clone() =
new JSetWrapper[A](new ju.LinkedHashSet[A](underlying))
}
@SerialVersionUID(1L)
class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] with Serializable { self =>
override def size = underlying.size
override def get(key: AnyRef): B = try {
underlying get key.asInstanceOf[A] match {
case None => null.asInstanceOf[B]
case Some(v) => v
}
} catch {
case ex: ClassCastException => null.asInstanceOf[B]
}
override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
def size = self.size
def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
val ui = underlying.iterator
var prev : Option[A] = None
def hasNext = ui.hasNext
def next() = {
val (k, v) = ui.next()
prev = Some(k)
new ju.Map.Entry[A, B] {
import scala.util.hashing.byteswap32
def getKey = k
def getValue = v
def setValue(v1 : B) = self.put(k, v1)
override def hashCode = byteswap32(k.##) + (byteswap32(v.##) << 16)
override def equals(other: Any) = other match {
case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
case _ => false
}
}
}
override def remove() {
prev match {
case Some(k) =>
underlying match {
case mm: mutable.Map[a, _] =>
mm remove k
prev = None
case _ =>
throw new UnsupportedOperationException("remove")
}
case _ =>
throw new IllegalStateException("next must be called at least once before remove")
}
}
}
}
override def containsKey(key: AnyRef): Boolean = try {
// Note: Subclass of collection.Map with specific key type may redirect generic
// contains to specific contains, which will throw a ClassCastException if the
// wrong type is passed. This is why we need a type cast to A inside a try/catch.
underlying.contains(key.asInstanceOf[A])
} catch {
case ex: ClassCastException => false
}
}
case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) {
override def put(k: A, v: B) = underlying.put(k, v) match {
case Some(v1) => v1
case None => null.asInstanceOf[B]
}
override def remove(k: AnyRef): B = try {
underlying remove k.asInstanceOf[A] match {
case None => null.asInstanceOf[B]
case Some(v) => v
}
} catch {
case ex: ClassCastException => null.asInstanceOf[B]
}
override def clear() = underlying.clear()
}
trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]] extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
def underlying: ju.Map[A, B]
override def size = underlying.size
def get(k: A) = {
val v = underlying get k
if (v != null)
Some(v)
else if (underlying containsKey k)
Some(null.asInstanceOf[B])
else
None
}
def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
def -=(key: A): this.type = { underlying remove key; this }
override def put(k: A, v: B): Option[B] = Option(underlying.put(k, v))
override def update(k: A, v: B) { underlying.put(k, v) }
override def remove(k: A): Option[B] = Option(underlying remove k)
def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] {
val ui = underlying.entrySet.iterator
def hasNext = ui.hasNext
def next() = { val e = ui.next(); (e.getKey, e.getValue) }
}
override def clear() = underlying.clear()
override def empty: Repr = null.asInstanceOf[Repr]
}
/** Wraps a Java map as a Scala one. If the map is to support concurrent access,
* use [[JConcurrentMapWrapper]] instead. If the wrapped map is synchronized
* (e.g. from `java.util.Collections.synchronizedMap`), it is your responsibility
* to wrap all non-atomic operations with `underlying.synchronized`.
* This includes `get`, as `java.util.Map`'s API does not allow for an
* atomic `get` when `null` values may be present.
*/
case class JMapWrapper[A, B](underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
override def empty = JMapWrapper(new ju.HashMap[A, B])
}
class ConcurrentMapWrapper[A, B](override val underlying: concurrent.Map[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
override def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
case Some(v) => v
case None => null.asInstanceOf[B]
}
override def remove(k: AnyRef, v: AnyRef) = try {
underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
} catch {
case ex: ClassCastException =>
false
}
override def replace(k: A, v: B): B = underlying.replace(k, v) match {
case Some(v) => v
case None => null.asInstanceOf[B]
}
override def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
}
/** Wraps a concurrent Java map as a Scala one. Single-element concurrent
* access is supported; multi-element operations such as maps and filters
* are not guaranteed to be atomic.
*/
case class JConcurrentMapWrapper[A, B](underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with concurrent.Map[A, B] {
override def get(k: A) = Option(underlying get k)
override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B])
def putIfAbsent(k: A, v: B): Option[B] = Option(underlying.putIfAbsent(k, v))
def remove(k: A, v: B): Boolean = underlying.remove(k, v)
def replace(k: A, v: B): Option[B] = Option(underlying.replace(k, v))
def replace(k: A, oldvalue: B, newvalue: B): Boolean =
underlying.replace(k, oldvalue, newvalue)
}
case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) extends ju.Dictionary[A, B] {
def size: Int = underlying.size
def isEmpty: Boolean = underlying.isEmpty
def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator)
def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator)
def get(key: AnyRef) = try {
underlying get key.asInstanceOf[A] match {
case None => null.asInstanceOf[B]
case Some(v) => v
}
} catch {
case ex: ClassCastException => null.asInstanceOf[B]
}
def put(key: A, value: B): B = underlying.put(key, value) match {
case Some(v) => v
case None => null.asInstanceOf[B]
}
override def remove(key: AnyRef) = try {
underlying remove key.asInstanceOf[A] match {
case None => null.asInstanceOf[B]
case Some(v) => v
}
} catch {
case ex: ClassCastException => null.asInstanceOf[B]
}
}
case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) extends mutable.AbstractMap[A, B] with mutable.Map[A, B] {
override def size: Int = underlying.size
def get(k: A) = Option(underlying get k)
def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
def -=(key: A): this.type = { underlying remove key; this }
override def put(k: A, v: B): Option[B] = Option(underlying.put(k, v))
override def update(k: A, v: B) { underlying.put(k, v) }
override def remove(k: A): Option[B] = Option(underlying remove k)
def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k))
override def clear() = underlying.clear()
}
case class JPropertiesWrapper(underlying: ju.Properties) extends mutable.AbstractMap[String, String]
with mutable.Map[String, String]
with mutable.MapLike[String, String, JPropertiesWrapper] {
override def size = underlying.size
def get(k: String) = {
val v = underlying get k
if (v != null) Some(v.asInstanceOf[String]) else None
}
def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
def -=(key: String): this.type = { underlying remove key; this }
override def put(k: String, v: String): Option[String] = {
val r = underlying.put(k, v)
if (r != null) Some(r.asInstanceOf[String]) else None
}
override def update(k: String, v: String) { underlying.put(k, v) }
override def remove(k: String): Option[String] = {
val r = underlying remove k
if (r != null) Some(r.asInstanceOf[String]) else None
}
def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] {
val ui = underlying.entrySet.iterator
def hasNext = ui.hasNext
def next() = {
val e = ui.next()
(e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String])
}
}
override def clear() = underlying.clear()
override def empty = JPropertiesWrapper(new ju.Properties)
def getProperty(key: String) = underlying.getProperty(key)
def getProperty(key: String, defaultValue: String) =
underlying.getProperty(key, defaultValue)
def setProperty(key: String, value: String) =
underlying.setProperty(key, value)
}
}
@SerialVersionUID(0 - 5857859809262781311L)
object Wrappers extends Wrappers with Serializable
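// Illustrative sketch (not part of this file): these wrappers are normally obtained
// through the converter APIs rather than constructed directly, e.g.
//
//   import scala.collection.JavaConverters._
//
//   val jlist = new java.util.ArrayList[Int]()
//   val buf   = jlist.asScala   // a mutable.Buffer[Int] backed by jlist (a JListWrapper)
//   val back  = buf.asJava      // unwraps to the original java.util.List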
| felixmulder/scala | src/library/scala/collection/convert/Wrappers.scala | Scala | bsd-3-clause | 16,097 |
package org.openapitools.server.model
/**
* @param `class` for example: ''null''
* @param href for example: ''null''
*/
final case class Link (
`class`: Option[String],
href: Option[String]
)
| cliffano/swaggy-jenkins | clients/scala-akka-http-server/generated/src/main/scala/org/openapitools/server/model/Link.scala | Scala | mit | 203 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.controller.test
import java.time.Instant
import java.util.Base64
import scala.concurrent.Future
import scala.concurrent.duration.FiniteDuration
import org.junit.runner.RunWith
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
import org.scalatest.junit.JUnitRunner
import akka.http.scaladsl.marshallers.sprayjson.SprayJsonSupport._
import akka.http.scaladsl.model.FormData
import akka.http.scaladsl.model.HttpEntity
import akka.http.scaladsl.model.MediaTypes
import akka.http.scaladsl.model.StatusCodes._
import akka.http.scaladsl.model.HttpCharsets
import akka.http.scaladsl.model.HttpHeader
import akka.http.scaladsl.model.HttpResponse
import akka.http.scaladsl.model.Uri.Query
import akka.http.scaladsl.server.Route
import akka.http.scaladsl.model.HttpMethods
import akka.http.scaladsl.model.headers.{`Access-Control-Request-Headers`, `Content-Type`, RawHeader}
import akka.http.scaladsl.model.ContentTypes
import akka.http.scaladsl.model.ContentType
import akka.http.scaladsl.model.MediaType
import spray.json._
import spray.json.DefaultJsonProtocol._
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.WhiskConfig
import org.apache.openwhisk.core.controller._
import org.apache.openwhisk.core.entitlement.EntitlementProvider
import org.apache.openwhisk.core.entitlement.Privilege
import org.apache.openwhisk.core.entitlement.Resource
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.entity.size._
import org.apache.openwhisk.core.loadBalancer.LoadBalancer
import org.apache.openwhisk.http.ErrorResponse
import org.apache.openwhisk.http.Messages
import scala.collection.immutable.Set
/**
* Tests web actions API.
*
* Unit tests of the controller service as a standalone component.
* These tests exercise a fresh instance of the service object in memory -- these
* tests do NOT communication with a whisk deployment.
*
* @Idioglossia
* "using Specification DSL to write unit tests, as in should, must, not, be"
* "using Specs2RouteTest DSL to chain HTTP requests for unit testing, as in ~>"
*/
@RunWith(classOf[JUnitRunner])
class WebActionsApiCommonTests extends FlatSpec with Matchers {
"extension splitter" should "split action name and extension" in {
Seq(".http", ".json", ".text", ".html", ".svg").foreach { ext =>
Seq(s"t$ext", s"tt$ext", s"t.wxyz$ext", s"tt.wxyz$ext").foreach { s =>
Seq(true, false).foreach { enforce =>
val (n, e) = WhiskWebActionsApi.mediaTranscoderForName(s, enforce)
val i = s.lastIndexOf(".")
n shouldBe s.substring(0, i)
e.get.extension shouldBe ext
}
}
}
Seq(s"t", "tt", "abcde", "abcdef", "t.wxyz").foreach { s =>
val (n, e) = WhiskWebActionsApi.mediaTranscoderForName(s, false)
n shouldBe s
e.get.extension shouldBe ".http"
}
Seq(s"t", "tt", "abcde", "abcdef", "t.wxyz").foreach { s =>
val (n, e) = WhiskWebActionsApi.mediaTranscoderForName(s, true)
n shouldBe s
e shouldBe empty
}
}
}
@RunWith(classOf[JUnitRunner])
class WebActionsApiTests extends FlatSpec with Matchers with WebActionsApiBaseTests {
override lazy val webInvokePathSegments = Seq("web")
override lazy val webApiDirectives = new WebApiDirectives()
"properties" should "match verion" in {
webApiDirectives.method shouldBe "__ow_method"
webApiDirectives.headers shouldBe "__ow_headers"
webApiDirectives.path shouldBe "__ow_path"
webApiDirectives.namespace shouldBe "__ow_user"
webApiDirectives.query shouldBe "__ow_query"
webApiDirectives.body shouldBe "__ow_body"
webApiDirectives.statusCode shouldBe "statusCode"
webApiDirectives.enforceExtension shouldBe false
webApiDirectives.reservedProperties shouldBe {
Set("__ow_method", "__ow_headers", "__ow_path", "__ow_user", "__ow_query", "__ow_body")
}
}
}
trait WebActionsApiBaseTests extends ControllerTestCommon with BeforeAndAfterEach with WhiskWebActionsApi {
val uuid = UUID()
val systemId = Subject()
val systemKey = BasicAuthenticationAuthKey(uuid, Secret())
val systemIdentity =
Future.successful(
Identity(systemId, Namespace(EntityName(systemId.asString), uuid), systemKey, rights = Privilege.ALL))
val namespace = EntityPath(systemId.asString)
val proxyNamespace = namespace.addPath(EntityName("proxy"))
override lazy val entitlementProvider = new TestingEntitlementProvider(whiskConfig, loadBalancer)
protected val testRoutePath = webInvokePathSegments.mkString("/", "/", "")
def aname() = MakeName.next("web_action_tests")
behavior of "Web actions API"
var failActivation = 0 // toggle to cause action to fail
var failThrottleForSubject: Option[Subject] = None // toggle to cause throttle to fail for subject
var failCheckEntitlement = false // toggle to cause entitlement to fail
var actionResult: Option[JsObject] = None
var testParametersInInvokeAction = true // toggle to test parameter in invokeAction
var requireAuthenticationKey = "example-web-action-api-key"
var invocationCount = 0
var invocationsAllowed = 0
lazy val testFixturesToGc = {
implicit val tid = transid()
Seq(
stubPackage,
stubAction(namespace, EntityName("export_c")),
stubAction(proxyNamespace, EntityName("export_c")),
stubAction(proxyNamespace, EntityName("raw_export_c"))).map { f =>
put(entityStore, f, garbageCollect = false)
}
}
override def beforeAll() = {
testFixturesToGc.foreach(f => ())
}
override def beforeEach() = {
invocationCount = 0
invocationsAllowed = 0
}
override def afterEach() = {
failActivation = 0
failThrottleForSubject = None
failCheckEntitlement = false
actionResult = None
testParametersInInvokeAction = true
assert(invocationsAllowed == invocationCount, "allowed invoke count did not match actual")
cleanup()
}
override def afterAll() = {
implicit val tid = transid()
testFixturesToGc.foreach(delete(entityStore, _))
}
val allowedMethodsWithEntity = {
val nonModifierMethods = Seq(Get, Options)
val modifierMethods = Seq(Post, Put, Delete, Patch)
modifierMethods ++ nonModifierMethods
}
val allowedMethods = {
allowedMethodsWithEntity ++ Seq(Head)
}
val pngSample = "iVBORw0KGgoAAAANSUhEUgAAAAoAAAAGCAYAAAD68A/GAAAA/klEQVQYGWNgAAEHBxaG//+ZQMyyn581Pfas+cRQnf1LfF" +
"Ljf+62smUgcUbt0FA2Zh7drf/ffMy9vLn3RurrW9e5hCU11i2azfD4zu1/DHz8TAy/foUxsXBrFzHzC7r8+M9S1vn1qxQT07dDjL" +
"9fdemrqKxlYGT6z8AIMo6hgeUfA0PUvy9fGFh5GWK3z7vNxSWt++jX99+8SoyiGQwsW38w8PJEM7x5v5SJ8f+/xv8MDAzffv9hev" +
"fkWjiXBGMpMx+j2awovjcMjFztDO8+7GF49LkbZDCDeXLTWnZO7qDfn1/+5jbw/8pjYWS4wZLztXnuEuYTk2M+MzIw/AcA36Vewa" +
"D6fzsAAAAASUVORK5CYII="
  // there is only one predefined package: 'proxy'
val stubPackage = WhiskPackage(
EntityPath(systemId.asString),
EntityName("proxy"),
parameters = Parameters("x", JsString("X")) ++ Parameters("z", JsString("z")))
val packages = Seq(stubPackage)
val defaultActionParameters = {
Parameters("y", JsString("Y")) ++ Parameters("z", JsString("Z")) ++ Parameters("empty", JsNull)
}
  // action names that start with 'export_' will automatically have a web-export annotation added by the test harness
protected def stubAction(namespace: EntityPath,
name: EntityName,
customOptions: Boolean = true,
requireAuthentication: Boolean = false,
requireAuthenticationAsBoolean: Boolean = true) = {
val annotations = Parameters(Annotations.FinalParamsAnnotationName, JsTrue)
WhiskAction(
namespace,
name,
jsDefault("??"),
defaultActionParameters,
annotations = {
if (name.asString.startsWith("export_")) {
annotations ++
Parameters("web-export", JsTrue) ++ {
if (requireAuthentication) {
Parameters(
"require-whisk-auth",
(if (requireAuthenticationAsBoolean) JsTrue else JsString(requireAuthenticationKey)))
} else Parameters()
} ++ {
if (customOptions) {
Parameters("web-custom-options", JsTrue)
} else Parameters()
}
} else if (name.asString.startsWith("raw_export_")) {
annotations ++
Parameters("web-export", JsTrue) ++
Parameters("raw-http", JsTrue) ++ {
if (requireAuthentication) {
Parameters(
"require-whisk-auth",
(if (requireAuthenticationAsBoolean) JsTrue else JsString(requireAuthenticationKey)))
} else Parameters()
} ++ {
if (customOptions) {
Parameters("web-custom-options", JsTrue)
} else Parameters()
}
} else annotations
})
}
// there is only one identity defined for the fully qualified name of the web action: 'systemId'
override protected def getIdentity(namespace: EntityName)(implicit transid: TransactionId): Future[Identity] = {
if (namespace.asString == systemId.asString) {
systemIdentity
} else {
logging.info(this, s"namespace has no identity")
Future.failed(RejectRequest(BadRequest))
}
}
override protected[controller] def invokeAction(
user: Identity,
action: WhiskActionMetaData,
payload: Option[JsObject],
waitForResponse: Option[FiniteDuration],
cause: Option[ActivationId])(implicit transid: TransactionId): Future[Either[ActivationId, WhiskActivation]] = {
invocationCount = invocationCount + 1
if (failActivation == 0) {
// construct a result stub that includes:
// 1. the package name for the action (to confirm that this resolved to systemId)
// 2. the action name (to confirm that this resolved to the expected action)
// 3. the payload received by the action which consists of the action.params + payload
val result = actionResult getOrElse JsObject(
"pkg" -> action.namespace.toJson,
"action" -> action.name.toJson,
"content" -> action.parameters.merge(payload).get)
val activation = WhiskActivation(
action.namespace,
action.name,
user.subject,
ActivationId.generate(),
start = Instant.now,
end = Instant.now,
response = {
actionResult.flatMap { r =>
r.fields.get("application_error").map { e =>
ActivationResponse.applicationError(e)
} orElse r.fields.get("developer_error").map { e =>
ActivationResponse.developerError(e, None)
} orElse r.fields.get("whisk_error").map { e =>
ActivationResponse.whiskError(e)
} orElse None // for clarity
} getOrElse ActivationResponse.success(Some(result))
})
// check that action parameters were merged with package
// all actions have default parameters (see stubAction)
if (testParametersInInvokeAction) {
if (!action.namespace.defaultPackage) {
action.parameters shouldBe (stubPackage.parameters ++ defaultActionParameters)
} else {
action.parameters shouldBe defaultActionParameters
}
action.parameters.get("z") shouldBe defaultActionParameters.get("z")
}
Future.successful(Right(activation))
} else if (failActivation == 1) {
Future.successful(Left(ActivationId.generate()))
} else {
Future.failed(new IllegalStateException("bad activation"))
}
}
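  // Builds the payload a web action is expected to receive: package and action default
  // parameters merged with the supplied query/body parameters plus the __ow_* context fields.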
def metaPayload(method: String,
params: JsObject,
identity: Option[Identity],
path: String = "",
body: Option[JsObject] = None,
pkgName: String = null,
headers: List[HttpHeader] = List.empty) = {
val packageActionParams = Option(pkgName)
.filter(_ != null)
.flatMap(n => packages.find(_.name == EntityName(n)))
.map(_.parameters)
.getOrElse(Parameters())
(packageActionParams ++ defaultActionParameters).merge {
Some {
JsObject(
params.fields ++
body.map(_.fields).getOrElse(Map.empty) ++
Context(webApiDirectives, HttpMethods.getForKey(method.toUpperCase).get, headers, path, Query.Empty)
.metadata(identity))
}
}.get
}
def confirmErrorWithTid(error: JsObject, message: Option[String] = None) = {
error.fields.size shouldBe 2
error.fields.get("error") shouldBe defined
message.foreach { m =>
error.fields.get("error").get shouldBe JsString(m)
}
error.fields.get("code") shouldBe defined
error.fields.get("code").get shouldBe an[JsString]
}
Seq(None, Some(WhiskAuthHelpers.newIdentity())).foreach { creds =>
it should s"not match invalid routes (auth? ${creds.isDefined})" in {
implicit val tid = transid()
// none of these should match a route
Seq("a", "a/b", "/a", s"$systemId/c", s"$systemId/export_c").foreach { path =>
allowedMethods.foreach { m =>
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(NotFound)
}
}
}
}
it should s"reject requests when Identity, package or action lookup fail or missing annotation (auth? ${creds.isDefined})" in {
implicit val tid = transid()
put(entityStore, stubAction(namespace, EntityName("c")))
// the first of these fails in the identity lookup,
// the second in the package lookup (does not exist),
      // the third fails the annotation check (no web-export annotation because the name doesn't start with export_)
// the fourth fails the action lookup
Seq("guest/proxy/c", s"$systemId/doesnotexist/c", s"$systemId/default/c", s"$systemId/proxy/export_fail")
.foreach { path =>
allowedMethods.foreach { m =>
m(s"$testRoutePath/${path}.json") ~> Route.seal(routes(creds)) ~> check {
status should be(NotFound)
}
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
if (webApiDirectives.enforceExtension) {
status should be(NotAcceptable)
confirmErrorWithTid(
responseAs[JsObject],
Some(Messages.contentTypeExtensionNotSupported(WhiskWebActionsApi.allowedExtensions)))
} else {
status should be(NotFound)
}
}
}
}
}
it should s"reject requests when whisk authentication is required but none given (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val entityName = MakeName.next("export")
val action =
stubAction(
proxyNamespace,
entityName,
customOptions = false,
requireAuthentication = true,
requireAuthenticationAsBoolean = true)
val path = action.fullyQualifiedName(false)
put(entityStore, action)
allowedMethods.foreach { m =>
m(s"$testRoutePath/${path}.json") ~> Route.seal(routes(creds)) ~> check {
if (m === Options) {
status should be(OK) // options response is always present regardless of auth
header("Access-Control-Allow-Origin").get.toString shouldBe "Access-Control-Allow-Origin: *"
header("Access-Control-Allow-Methods").get.toString shouldBe "Access-Control-Allow-Methods: OPTIONS, GET, DELETE, POST, PUT, HEAD, PATCH"
header("Access-Control-Request-Headers") shouldBe empty
} else if (creds.isEmpty) {
status should be(Unauthorized) // if user is not authenticated, reject all requests
} else {
invocationsAllowed += 1
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> entityName.asString.toJson,
"content" -> metaPayload(m.method.name.toLowerCase, JsObject.empty, creds, pkgName = "proxy"))
response
.fields("content")
.asJsObject
.fields(webApiDirectives.namespace) shouldBe creds.get.namespace.name.toJson
}
}
}
}
it should s"reject requests when x-authentication is required but none given (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val entityName = MakeName.next("export")
val action =
stubAction(
proxyNamespace,
entityName,
customOptions = false,
requireAuthentication = true,
requireAuthenticationAsBoolean = false)
val path = action.fullyQualifiedName(false)
put(entityStore, action)
allowedMethods.foreach { m =>
// web action require-whisk-auth is set, but the header X-Require-Whisk-Auth value does not match
m(s"$testRoutePath/${path}.json") ~> addHeader(
WhiskAction.requireWhiskAuthHeader,
requireAuthenticationKey + "-bad") ~> Route
.seal(routes(creds)) ~> check {
if (m == Options) {
status should be(OK) // options should always respond
header("Access-Control-Allow-Origin").get.toString shouldBe "Access-Control-Allow-Origin: *"
header("Access-Control-Allow-Methods").get.toString shouldBe "Access-Control-Allow-Methods: OPTIONS, GET, DELETE, POST, PUT, HEAD, PATCH"
header("Access-Control-Request-Headers") shouldBe empty
} else {
status should be(Unauthorized)
}
}
// web action require-whisk-auth is set, but the header X-Require-Whisk-Auth value is not set
m(s"$testRoutePath/${path}.json") ~> Route.seal(routes(creds)) ~> check {
if (m == Options) {
status should be(OK) // options should always respond
header("Access-Control-Allow-Origin").get.toString shouldBe "Access-Control-Allow-Origin: *"
header("Access-Control-Allow-Methods").get.toString shouldBe "Access-Control-Allow-Methods: OPTIONS, GET, DELETE, POST, PUT, HEAD, PATCH"
header("Access-Control-Request-Headers") shouldBe empty
} else {
status should be(Unauthorized)
}
}
m(s"$testRoutePath/${path}.json") ~> addHeader(WhiskAction.requireWhiskAuthHeader, requireAuthenticationKey) ~> Route
.seal(routes(creds)) ~> check {
if (m == Options) {
status should be(OK) // options should always respond
header("Access-Control-Allow-Origin").get.toString shouldBe "Access-Control-Allow-Origin: *"
header("Access-Control-Allow-Methods").get.toString shouldBe "Access-Control-Allow-Methods: OPTIONS, GET, DELETE, POST, PUT, HEAD, PATCH"
header("Access-Control-Request-Headers") shouldBe empty
} else {
invocationsAllowed += 1
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> entityName.asString.toJson,
"content" -> metaPayload(
m.method.name.toLowerCase,
JsObject.empty,
creds,
pkgName = "proxy",
headers = List(RawHeader(WhiskAction.requireWhiskAuthHeader, requireAuthenticationKey))))
if (creds.isDefined) {
response
.fields("content")
.asJsObject
.fields(webApiDirectives.namespace) shouldBe creds.get.namespace.name.toJson
}
}
}
}
}
it should s"invoke action that times out and provide a code (auth? ${creds.isDefined})" in {
implicit val tid = transid()
failActivation = 1
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$systemId/proxy/export_c.json") ~> Route.seal(routes(creds)) ~> check {
status should be(Accepted)
val response = responseAs[JsObject]
confirmErrorWithTid(response, Some("Response not yet ready."))
}
}
}
it should s"invoke action that errors and respond with error and code (auth? ${creds.isDefined})" in {
implicit val tid = transid()
failActivation = 2
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$systemId/proxy/export_c.json") ~> Route.seal(routes(creds)) ~> check {
status should be(InternalServerError)
val response = responseAs[JsObject]
confirmErrorWithTid(response)
}
}
}
it should s"invoke action and merge query parameters (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.json?a=b&c=d").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
m.method.name.toLowerCase,
Map("a" -> "b", "c" -> "d").toJson.asJsObject,
creds,
pkgName = "proxy"))
}
}
}
}
it should s"invoke action and merge body parameters (auth? ${creds.isDefined})" in {
implicit val tid = transid()
// both of these should produce full result objects (trailing slash is ok)
Seq(s"$systemId/proxy/export_c.json", s"$systemId/proxy/export_c.json/").foreach { path =>
allowedMethodsWithEntity.foreach { m =>
val content = JsObject("extra" -> "read all about it".toJson, "yummy" -> true.toJson)
val p = if (path.endsWith("/")) "/" else ""
invocationsAllowed += 1
m(s"$testRoutePath/$path", content) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
m.method.name.toLowerCase,
JsObject.empty,
creds,
body = Some(content),
path = p,
pkgName = "proxy",
headers = List(`Content-Type`(ContentTypes.`application/json`))))
}
}
}
}
it should s"invoke action which receives an empty entity (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq("", JsArray.empty.compactPrint, JsObject.empty.compactPrint, JsNull.compactPrint).foreach { arg =>
Seq(s"$systemId/proxy/export_c.json").foreach { path =>
allowedMethodsWithEntity.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path", HttpEntity(ContentTypes.`application/json`, arg)) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
m.method.name.toLowerCase,
if (arg.nonEmpty && arg != "{}") JsObject(webApiDirectives.body -> arg.parseJson) else JsObject.empty,
creds,
pkgName = "proxy",
headers = List(`Content-Type`(ContentTypes.`application/json`))))
}
}
}
}
}
it should s"invoke action and merge query and body parameters (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.json?a=b&c=d").foreach { path =>
allowedMethodsWithEntity.foreach { m =>
val content = JsObject("extra" -> "read all about it".toJson, "yummy" -> true.toJson)
invocationsAllowed += 1
m(s"$testRoutePath/$path", content) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
m.method.name.toLowerCase,
Map("a" -> "b", "c" -> "d").toJson.asJsObject,
creds,
body = Some(content),
pkgName = "proxy",
headers = List(`Content-Type`(ContentTypes.`application/json`))))
}
}
}
}
it should s"invoke action in default package (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/default/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(m.method.name.toLowerCase, JsObject.empty, creds))
}
}
}
}
it should s"invoke action in a binding of private package (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val provider = WhiskPackage(EntityPath(systemId.asString), aname(), None, stubPackage.parameters)
val reference = WhiskPackage(EntityPath(systemId.asString), aname(), provider.bind)
val action = stubAction(provider.fullPath, EntityName("export_c"))
put(entityStore, provider)
put(entityStore, reference)
put(entityStore, action)
Seq(s"$systemId/${reference.name}/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
}
}
}
}
it should s"invoke action in a binding of public package (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val provider = WhiskPackage(EntityPath("guest"), aname(), None, stubPackage.parameters, publish = true)
val reference = WhiskPackage(EntityPath(systemId.asString), aname(), provider.bind)
val action = stubAction(provider.fullPath, EntityName("export_c"))
put(entityStore, provider)
put(entityStore, reference)
put(entityStore, action)
Seq(s"$systemId/${reference.name}/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
}
}
}
}
it should s"invoke action relative to a binding where the action doesn't exist (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val provider = WhiskPackage(EntityPath("guest"), aname(), None, stubPackage.parameters, publish = true)
val reference = WhiskPackage(EntityPath(systemId.asString), aname(), provider.bind)
put(entityStore, provider)
put(entityStore, reference)
// action is not created
Seq(s"$systemId/${reference.name}/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(NotFound)
}
}
}
}
it should s"invoke action in non-existing binding (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val provider = WhiskPackage(EntityPath("guest"), aname(), None, stubPackage.parameters, publish = true)
val action = stubAction(provider.fullPath, EntityName("export_c"))
val reference = WhiskPackage(EntityPath(systemId.asString), aname(), provider.bind)
put(entityStore, provider)
put(entityStore, action)
// reference is not created
Seq(s"$systemId/${reference.name}/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(NotFound)
}
}
}
}
it should s"not inherit annotations of package binding (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val provider = WhiskPackage(EntityPath("guest"), aname(), None, stubPackage.parameters, publish = true)
val reference = WhiskPackage(
EntityPath(systemId.asString),
aname(),
provider.bind,
annotations = Parameters("web-export", JsFalse))
val action = stubAction(provider.fullPath, EntityName("export_c"))
put(entityStore, provider)
put(entityStore, reference)
put(entityStore, action)
Seq(s"$systemId/${reference.name}/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
}
}
}
}
it should s"reject request that tries to override final parameters of action in package binding (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val provider = WhiskPackage(EntityPath("guest"), aname(), None, publish = true)
val reference = WhiskPackage(EntityPath(systemId.asString), aname(), provider.bind, stubPackage.parameters)
val action = stubAction(provider.fullPath, EntityName("export_c"))
put(entityStore, provider)
put(entityStore, reference)
put(entityStore, action)
val contentX = JsObject("x" -> "overriden".toJson)
val contentZ = JsObject("z" -> "overriden".toJson)
allowedMethodsWithEntity.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$systemId/${reference.name}/export_c.json?x=overriden") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/${reference.name}/export_c.json?y=overriden") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/${reference.name}/export_c.json", contentX) ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/${reference.name}/export_c.json?y=overriden", contentZ) ~> Route.seal(
routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/${reference.name}/export_c.json?empty=overriden") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"guest/${provider.name}".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
m.method.name.toLowerCase,
Map("empty" -> "overriden").toJson.asJsObject,
creds,
pkgName = "proxy"))
}
}
}
it should s"match precedence order for merging parameters (auth? ${creds.isDefined})" in {
implicit val tid = transid()
testParametersInInvokeAction = false
val provider = WhiskPackage(
EntityPath("guest"),
aname(),
None,
Parameters("a", JsString("A")) ++ Parameters("b", JsString("b")),
publish = true)
val reference = WhiskPackage(
EntityPath(systemId.asString),
aname(),
provider.bind,
Parameters("a", JsString("a")) ++ Parameters("c", JsString("c")))
// stub action has defaultActionParameters
val action = stubAction(provider.fullPath, EntityName("export_c"))
put(entityStore, provider)
put(entityStore, reference)
put(entityStore, action)
Seq(s"$systemId/${reference.name}/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"guest/${provider.name}".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
m.method.name.toLowerCase,
Map("a" -> "a", "b" -> "b", "c" -> "c").toJson.asJsObject,
creds))
}
}
}
}
it should s"pass the unmatched segment to the action (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.json/content").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject].fields("content")
response shouldBe metaPayload(
m.method.name.toLowerCase,
JsObject.empty,
creds,
path = "/content",
pkgName = "proxy")
}
}
}
}
it should s"respond with error when expected text property does not exist (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.text").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(NotFound)
confirmErrorWithTid(responseAs[JsObject], Some(Messages.propertyNotFound))
// ensure that error message is pretty printed as { error, code }
responseAs[String].linesIterator should have size 4
}
}
}
}
it should s"use action status code and headers to terminate an http response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
actionResult = Some(
JsObject(
"headers" -> JsObject("location" -> "http://openwhisk.org".toJson),
webApiDirectives.statusCode -> Found.intValue.toJson))
invocationsAllowed += 1
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(Found)
header("location").get.toString shouldBe "location: http://openwhisk.org"
}
}
}
}
it should s"use default field projection for extension (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject("location" -> "http://openwhisk.org".toJson),
webApiDirectives.statusCode -> Found.intValue.toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(Found)
header("location").get.toString shouldBe "location: http://openwhisk.org"
}
}
}
Seq(s"$systemId/proxy/export_c.text").foreach { path =>
allowedMethods.foreach { m =>
val text = "default text"
invocationsAllowed += 1
actionResult = Some(JsObject("text" -> JsString(text)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
contentType shouldBe MediaTypes.`text/plain`.withCharset(HttpCharsets.`UTF-8`)
val response = responseAs[String]
response shouldBe text
}
}
}
Seq(s"$systemId/proxy/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(JsObject("foobar" -> JsString("foobar")))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe actionResult.get
// ensure response is pretty printed
responseAs[String] shouldBe {
"""{
| "foobar": "foobar"
|}""".stripMargin
}
}
}
}
Seq(s"$systemId/proxy/export_c.html").foreach { path =>
allowedMethods.foreach { m =>
val html = "<html>hi</htlml>"
invocationsAllowed += 1
actionResult = Some(JsObject("html" -> JsString(html)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
contentType shouldBe MediaTypes.`text/html`.withCharset(HttpCharsets.`UTF-8`)
val response = responseAs[String]
response shouldBe html
}
}
}
Seq(s"$systemId/proxy/export_c.svg").foreach { path =>
allowedMethods.foreach { m =>
val svg = """<svg><circle cx="3" cy="3" r="3" fill="blue"/></svg>"""
invocationsAllowed += 1
actionResult = Some(JsObject("svg" -> JsString(svg)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
//contentType shouldBe MediaTypes.`image/svg+xml`.withCharset(HttpCharsets.`UTF-8`)
val response = responseAs[String]
response shouldBe svg
}
}
}
}
it should s"handle http web action and provide defaults (auth? ${creds.isDefined})" in {
implicit val tid = transid()
def confirmEmptyResponse() = {
status should be(NoContent)
response.entity shouldBe HttpEntity.Empty
withClue(headers) {
headers.length shouldBe 1
headers.exists(_.is(ActivationIdHeader)) should be(true)
}
}
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
Set(JsObject.empty, JsObject("body" -> "".toJson), JsObject("body" -> JsNull)).foreach { bodyResult =>
allowedMethods.foreach { m =>
invocationsAllowed += 2
actionResult = Some(bodyResult)
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
withClue(s"failed for: $bodyResult") {
confirmEmptyResponse()
}
}
// repeat with accept header, which should be ignored for content-negotiation
m(s"$testRoutePath/$path") ~> addHeader("Accept", "application/json") ~> Route.seal(routes(creds)) ~> check {
withClue(s"with accept header, failed for: $bodyResult") {
confirmEmptyResponse()
}
}
}
}
}
}
it should s"handle all JSON values with .text extension (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(JsObject("a" -> "A".toJson), JsArray("a".toJson), JsString("a"), JsTrue, JsNumber(1), JsNull)
.foreach { jsval =>
val path = s"$systemId/proxy/export_c.text"
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(JsObject("body" -> jsval))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
responseAs[String] shouldBe {
jsval match {
case _: JsObject => jsval.prettyPrint
case _: JsArray => jsval.prettyPrint
case JsString(s) => s
case JsBoolean(b) => b.toString
case JsNumber(n) => n.toString
case _ => "null"
}
}
}
}
}
}
it should s"handle http web action with JSON object as string response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
Seq(OK, Created).foreach { statusCode =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject("content-type" -> "application/json".toJson),
webApiDirectives.statusCode -> statusCode.intValue.toJson,
"body" -> JsObject("field" -> "value".toJson).compactPrint.toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(statusCode)
mediaType shouldBe MediaTypes.`application/json`
responseAs[JsObject] shouldBe JsObject("field" -> "value".toJson)
}
}
}
}
}
it should s"handle http web action with partially specified result (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
// omit status code
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject("content-type" -> "application/json".toJson),
"body" -> JsObject("field" -> "value".toJson)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
responseAs[JsObject] shouldBe JsObject("field" -> "value".toJson)
}
}
// omit status code and headers
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(JsObject("body" -> JsObject("field" -> "value".toJson).compactPrint.toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
responseAs[String] shouldBe actionResult.get.fields("body").convertTo[String]
contentType shouldBe MediaTypes.`text/html`.withCharset(HttpCharsets.`UTF-8`)
}
}
// omit headers only
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
webApiDirectives.statusCode -> Created.intValue.toJson,
"body" -> JsObject("field" -> "value".toJson).compactPrint.toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(Created)
responseAs[String] shouldBe actionResult.get.fields("body").convertTo[String]
contentType shouldBe MediaTypes.`text/html`.withCharset(HttpCharsets.`UTF-8`)
}
}
// omit body and headers
Seq(OK, Created, NoContent).foreach { statusCode =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(JsObject(webApiDirectives.statusCode -> statusCode.intValue.toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(statusCode)
headers.size shouldBe 1
headers.exists(_.is(ActivationIdHeader)) should be(true)
response.entity shouldBe HttpEntity.Empty
}
}
}
// omit body but include headers
Seq(OK, Created, NoContent).foreach { statusCode =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject("Set-Cookie" -> "a=b".toJson, "content-type" -> "application/json".toJson),
webApiDirectives.statusCode -> statusCode.intValue.toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(statusCode)
headers should contain(RawHeader("Set-Cookie", "a=b"))
headers.exists(_.is(ActivationIdHeader)) should be(true)
response.entity shouldBe HttpEntity.Empty
}
}
}
}
}
it should s"handle http web action with no body when status code is set (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
// omit body and headers, but add accept header on the request
Seq(OK, Created, NoContent).foreach { statusCode =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(JsObject(webApiDirectives.statusCode -> statusCode.intValue.toJson))
m(s"$testRoutePath/$path") ~> addHeader("Accept", "application/json") ~> Route.seal(routes(creds)) ~> check {
status should be(statusCode)
headers.size shouldBe 1
headers.exists(_.is(ActivationIdHeader)) should be(true)
response.entity shouldBe HttpEntity.Empty
}
}
}
// omit body but include headers, and add accept header on the request
Seq(OK, Created, NoContent).foreach { statusCode =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject("Set-Cookie" -> "a=b".toJson, "content-type" -> "application/json".toJson),
webApiDirectives.statusCode -> statusCode.intValue.toJson))
m(s"$testRoutePath/$path") ~> addHeader("Accept", "application/json") ~> Route.seal(routes(creds)) ~> check {
status should be(statusCode)
headers should contain(RawHeader("Set-Cookie", "a=b"))
headers.exists(_.is(ActivationIdHeader)) should be(true)
response.entity shouldBe HttpEntity.Empty
}
}
}
}
}
it should s"handle http web action with JSON object response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(
(JsObject("content-type" -> "application/json".toJson), OK),
(JsObject.empty, OK),
(JsObject("content-type" -> "text/html".toJson), BadRequest)).foreach {
case (headers, expectedCode) =>
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> headers,
webApiDirectives.statusCode -> OK.intValue.toJson,
"body" -> JsObject("field" -> "value".toJson)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(expectedCode)
if (expectedCode == OK) {
header("content-type").map(_.toString shouldBe "content-type: application/json")
responseAs[JsObject] shouldBe JsObject("field" -> "value".toJson)
} else {
confirmErrorWithTid(responseAs[JsObject], Some(Messages.httpContentTypeError))
}
}
}
}
}
}
it should s"handle http web action with base64 encoded known '+json' response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject("content-type" -> "application/json-patch+json".toJson),
webApiDirectives.statusCode -> OK.intValue.toJson,
"body" -> Base64.getEncoder.encodeToString {
JsObject("field" -> "value".toJson).compactPrint.getBytes
}.toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
mediaType.value shouldBe "application/json-patch+json"
responseAs[String].parseJson shouldBe JsObject("field" -> "value".toJson)
}
}
}
}
it should s"handle http web action for known '+json' response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(
(JsObject("content-type" -> "application/json-patch+json".toJson), OK),
(JsObject("content-type" -> "text/html".toJson), BadRequest)).foreach {
case (headers, expectedCode) =>
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> headers,
webApiDirectives.statusCode -> OK.intValue.toJson,
"body" -> JsObject("field" -> "value".toJson)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(expectedCode)
if (expectedCode == OK) {
mediaType.value shouldBe "application/json-patch+json"
responseAs[String].parseJson shouldBe JsObject("field" -> "value".toJson)
} else {
confirmErrorWithTid(responseAs[JsObject], Some(Messages.httpContentTypeError))
}
}
}
}
}
}
it should s"handle http web action for unknown '+json' response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(
(JsObject("content-type" -> "application/hal+json".toJson), OK),
(JsObject("content-type" -> "text/html".toJson), BadRequest)).foreach {
case (headers, expectedCode) =>
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> headers,
webApiDirectives.statusCode -> OK.intValue.toJson,
"body" -> JsObject("field" -> "value".toJson)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(expectedCode)
if (expectedCode == OK) {
mediaType.value shouldBe "application/hal+json"
responseAs[String].parseJson shouldBe JsObject("field" -> "value".toJson)
} else {
confirmErrorWithTid(responseAs[JsObject], Some(Messages.httpContentTypeError))
}
}
}
}
}
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(JsObject(webApiDirectives.statusCode -> OK.intValue.toJson, "body" -> JsNumber(3)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
header("content-type").map(_.toString shouldBe "content-type: application/json")
responseAs[String].toInt shouldBe 3
}
}
}
}
it should s"handle http web action with base64 encoded binary response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val expectedEntity = HttpEntity(ContentType(MediaTypes.`image/png`), Base64.getDecoder().decode(pngSample))
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject(`Content-Type`.lowercaseName -> MediaTypes.`image/png`.toString.toJson),
"body" -> pngSample.toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
response.entity shouldBe expectedEntity
}
}
}
}
it should s"handle http web action with html/text response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult =
Some(JsObject(webApiDirectives.statusCode -> OK.intValue.toJson, "body" -> "hello world".toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
responseAs[String] shouldBe "hello world"
}
}
}
}
it should s"allow web action with incorrect application/json header and text response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject("content-type" -> "application/json".toJson),
webApiDirectives.statusCode -> OK.intValue.toJson,
"body" -> "hello world".toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
mediaType shouldBe MediaTypes.`application/json`
headers.size shouldBe 1
headers.exists(_.is(ActivationIdHeader)) should be(true)
responseAs[String] shouldBe "hello world"
}
}
}
}
it should s"reject http web action with invalid content-type header (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"headers" -> JsObject("content-type" -> "xyzbar".toJson),
webApiDirectives.statusCode -> OK.intValue.toJson,
"body" -> "hello world".toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
confirmErrorWithTid(responseAs[JsObject], Some(Messages.httpUnknownContentType))
}
}
}
}
it should s"handle an activation that results in application error (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(
JsObject(
"application_error" -> JsObject(
webApiDirectives.statusCode -> OK.intValue.toJson,
"body" -> "no hello for you".toJson)))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
responseAs[String] shouldBe "no hello for you"
}
}
}
}
it should s"handle an activation that results in application error that does not match .json extension (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.json").foreach { path =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(JsObject("application_error" -> "bad response type".toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
confirmErrorWithTid(responseAs[JsObject], Some(Messages.invalidMedia(MediaTypes.`application/json`)))
}
}
}
}
it should s"handle an activation that results in developer or system error (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.json", s"$systemId/proxy/export_c.text")
.foreach { path =>
Seq("developer_error", "whisk_error").foreach { e =>
allowedMethods.foreach { m =>
invocationsAllowed += 1
actionResult = Some(JsObject(e -> "bad response type".toJson))
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
if (e == "application_error") {
confirmErrorWithTid(responseAs[JsObject], Some(Messages.invalidMedia(MediaTypes.`application/json`)))
} else {
confirmErrorWithTid(responseAs[JsObject], Some(Messages.errorProcessingRequest))
}
}
}
}
}
}
it should s"support formdata (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.json").foreach { path =>
val form = FormData(Map("field1" -> "value1", "field2" -> "value2"))
invocationsAllowed += 1
Post(s"$testRoutePath/$path", form.toEntity) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
responseAs[JsObject].fields("content").asJsObject.fields("field1") shouldBe JsString("value1")
responseAs[JsObject].fields("content").asJsObject.fields("field2") shouldBe JsString("value2")
}
}
}
it should s"reject requests when entity size exceeds allowed limit (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.json").foreach { path =>
val largeEntity = "a" * (allowedActivationEntitySize.toInt + 1)
val content = s"""{"a":"$largeEntity"}"""
Post(s"$testRoutePath/$path", content.parseJson.asJsObject) ~> Route.seal(routes(creds)) ~> check {
status should be(PayloadTooLarge)
val expectedErrorMsg = Messages.entityTooBig(
SizeError(fieldDescriptionForSizeError, (largeEntity.length + 8).B, allowedActivationEntitySize.B))
confirmErrorWithTid(responseAs[JsObject], Some(expectedErrorMsg))
}
val form = FormData(Map("a" -> largeEntity))
Post(s"$testRoutePath/$path", form) ~> Route.seal(routes(creds)) ~> check {
status should be(PayloadTooLarge)
val expectedErrorMsg = Messages.entityTooBig(
SizeError(fieldDescriptionForSizeError, (largeEntity.length + 2).B, allowedActivationEntitySize.B))
confirmErrorWithTid(responseAs[JsObject], Some(expectedErrorMsg))
}
}
}
it should s"reject unknown extensions (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(
s"$systemId/proxy/export_c.xyz",
s"$systemId/proxy/export_c.xyz/",
s"$systemId/proxy/export_c.xyz/content",
s"$systemId/proxy/export_c.xyzz",
s"$systemId/proxy/export_c.xyzz/",
s"$systemId/proxy/export_c.xyzz/content").foreach { path =>
allowedMethods.foreach { m =>
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
if (webApiDirectives.enforceExtension) {
status should be(NotAcceptable)
confirmErrorWithTid(
responseAs[JsObject],
Some(Messages.contentTypeExtensionNotSupported(WhiskWebActionsApi.allowedExtensions)))
} else {
status should be(NotFound)
}
}
}
}
}
it should s"reject request that tries to override reserved properties (auth? ${creds.isDefined})" in {
implicit val tid = transid()
allowedMethodsWithEntity.foreach { m =>
webApiDirectives.reservedProperties.foreach { p =>
m(s"$testRoutePath/$systemId/proxy/export_c.json?$p=YYY") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/proxy/export_c.json", JsObject(p -> "YYY".toJson)) ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
}
}
}
it should s"reject request that tries to override final parameters (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val contentX = JsObject("x" -> "overriden".toJson)
val contentZ = JsObject("z" -> "overriden".toJson)
allowedMethodsWithEntity.foreach { m =>
invocationsAllowed += 1
m(s"$testRoutePath/$systemId/proxy/export_c.json?x=overriden") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/proxy/export_c.json?y=overriden") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/proxy/export_c.json", contentX) ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/proxy/export_c.json?y=overriden", contentZ) ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
responseAs[ErrorResponse].error shouldBe Messages.parametersNotAllowed
}
m(s"$testRoutePath/$systemId/proxy/export_c.json?empty=overriden") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
m.method.name.toLowerCase,
Map("empty" -> "overriden").toJson.asJsObject,
creds,
pkgName = "proxy"))
}
}
}
it should s"inline body when receiving entity that is not a JsObject (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val str = "1,2,3"
invocationsAllowed = 3
Post(s"$testRoutePath/$systemId/proxy/export_c.json", HttpEntity(ContentTypes.`text/html(UTF-8)`, str)) ~> Route
.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
JsObject(webApiDirectives.body -> str.toJson),
creds,
pkgName = "proxy",
headers = List(`Content-Type`(ContentTypes.`text/html(UTF-8)`))))
}
Post(s"$testRoutePath/$systemId/proxy/export_c.json?a=b&c=d") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
Map("a" -> "b", "c" -> "d").toJson.asJsObject,
creds,
pkgName = "proxy"))
}
Post(s"$testRoutePath/$systemId/proxy/export_c.json?a=b&c=d", JsObject.empty) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
Map("a" -> "b", "c" -> "d").toJson.asJsObject,
creds,
pkgName = "proxy",
headers = List(`Content-Type`(ContentTypes.`application/json`))))
}
}
it should s"throttle subject owning namespace for web action (auth? ${creds.isDefined})" in {
implicit val tid = transid()
// this should fail for exceeding quota
Seq(s"$systemId/proxy/export_c.text/content/z").foreach { path =>
allowedMethods.foreach { m =>
failThrottleForSubject = Some(systemId)
m(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(TooManyRequests)
confirmErrorWithTid(responseAs[JsObject], Some(Messages.tooManyRequests(2, 1)))
}
failThrottleForSubject = None
}
}
}
it should s"respond with custom options (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
invocationsAllowed += 1 // custom options means action is invoked
actionResult =
Some(JsObject("headers" -> JsObject("Access-Control-Allow-Methods" -> "OPTIONS, GET, PATCH".toJson)))
// the added headers should be ignored
Options(s"$testRoutePath/$path") ~> addHeader(`Access-Control-Request-Headers`("x-custom-header")) ~> Route
.seal(routes(creds)) ~> check {
header("Access-Control-Allow-Origin") shouldBe empty
header("Access-Control-Allow-Methods").get.toString shouldBe "Access-Control-Allow-Methods: OPTIONS, GET, PATCH"
header("Access-Control-Request-Headers") shouldBe empty
}
}
}
it should s"respond with custom options even when authentication is required but missing (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val entityName = MakeName.next("export")
val action =
stubAction(
proxyNamespace,
entityName,
customOptions = true,
requireAuthentication = true,
requireAuthenticationAsBoolean = true)
val path = action.fullyQualifiedName(false)
put(entityStore, action)
invocationsAllowed += 1 // custom options means action is invoked
actionResult =
Some(JsObject("headers" -> JsObject("Access-Control-Allow-Methods" -> "OPTIONS, GET, PATCH".toJson)))
// the added headers should be ignored
Options(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
header("Access-Control-Allow-Origin") shouldBe empty
header("Access-Control-Allow-Methods").get.toString shouldBe "Access-Control-Allow-Methods: OPTIONS, GET, PATCH"
header("Access-Control-Request-Headers") shouldBe empty
}
}
it should s"support multiple values for headers (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
invocationsAllowed += 1
actionResult =
Some(JsObject("headers" -> JsObject("Set-Cookie" -> JsArray(JsString("a=b"), JsString("c=d; Path = /")))))
Options(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
headers should contain allOf (RawHeader("Set-Cookie", "a=b"),
RawHeader("Set-Cookie", "c=d; Path = /"))
}
}
}
it should s"invoke action and respond with default options headers (auth? ${creds.isDefined})" in {
implicit val tid = transid()
put(entityStore, stubAction(proxyNamespace, EntityName("export_without_custom_options"), false))
Seq(s"$systemId/proxy/export_without_custom_options.http", s"$systemId/proxy/export_without_custom_options.json")
.foreach { path =>
Seq(`Access-Control-Request-Headers`("x-custom-header"), RawHeader("x-custom-header", "value")).foreach {
testHeader =>
allowedMethods.foreach { m =>
if (m != Options) invocationsAllowed += 1 // options verb does not invoke an action
m(s"$testRoutePath/$path") ~> addHeader(testHeader) ~> Route.seal(routes(creds)) ~> check {
header("Access-Control-Allow-Origin").get.toString shouldBe "Access-Control-Allow-Origin: *"
header("Access-Control-Allow-Methods").get.toString shouldBe "Access-Control-Allow-Methods: OPTIONS, GET, DELETE, POST, PUT, HEAD, PATCH"
if (testHeader.name == `Access-Control-Request-Headers`.name) {
header("Access-Control-Allow-Headers").get.toString shouldBe "Access-Control-Allow-Headers: x-custom-header"
} else {
header("Access-Control-Allow-Headers").get.toString shouldBe "Access-Control-Allow-Headers: Authorization, Origin, X-Requested-With, Content-Type, Accept, User-Agent"
}
}
}
}
}
}
it should s"invoke action with head verb (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
invocationsAllowed += 1
actionResult = Some(JsObject("headers" -> JsObject("location" -> "http://openwhisk.org".toJson)))
Head(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
header("location").get.toString shouldBe "location: http://openwhisk.org"
}
}
}
it should s"handle html web action with text/xml response (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.html").foreach { path =>
val html = """<html><body>test</body></html>"""
val xml = """<?xml version="1.0" encoding="UTF-8"?><note><from>test</from></note>"""
invocationsAllowed += 2
actionResult = Some(JsObject("html" -> xml.toJson))
Seq((html, MediaTypes.`text/html`), (xml, MediaTypes.`text/html`)).foreach {
case (res, expectedMediaType) =>
actionResult = Some(JsObject("html" -> res.toJson))
Get(s"$testRoutePath/$path") ~> addHeader("Accept", expectedMediaType.value) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
contentType shouldBe ContentTypes.`text/html(UTF-8)`
responseAs[String] shouldBe res
mediaType shouldBe expectedMediaType
}
}
}
}
it should s"not fail a raw http action when query or body parameters overlap with final action parameters (auth? ${creds.isDefined})" in {
implicit val tid = transid()
invocationsAllowed = 2
val queryString = "x=overriden&key2=value2"
Post(s"$testRoutePath/$systemId/proxy/raw_export_c.json?$queryString") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "raw_export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
Map(webApiDirectives.body -> "".toJson, webApiDirectives.query -> queryString.toJson).toJson.asJsObject,
creds,
pkgName = "proxy"))
}
Post(
s"$testRoutePath/$systemId/proxy/raw_export_c.json",
JsObject("x" -> "overriden".toJson, "key2" -> "value2".toJson)) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "raw_export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
Map(webApiDirectives.query -> "".toJson, webApiDirectives.body -> Base64.getEncoder.encodeToString {
JsObject("x" -> JsString("overriden"), "key2" -> JsString("value2")).compactPrint.getBytes
}.toJson).toJson.asJsObject,
creds,
pkgName = "proxy",
headers = List(`Content-Type`(ContentTypes.`application/json`))))
}
}
it should s"invoke raw action ensuring body and query arguments are set properly (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val queryString = "key1=value1&key2=value2"
Seq(
"1,2,3",
JsObject("a" -> "A".toJson, "b" -> "B".toJson).prettyPrint,
JsObject("a" -> "A".toJson, "b" -> "B".toJson).compactPrint).foreach { str =>
Post(
s"$testRoutePath/$systemId/proxy/raw_export_c.json?$queryString",
HttpEntity(ContentTypes.`application/json`, str)) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
invocationsAllowed += 1
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "raw_export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
Map(webApiDirectives.body -> Base64.getEncoder.encodeToString {
str.getBytes
}.toJson, webApiDirectives.query -> queryString.toJson).toJson.asJsObject,
creds,
pkgName = "proxy",
headers = List(`Content-Type`(ContentTypes.`application/json`))))
}
}
}
it should s"invoke raw action ensuring body and query arguments are empty strings when not specified in request (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Post(s"$testRoutePath/$systemId/proxy/raw_export_c.json") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
invocationsAllowed += 1
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "raw_export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
Map(webApiDirectives.body -> "".toJson, webApiDirectives.query -> "".toJson).toJson.asJsObject,
creds,
pkgName = "proxy"))
}
}
it should s"reject invocation of web action with invalid accept header (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
actionResult = Some(JsObject("body" -> "Plain text".toJson))
invocationsAllowed += 1
Get(s"$testRoutePath/$path") ~> addHeader("Accept", "application/json") ~> Route.seal(routes(creds)) ~> check {
status should be(NotAcceptable)
response shouldBe HttpResponse(
NotAcceptable,
        entity = "Resource representation is only available with these types:\ntext/html; charset=UTF-8")
}
}
}
it should s"reject invocation of web action which has no entitlement (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.http").foreach { path =>
actionResult = Some(JsObject("body" -> "Plain text".toJson))
failCheckEntitlement = true
Get(s"$testRoutePath/$path") ~> Route.seal(routes(creds)) ~> check {
status should be(Forbidden)
}
}
}
it should s"not invoke an action more than once when determining entity type (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq(s"$systemId/proxy/export_c.html").foreach { path =>
val html = """<html><body>test</body></html>"""
val xml = """<?xml version="1.0" encoding="UTF-8"?><note><from>test</from></note>"""
invocationsAllowed += 1
actionResult = Some(JsObject("html" -> xml.toJson))
Get(s"$testRoutePath/$path") ~> addHeader("Accept", MediaTypes.`text/xml`.value) ~> Route.seal(routes(creds)) ~> check {
status should be(NotAcceptable)
}
}
withClue(s"allowed invoke count did not match actual") {
invocationsAllowed shouldBe invocationCount
}
}
it should s"invoke web action ensuring JSON value body arguments are received as is (auth? ${creds.isDefined})" in {
implicit val tid = transid()
Seq("this is a string".toJson, JsArray(1.toJson, "str str".toJson, false.toJson), true.toJson, 99.toJson)
.foreach { str =>
invocationsAllowed += 1
Post(
s"$testRoutePath/$systemId/proxy/export_c.json",
HttpEntity(ContentTypes.`application/json`, str.compactPrint)) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
Map(webApiDirectives.body -> str).toJson.asJsObject,
creds,
pkgName = "proxy",
headers = List(`Content-Type`(ContentTypes.`application/json`))))
}
}
}
it should s"invoke web action ensuring binary body is base64 encoded (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val entity = HttpEntity(ContentType(MediaTypes.`image/png`), Base64.getDecoder().decode(pngSample))
invocationsAllowed += 1
Post(s"$testRoutePath/$systemId/proxy/export_c.json", entity) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
val response = responseAs[JsObject]
response shouldBe JsObject(
"pkg" -> s"$systemId/proxy".toJson,
"action" -> "export_c".toJson,
"content" -> metaPayload(
Post.method.name.toLowerCase,
Map(webApiDirectives.body -> pngSample.toJson).toJson.asJsObject,
creds,
pkgName = "proxy",
headers = List(RawHeader(`Content-Type`.lowercaseName, MediaTypes.`image/png`.toString))))
}
}
it should s"allowed string based status code (auth? ${creds.isDefined})" in {
implicit val tid = transid()
invocationsAllowed += 2
actionResult = Some(JsObject(webApiDirectives.statusCode -> JsString("200")))
Head(s"$testRoutePath/$systemId/proxy/export_c.http") ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
}
actionResult = Some(JsObject(webApiDirectives.statusCode -> JsString("xyz")))
Head(s"$testRoutePath/$systemId/proxy/export_c.http") ~> Route.seal(routes(creds)) ~> check {
status should be(BadRequest)
}
}
it should s"support json (including +json subtypes) (auth? ${creds.isDefined})" in {
implicit val tid = transid()
val path = s"$systemId/proxy/export_c.json"
val entity = JsObject("field1" -> "value1".toJson)
Seq(
ContentType(MediaType.applicationWithFixedCharset("cloudevents+json", HttpCharsets.`UTF-8`)),
ContentTypes.`application/json`).foreach { ct =>
invocationsAllowed += 1
Post(s"$testRoutePath/$path", HttpEntity(ct, entity.compactPrint)) ~> Route.seal(routes(creds)) ~> check {
status should be(OK)
responseAs[JsObject].fields("content").asJsObject.fields("field1") shouldBe entity.fields("field1")
}
}
}
}
class TestingEntitlementProvider(config: WhiskConfig, loadBalancer: LoadBalancer)
extends EntitlementProvider(config, loadBalancer, ControllerInstanceId("0")) {
// The check method checks both throttle and entitlement.
protected[core] override def check(user: Identity, right: Privilege, resource: Resource)(
implicit transid: TransactionId): Future[Unit] = {
val subject = user.subject
// first, check entitlement
if (failCheckEntitlement) {
Future.failed(RejectRequest(Forbidden))
} else {
// then, check throttle
logging.debug(this, s"test throttle is checking user '$subject' has not exceeded activation quota")
failThrottleForSubject match {
case Some(subject) if subject == user.subject =>
Future.failed(RejectRequest(TooManyRequests, Messages.tooManyRequests(2, 1)))
case _ => Future.successful({})
}
}
}
protected[core] override def grant(user: Identity, right: Privilege, resource: Resource)(
implicit transid: TransactionId) = ???
/** Revokes subject right to resource by removing them from the entitlement matrix. */
protected[core] override def revoke(user: Identity, right: Privilege, resource: Resource)(
implicit transid: TransactionId) = ???
/** Checks if subject has explicit grant for a resource. */
protected override def entitled(user: Identity, right: Privilege, resource: Resource)(
implicit transid: TransactionId) = ???
}
}
| jeremiaswerner/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/controller/test/WebActionsApiTests.scala | Scala | apache-2.0 | 82,272 |
/*
* Copyright 2015 LG CNS.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.netio.service.handle;
import java.io.IOException
import java.util.TimeZone
import scouter.Version
import scouter.lang.Account
import scouter.lang.pack.MapPack
import scouter.lang.value.ListValue
import scouter.lang.value.MapValue
import scouter.io.DataInputX
import scouter.io.DataOutputX
import scouter.net.RequestCmd
import scouter.net.TcpFlag
import scouter.server.Configure
import scouter.server.LoginManager
import scouter.server.LoginUser
import scouter.server.account.AccountManager
import scouter.server.netio.service.anotation.ServiceHandler;
import scouter.util.ArrayUtil
class LoginService {
@ServiceHandler(RequestCmd.LOGIN)
def login(din: DataInputX, dout: DataOutputX, login: Boolean) {
val m = din.readMapPack();
val id = m.getText("id");
val passwd = m.getText("pass");
val ip = m.getText("ip");
val name = m.getText("hostname");
val clientVer = m.getText("version");
val session = LoginManager.login(id, passwd, ip);
m.put("session", session);
if (session == 0) {
m.put("error", "login fail");
} else {
val user = LoginManager.getUser(session);
user.hostname = name;
user.version = clientVer;
m.put("time", System.currentTimeMillis());
m.put("hostname", getHostName());
m.put("type", user.group);
m.put("version", Version.getServerFullVersion());
val acc = AccountManager.getAccount(id);
if (acc != null) {
m.put("email", acc.email);
}
m.put("timezone", TimeZone.getDefault().getDisplayName());
val mv = AccountManager.getGroupPolicy(user.group);
if (mv != null) {
m.put("policy", mv);
}
}
dout.writeByte(TcpFlag.HasNEXT);
dout.writePack(m);
}
@ServiceHandler(RequestCmd.GET_LOGIN_LIST)
def getLoginList(din: DataInputX, dout: DataOutputX, login: Boolean) {
val users = LoginManager.getLoginUserList()
if (ArrayUtil.len(users) > 0) {
val result = new MapPack();
val sessionLv = result.newList("session");
val userLv = result.newList("user");
val ipLv = result.newList("ip");
val loginTimeLv = result.newList("logintime");
val versioneLv = result.newList("ver");
val hostnameLv = result.newList("host");
for (usr <- users) {
sessionLv.add(usr.session);
userLv.add(usr.id);
ipLv.add(usr.ip);
loginTimeLv.add((System.currentTimeMillis() - usr.logintime) / 1000L);
versioneLv.add(usr.version);
hostnameLv.add(usr.hostname);
}
dout.writeByte(TcpFlag.HasNEXT);
dout.writePack(result);
}
}
@ServiceHandler(RequestCmd.CHECK_SESSION)
def getSessionCheck(din: DataInputX, dout: DataOutputX, login: Boolean) {
val m = din.readMapPack();
val session = m.getLong("session");
val validSession = LoginManager.validSession(session);
m.put("validSession", validSession);
if (validSession == 0) {
m.put("error", "login fail");
}
dout.writeByte(TcpFlag.HasNEXT);
dout.writePack(m);
}
def getHostName(): String = {
Configure.getInstance().hostname;
}
} | jahnaviancha/scouter | scouter.server/src/scouter/server/netio/service/handle/LoginService.scala | Scala | apache-2.0 | 3,829 |
package common.graphql
import akka.actor.{ActorRef, ActorSystem}
import akka.stream.ActorMaterializer
import common.actors.{ActorMessageDelivering, Dispatcher}
import common.ActorNamed
import common.actors.Dispatcher.DispatcherMessage
import core.guice.injection.GuiceActorRefProvider
import scala.concurrent.Future
object DispatcherResolver extends ActorMessageDelivering with GuiceActorRefProvider {
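  /**
   * Comment added for clarity (not in the original source): builds a DispatcherMessage from the
   * query input and sends it to the Dispatcher actor together with the named resolver actor
   * reference and the optional `before`/`after` actor lists; the returned Future is completed
   * once a reply is delivered to the replyTo reference supplied by sendMessageWithFunc.
   */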
def resolveWithDispatcher[T](
input: Any,
userContext: UserContext,
namedResolverActor: ActorNamed,
before: List[ActorRef] = Nil,
after: List[ActorRef] = Nil
)(implicit actorSystem: ActorSystem, materializer: ActorMaterializer): Future[T] = {
sendMessageWithFunc[T] {
replyTo =>
provideActorRef(Dispatcher) ! DispatcherMessage(
input,
userContext,
replyTo,
provideActorRef(namedResolverActor),
before,
after
)
}
}
}
| sysgears/apollo-universal-starter-kit | modules/core/server-scala/src/main/scala/common/graphql/DispatcherResolver.scala | Scala | mit | 949 |
/*
* @author Philip Stutz
* @author Mihaela Verman
*
* Copyright 2013 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.dcop.graph
import com.signalcollect._
import com.signalcollect.dcop.modules._
import com.signalcollect.dcop.impl._
class RankedDcopEdge[Id, Action, UtilityType](targetId: Id) extends DefaultEdge(targetId) {
type Source = RankedDcopVertex[Id, Action, UtilityType]
def signal = {
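    // Comment added for clarity: the signal pairs the source's current action with the source's
    // current rank divided evenly across its outgoing edges.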
val sourceState = source.state
val sourceStateAssignment = source.state.centralVariableAssignment
(sourceStateAssignment._2, sourceState.ranks(sourceStateAssignment._1) / source.edgeCount)
}
}
/**
 * A Ranked Dcop vertex. Its state is composed of its action and its rank.
 *
 * @param optimizer The optimizer used
 * @param initialState Initial state of the vertex
 * @param baseRank Base rank added on every PageRank-style rank update
 * @param debug Boolean indicating whether debug information should be printed
 * @param eps Tolerance used when comparing ranks to decide convergence
 * @param convergeByEntireState Boolean indicating if the algorithm stops when the entire state or only the action stabilizes.
 */
class RankedDcopVertex[Id, Action, UtilityType](
override val optimizer: Optimizer[Id, Action, RankedConfig[Id, Action], UtilityType],
initialState: RankedConfig[Id, Action],
baseRank: Double = 0.15,
debug: Boolean = false,
eps: Double = 0.00000001,
convergeByEntireState: Boolean = true)
extends DcopVertex[Id, Action, RankedConfig[Id, Action], UtilityType](optimizer, initialState, debug) {
  // The initial state carries the initial action; baseRank (default 0.15) seeds the rank update.
type Signal = (Action, Double)
def currentConfig: RankedConfig[Id, Action] = {
val neighborhoodSignalMap = (mostRecentSignalMap.toMap).
asInstanceOf[Map[Id, (Action, Double)]]
val neighborhoodAssignments = neighborhoodSignalMap.
map(tuple => (tuple._1, tuple._2._1)).toMap
val neighborhoodRanks: Map[Id, Double] = neighborhoodSignalMap.
map(tuple => (tuple._1, tuple._2._2)).toMap
// val ranks = neighborhoodRanks + ((id, state._2))
val oldRanks = neighborhoodRanks + ((id, state.ranks(id)))
val oldC = RankedConfig(neighborhoodAssignments, state.numberOfCollects, oldRanks, state.domain, state.centralVariableAssignment)
val ranks = neighborhoodRanks + ((id, computeRankForMove(oldC)))
val c = RankedConfig(neighborhoodAssignments, state.numberOfCollects + 1, ranks, state.domain, state.centralVariableAssignment)
c
}
//TODO: Replace with more general.
def computeRankForMove(c: RankedConfig[Id, Action]): Double = {
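    // Comment added for clarity: PageRank-style update. "Allies" are neighbours whose current
    // action differs from this vertex's (i.e. no conflict); the new rank is the base rank plus
    // the damped sum of their edge-normalised ranks.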
val allies = c.neighborhood.filter(_._2 != c.centralVariableValue)
val allyRankSum = allies.keys.map(c.ranks).sum
val dampingFactor = 1.0 - baseRank
val newPageRank = baseRank + dampingFactor * allyRankSum
newPageRank
}
def sameMaps(newMap: Map[Id, Double], oldMap: Map[Id, Double]): Boolean = {
for (elem1 <- newMap) {
val inSecondMapValue = oldMap.getOrElse(elem1._1, -1.0)
if (math.abs(elem1._2 - inSecondMapValue) > eps) return false
}
true
}
override def isStateUnchanged(oldConfig: RankedConfig[Id, Action], newConfig: RankedConfig[Id, Action]): Boolean = {
(oldConfig.centralVariableAssignment == newConfig.centralVariableAssignment) &&
(oldConfig.neighborhood == newConfig.neighborhood) &&
sameMaps(oldConfig.ranks, newConfig.ranks)
}
override def collect = {
val c = currentConfig
if (optimizer.shouldConsiderMove(c)) {
changeMove(c)
} else {
if (debug) {
if (isConverged(c)) {
println(s"Vertex $id has converged and stays at move $c.")
} else {
println(s"Vertex $id still NOT converged, stays at move, and has $c.")
}
}
c
}
}
}
| elaverman/dcop-algorithms-old | src/main/scala/com/signalcollect/dcop/graph/RankedDcopVertex.scala | Scala | apache-2.0 | 4,297 |
package uk.co.morleydev.zander.client.controller.impl
import uk.co.morleydev.zander.client.model.OperationArguments
import uk.co.morleydev.zander.client.service.PurgeProjectArtefacts
import uk.co.morleydev.zander.client.validator.ValidateArtefactDetailsExistence
import uk.co.morleydev.zander.client.controller.Controller
class PurgeController(validateArtefactDetailsExists : ValidateArtefactDetailsExistence,
purge : PurgeProjectArtefacts) extends Controller {
override def apply(args : OperationArguments): Unit = {
validateArtefactDetailsExists(args.project, args.compiler, args.mode)
purge(args.project, args.compiler, args.mode)
}
}
| MorleyDev/zander.client | src/main/scala/uk/co/morleydev/zander/client/controller/impl/PurgeController.scala | Scala | mit | 674 |
package memnets.core.impl
import memnets.core._
import memnets.linalg.WMatrix
import memnets.ml._
import memnets.model._
import memnets.ui._
import scala.collection.mutable._
private[core] class ModelImpl(val builder: ModelBuilder, val config: ModelConfig) extends BuiltModel with Logging {
// hints override config for doubles
val numberType = config.numberTypeWithHints()
private val _system = DynamicSystem(WMatrix(numberType = numberType))
_system.tau = config.tauWithHints()
  _system.game = builder.modelType == ModelType.Game // must be set before the builder body runs
name = builder.name // set after in case model swapped in body
logger.debug(s"numberType: $numberType")
logger.debug(f"tau: ${system.tau}%.1f")
private var _engineListener: EngineListener = NULL_ENGINE_LISTENER
private var _phasePlot: PhasePlot = _
private var _usesData = false
private var _usesFile = false
private var _usesSizeScale = false
val controls = ArrayBuffer[GameControl]()
val equation = ArrayBuffer[Y]()
val tracked = ArrayBuffer[Trackable]()
val skins = ListBuffer[SkinType]()
val trials = ArrayBuffer[Trial]()
/** useful for testing final results or for automating trial */
var validator: TickListener = NULL_TICK_LISTENER
implicit def system: DynamicSystem = _system
implicit def thisModel: Model = this
implicit def lastTrial: Trial = {
if (trials.isEmpty) Trial()
trials.last
}
implicit def phasePlot: PhasePlot = {
if (null == _phasePlot) _phasePlot = new PhasePlot()(system)
_phasePlot
}
implicit def skinFactory: SF = config.skinFactory
implicit def workingDir = config.workingDir
/** only valid in a ticklistener */
def currentTrial = trials(system.now.trialIndex)
def fileHint(default: String): String = {
_usesFile = true
config.fileHint.getOrElseP(default)
}
def sizeHint(default: Int = 8, min: Int = 2): Int = {
_usesSizeScale = true
val size = Math.max(min, default * config.sizeScale).toInt
logger.debug(s"sizeHint: size = $size")
size
}
def loadData(): Data = {
_usesData = true
config.data.getOrElse {
var ds = DataSources.dataSourcesModel.getSelectedItem
if (ds == null)
ds = DataSources.dataSourcesModel.getItems.head
ds.data()
}
}
def usesData = _usesData
def usesFile = _usesFile
def usesSizeScale = _usesSizeScale
def logr = logger
def onEvent: EngineListener = _engineListener
def onEvent_=(e: EngineListener) = {
if (e != null)
_engineListener = e
}
def skin: Option[SkinType] = skins.headOption
/** NOTE: this will ADD to list, not replace anything. wanted simple script call */
def skin_=(sk: SkinType): Unit = {
if (null != sk)
skins += sk
else
logger.warn("null skin")
}
def destroy(): Unit = {
trials.clear()
skins.clear()
_engineListener = NULL_ENGINE_LISTENER
validator = NULL_TICK_LISTENER
controls.clear()
equation.clear()
tracked.clear()
}
def gameLost(): Unit = currentTrial.forceLose = true
}
| MemoryNetworks/memnets | api/src/main/scala/memnets/core/impl/ModelImpl.scala | Scala | apache-2.0 | 3,041 |
package org.jetbrains.plugins.scala
package lang
package psi
package impl
package expr
import com.intellij.lang.ASTNode
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.types.api.Nothing
import org.jetbrains.plugins.scala.lang.psi.types.result._
/**
* @author Alexander Podkhalyuzin, ilyas
*/
class ScThrowImpl(node: ASTNode) extends ScExpressionImplBase(node) with ScThrow {
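  // Comment added for clarity: a `throw` expression never yields a value, so its type is Nothing.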
protected override def innerType: TypeResult = Right(Nothing)
override def toString: String = "ThrowStatement"
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScThrowImpl.scala | Scala | apache-2.0 | 549 |
package com.shocktrade.common.models.post
import com.shocktrade.common.models.user.UserLike
import scala.scalajs.js
import scala.scalajs.js.annotation.ScalaJSDefined
/**
* Represents a reply
* @author [email protected]
* @see [[ReplyLikes]]
*/
class Reply extends js.Object {
var _id: js.UndefOr[String] = js.undefined
var text: js.UndefOr[String] = js.undefined
var submitter: js.UndefOr[UserLike] = js.undefined
var creationTime: js.UndefOr[js.Date] = js.undefined
var lastUpdateTime: js.UndefOr[js.Date] = js.undefined
// UI-only indicators
var likeLoading: js.UndefOr[Boolean] = js.undefined
}
/**
* Reply Companion
* @author [email protected]
*/
object Reply {
def apply(text: String, submitter: UserLike): Reply = {
val reply = new Reply()
reply.text = text
reply.submitter = submitter
reply.creationTime = new js.Date()
reply
}
} | ldaniels528/shocktrade.js | app/shared/common/src/main/scala/com/shocktrade/common/models/post/Reply.scala | Scala | apache-2.0 | 913 |
class A { def m() { var x = new 0; } }
object Main { def main(args: Array[String]) { } }
| tobast/compil-petitscala | tests/syntax/bad/testfile-expr8-1.scala | Scala | gpl-3.0 | 89 |
package features.implicits
// Example 1: TypeClass
object TypeClass{
def lessThan[A: Ordering](a: A, b: A): Boolean = implicitly[Ordering[A]].lt(a, b)
implicit val intOrdering = new Ordering[Int]{
def compare(a: Int, b:Int): Int = a - b
}
val a = 10
val b = 12
val lesser = if(lessThan(a, b)) a else b
}
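// Illustrative addition (not part of the original file): the same type class mechanism works for a
// user-defined type once an Ordering instance for it is in implicit scope.
object TypeClassForCustomType{
  case class Version(major: Int, minor: Int)
  implicit val versionOrdering: Ordering[Version] =
    Ordering.by((v: Version) => (v.major, v.minor))
  // TypeClass.lessThan is reused unchanged; the compiler supplies versionOrdering.
  val older = if(TypeClass.lessThan(Version(1, 2), Version(2, 0))) Version(1, 2) else Version(2, 0)
}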
// Example 2: Class Extension
object ClassExtension{
implicit class HexableString(s: String) {
def asHexVal: Seq[String] = s map { c =>
f"0x$c%02x"
}
}
  // "Pimp my library" usage: the implicit class above adds asHexVal to plain Strings
  val hexVal = "implicit".asHexVal
}
object InternalDsls{
implicit class Recoverable[A](f: =>A){
def recover(g: Throwable => A): A =
try{
f
} catch {
case t: Throwable => g(t)
}
}
def thisThrows(): Int = throw new Exception("Argh!")
val stable = thisThrows() recover { t =>
if(t.getMessage == "Argh!") {
10
}else{
5
}
}
}
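// Illustrative addition (not part of the original file): the "decluttering" usage mentioned in the
// note below, sketched with an implicit parameter so call sites need not thread a configuration
// value through explicitly. RetryConfig and fetch are made-up example names.
object Decluttering{
  case class RetryConfig(maxAttempts: Int)
  implicit val defaultConfig: RetryConfig = RetryConfig(3)

  def fetch(url: String)(implicit config: RetryConfig): String =
    s"GET $url (up to ${config.maxAttempts} attempts)"

  // The compiler supplies defaultConfig here; no explicit config argument at the call site.
  val page = fetch("http://example.org")
}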
/**
 * Other Usages:
 * - Decluttering code (sketched in the Decluttering object above)
*/ | adilakhter/scalaznoob | src/main/scala/features/implicits/Implicits.scala | Scala | apache-2.0 | 963 |
/*
* @author Francisco de Freitas
*
* Copyright 2011 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.examples
import com.signalcollect._
/**
* Signal/Collect implementation of finding Hamiltonian paths in graphs.
*/
object Hamiltonian extends App {
val graph = GraphBuilder.build
/**
* Still need to test performance on complete and larger graphs
*/
graph.addVertex(new HamiltonianVertex("a", Map(List("a") -> 0)))
graph.addVertex(new HamiltonianVertex("b", Map(List("b") -> 0)))
graph.addVertex(new HamiltonianVertex("c", Map(List("c") -> 0)))
graph.addVertex(new HamiltonianVertex("d", Map(List("d") -> 0)))
graph.addVertex(new HamiltonianVertex("e", Map(List("e") -> 0)))
graph.addEdge("a", new HamiltonianEdge("d", 3))
graph.addEdge("d", new HamiltonianEdge("a", 3))
graph.addEdge("a", new HamiltonianEdge("b", 1))
graph.addEdge("b", new HamiltonianEdge("a", 1))
graph.addEdge("d", new HamiltonianEdge("b", 2))
graph.addEdge("b", new HamiltonianEdge("d", 2))
graph.addEdge("d", new HamiltonianEdge("c", 1))
graph.addEdge("c", new HamiltonianEdge("d", 1))
graph.addEdge("b", new HamiltonianEdge("c", 1))
graph.addEdge("c", new HamiltonianEdge("b", 1))
  // known limitation: with isolated vertices the algorithm may be unable to find Hamiltonian paths, depending on the starting vertex
graph.addEdge("e", new HamiltonianEdge("a", 1))
graph.addEdge("a", new HamiltonianEdge("e", 1))
val stats = graph.execute
println(stats)
graph.foreachVertex(println(_))
graph.shutdown
}
/**
 * The state of a vertex is the set of all paths collected from the graph so far.
 * Each path avoids "revisiting" vertices: no vertex id appears twice within a path.
 * The implementation is rather inefficient: it keeps a map whose keys are paths (lists of vertex ids)
 * and whose values are the summed edge weights, e.g. Map(List("c", "b", "a") -> 2).
 *
 * IMPORTANT CONSTRAINT: this algorithm is ONLY correct if the graph is bidirectional and has no "dangling" vertices.
 */
class HamiltonianVertex(vertexId: String, initialState: Map[List[String], Int]) extends DataGraphVertex(vertexId, initialState) {
override type Signal = Map[List[String], Int]
  /*
   * The state will contain all paths visited so far, regardless of path length.
   */
def collect: Map[List[String], Int] = {
// consolidate the maps into one map
val pathMap = signals reduceLeft (_ ++ _)
// add signal maps to state as a one map
val newState = List(pathMap, state) reduceLeft (_ ++ _)
newState
}
  /*
   * Returns the longest collected path starting from this vertex (as if it were the initial one),
   * choosing the minimum-weight path among those of maximal length.
   */
override def toString = {
val max = (state.keySet).foldLeft(0)((i, s) => i max s.length)
val longests = ((state filter { x => x._1.length == max }))
var min = Int.MaxValue
var key = List("")
for (k <- longests.keySet)
if (longests.get(k).get < min) {
min = longests.get(k).get
key = k
}
"Id: " + id + " | Path: [" + key.mkString("->") + "]=" + min
}
}
/**
 * The edge's signal function sends to its target vertex the paths collected so far by the
 * source vertex (ignoring paths that already contain the target vertex), each extended with
 * the target id and this edge's weight, in order to determine the Hamiltonian paths.
 *
 * @param t the id of the target vertex
 * @param w the weight of the edge
 */
class HamiltonianEdge(t: Any, w: Int) extends OnlySignalOnChangeEdge(t) {
override def weight: Double = w
type Source = HamiltonianVertex
def signal = {
// signals only paths that do not contain the target vertex id
((source.state.keySet) filterNot { x => x contains (id.targetId) }).map { k =>
Tuple2(k.::(id.targetId.toString), source.state.get(k).get + weight.toInt)
}.toMap
}
}
| mageru/signal-collect | src/main/scala/com/signalcollect/examples/Hamiltonian.scala | Scala | apache-2.0 | 4,320 |
package org.ai4fm.proofprocess.core.analysis
import org.ai4fm.proofprocess.Term
/**
* A trait to signal that the term can be matched using `==`.
*
* This is a type-based solution to ensure that terms are matched correctly.
*
* Note that EMF does not use structural equality itself, so this trait requires
* bridging from EMF to structural equality.
*
* @author Andrius Velykis
*/
trait EqTerm extends Eq {
def term: Term
}
| andriusvelykis/proofprocess | org.ai4fm.proofprocess.core/src/org/ai4fm/proofprocess/core/analysis/EqTerm.scala | Scala | epl-1.0 | 444 |
/**
 * ScaBa - A Scala implementation of Discrete Bayesian (Belief) Networks
*
* (c) 2015 Jan Charles Lenk
*
* Licensed under LGPL
*
*
* @author [email protected]
*/
package scaba.example
import scaba._
import scaba.inference._
object Test extends App {
test()
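  // Comment added for clarity: evaluates f once and returns its result together with the elapsed
  // wall-clock time in microseconds.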
def profile[T]( f: => T):(T,Long) = {
val t0 = System.nanoTime()
val r = f
val t1 = System.nanoTime()
(r, (t1-t0)/1000)
}
def meandAndSD(vs:List[Int]):Tuple4[Double,Double,Double,Int] = {
val mean = vs.sum.toDouble/vs.length
val sd = Math.sqrt( ((vs.map(v => (mean - v)*(mean - v))).sum)/vs.length)
    val sorted = vs.sorted
    // use the sorted values so the third component of the result is actually the median
    (mean, sd, sorted(vs.length / 2), vs.length)
}
def test( al:InferenceAlg, bn:BBN, query:InferenceQuery, repetitions: Int):QueryResult = {
implicit val bbn = bn
implicit val alg = al
val results = for (i<-1 to repetitions) yield (
profile { P(query) }
)
val mean = results.map( _._2).sum/results.length
val sd = Math.sqrt( ((results.map(v => (mean - v._2)*(mean - v._2))).sum)/results.length)
(alg.getClass.getSimpleName, results.head._1, mean.toInt , sd.toInt, results.map( _._2.toInt).toList)
}
def test( al:List[InferenceAlg], bn:BBN, query:InferenceQuery, repetitions:Int):AlgoQueryResult = {
val results = for (alg <- al) yield (test(alg, bn, query,repetitions))
val list =results.map(r=>(r._1,r._5)).toMap
val equality = results.forall( _._2==results.head._2)
val ret = (query, results, equality, list)
    println(query+"\n"+results.map(a=>(a._1,a._2,a._3,a._4)).mkString("\n"))
ret
}
def test2( al:List[InferenceAlg], queries: List[(BBN,InferenceQuery)], repetitions:Int):List[AlgoQueryResult] = {
val results = queries.map( q => test(al, q._1, q._2, repetitions))
val a = al.map( alg => (alg.getClass.getSimpleName, results.flatMap( r=>r._4(alg.getClass.getSimpleName))))
val rrr = a.map( e=>(e._1, meandAndSD(e._2)))
    println( rrr.mkString("\n"))
// var aggregation = al.map(a=> (a.getClass.getSimpleName,List[Int]())).toMap
// val r = results.tail.map( _._2.map(s=>(s._1,s._3)).toMap)
// r.foreach( x =>
// x.foreach( u =>
// aggregation += (u._1 -> (u._2 :: aggregation(u._1)))
// )
// )
// val uu = aggregation.map( a => {
// val sum = a._2.sum
// (a._1, sum/a._2.length)
// }).toMap
//
// println( uu.mkString("\\n"))
results
}
def test() = {
implicit var bbn:BBN = Alarm
val queries = List[(BBN,InferenceQuery)](
(Alarm, 'Burglary | 'MaryCalls<<'True & 'JohnCalls << 'True),
(Alarm, 'Burglary | 'MaryCalls<<'True & 'JohnCalls << 'True),
(Alarm, 'Burglary | 'MaryCalls<<'True ),
(Alarm, 'Earthquake | 'MaryCalls<<'True & 'JohnCalls << 'True),
(Alarm, 'Earthquake | 'MaryCalls<<'True ),
(Alarm, 'MaryCalls | 'Earthquake<<'True),
(Alarm, 'JohnCalls | 'Earthquake<<'True),
(Alarm, 'MaryCalls | 'Earthquake<<'True & 'Burglary << 'False),
(Alarm, 'JohnCalls | 'Earthquake<<'True & 'Burglary << 'False)
)
bbn = Student
val queries2 = List[(BBN,InferenceQuery)](
(Student, 'Letter | 'Intell << 'hi),
(Student, 'Letter | 'Intell << 'lo),
(Student, 'Letter | 'Diff << 'hi & 'SAT << 'hi),
(Student, 'Letter | 'Diff << 'hi & 'SAT << 'lo),
(Student, 'Letter | 'Diff << 'lo & 'SAT << 'hi),
(Student, 'Letter | 'Diff << 'lo & 'SAT << 'lo),
(Student, 'Intell | 'Letter << 'good & 'SAT << 'hi),
(Student, 'Intell | 'Letter << 'bad & 'SAT << 'hi),
(Student, 'Intell | 'Letter << 'good & 'SAT << 'lo),
(Student, 'Intell | 'Letter << 'bad & 'SAT << 'lo)
)
val algs = List[InferenceAlg](
Enumeration,// Elimination,
Elimination)
//test2( algs, queries, 100)
System.out.println("Starting...");
Thread.sleep(100)
val results:List[AlgoQueryResult]= test2( algs, queries ::: queries2, 100000)
}
type QueryResult = (String, List[(Symbol,Double)],Int,Int,List[Int])
type AlgoQueryResult = (InferenceQuery,List[QueryResult], Boolean,Map[String,List[Int]])
} | jachalen/scaba | src/main/scala-2.11/scaba/example/Test.scala | Scala | lgpl-2.1 | 4,214 |
package squants.energy
import squants.mass.{ChemicalAmount, Moles}
import squants.{AbstractQuantityNumeric, Dimension, PrimaryUnit, Quantity, SiUnit, UnitConverter, UnitOfMeasure}
/**
*
* @author Nicolas Vinuesa
* @since 1.4
*
* @param value Double
*/
final class MolarEnergy private (val value: Double, val unit: MolarEnergyUnit)
extends Quantity[MolarEnergy] {
def dimension = MolarEnergy
def *(that: ChemicalAmount): Energy = Joules(this.toJoulesPerMole * that.toMoles)
def toJoulesPerMole = to(JoulesPerMole)
}
object MolarEnergy extends Dimension[MolarEnergy] {
private[energy] def apply[A](n: A, unit: MolarEnergyUnit)(implicit num: Numeric[A]) = new MolarEnergy(num.toDouble(n), unit)
def apply(value: Any) = parse(value)
def name = "MolarEnergy"
def primaryUnit = JoulesPerMole
def siUnit = JoulesPerMole
def units = Set(JoulesPerMole)
}
trait MolarEnergyUnit extends UnitOfMeasure[MolarEnergy] with UnitConverter {
def apply[A](n: A)(implicit num: Numeric[A]) = MolarEnergy(n, this)
}
object JoulesPerMole extends MolarEnergyUnit with PrimaryUnit with SiUnit {
val symbol = Joules.symbol + "/" + Moles.symbol
}
object MolarEnergyConversions {
lazy val joulePerMole = JoulesPerMole(1)
implicit class MolarEnergyConversions[A](n: A)(implicit num: Numeric[A]) {
def joulesPerMole = JoulesPerMole(n)
}
implicit object MolarEnergyNumeric extends AbstractQuantityNumeric[MolarEnergy](MolarEnergy.primaryUnit)
}
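// Hedged usage sketch (not part of the original file): multiplying a molar energy by an
// amount of substance yields an Energy via the `*` operator defined on MolarEnergy above.
object MolarEnergyExample {
  import MolarEnergyConversions._
  val reactionEnergy: Energy = 5.joulesPerMole * Moles(2) // expected to equal Joules(10)
}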
| garyKeorkunian/squants | shared/src/main/scala/squants/energy/MolarEnergy.scala | Scala | apache-2.0 | 1,473 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.check.checksum
import io.gatling.core.check.Check
import io.gatling.http.check.HttpCheck
import io.gatling.http.check.HttpCheckScope.Checksum
import io.gatling.http.response.Response
class ChecksumCheck(val algorithm: String, wrapped: Check[Response]) extends HttpCheck(wrapped, Checksum, None)
| timve/gatling | gatling-http/src/main/scala/io/gatling/http/check/checksum/ChecksumCheck.scala | Scala | apache-2.0 | 939 |
/** This file is part of Nexus, which is Copyright 2012 Johannes Åman Pohjola
* Nexus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, version 3.
*
* Nexus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Nexus. If not, see <http://www.gnu.org/licenses/>.
*/
package nexus
case class Move(move:SemiMove,leave:List[DrawTile],draw:List[DrawTile]) {
}
| Sen045/nexus | src/nexus/move.scala | Scala | gpl-3.0 | 745 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.ops.{TruncateDiv => TruncateDivOps}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.tf.Context
import org.tensorflow.framework.{DataType, NodeDef}
import scala.reflect.ClassTag
class TruncateDiv extends TensorflowOpsLoader {
import Utils._
override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder, context: Context[T])
(implicit ev: TensorNumeric[T]): Module[T] = {
val t = getType(nodeDef.getAttrMap, "T")
if (t == DataType.DT_INT32) {
TruncateDivOps[T, Int]()
} else {
throw new UnsupportedOperationException(s"Not support load TruncateDiv when type is ${t}")
}
}
}
| wzhongyuan/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/utils/tf/loaders/TruncateDiv.scala | Scala | apache-2.0 | 1,444 |
package fpinscala.iomonad
object IO0 {
/*
  Our first attempt at a data type for representing computations that
may perform I/O. Has a simple 'interpreter' baked in--the `run`
function, which just returns `Unit`.
*/
trait IO { self =>
def run: Unit
def ++(io: IO): IO = new IO {
def run = { self.run; io.run }
}
}
object IO {
def empty: IO = new IO { def run = () }
}
/*
The API of this `IO` type isn't very useful. Not many operations
(it is only a monoid), and not many laws to help with reasoning. It
  is completely _opaque_. It also cannot represent _input_ effects, such as
  reading from the console, for instance:
*/
def fahrenheitToCelsius(f: Double): Double =
(f - 32) * 5.0/9.0
// Ordinary code with side effects
def converter: Unit = {
println("Enter a temperature in degrees Fahrenheit: ")
val d = readLine.toDouble
println(fahrenheitToCelsius(d))
}
// A pure version is not possible!
/*
def converter: IO = {
val prompt: IO = PrintLine("Enter a temperature in degrees fahrenheit: ")
// now what ???
}
*/
}
object IO1 {
/*
We need a way for our `IO` actions to yield a result of some
meaningful type. We do this by adding a type parameter to `IO`,
which now forms a `Monad`.
*/
sealed trait IO[A] { self =>
def run: A
def map[B](f: A => B): IO[B] =
new IO[B] { def run = f(self.run) }
def flatMap[B](f: A => IO[B]): IO[B] =
new IO[B] { def run = f(self.run).run }
}
object IO extends Monad[IO] {
def unit[A](a: => A): IO[A] = new IO[A] { def run = a }
def flatMap[A,B](fa: IO[A])(f: A => IO[B]) = fa flatMap f
def apply[A](a: => A): IO[A] = unit(a) // syntax for IO { .. }
def ref[A](a: A): IO[IORef[A]] = IO { new IORef(a) }
sealed class IORef[A](var value: A) {
def set(a: A): IO[A] = IO { value = a; a }
def get: IO[A] = IO { value }
def modify(f: A => A): IO[A] = get flatMap (a => set(f(a)))
}
}
// We can now express the example
def ReadLine: IO[String] = IO { readLine }
def PrintLine(msg: String): IO[Unit] = IO { println(msg) }
import IO0.fahrenheitToCelsius
def converter: IO[Unit] = for {
_ <- PrintLine("Enter a temperature in degrees Fahrenheit: ")
d <- ReadLine.map(_.toDouble)
_ <- PrintLine(fahrenheitToCelsius(d).toString)
} yield ()
/* Some other examples */
import IO._ // import all the `IO` combinators that come from `Monad`
// An `IO[Unit]` that reads a line from the console and echoes it back.
val echo = ReadLine.flatMap(PrintLine)
// Parses an `Int` by reading a line from the console.
val readInt: IO[Int] = ReadLine.map(_.toInt)
// Parses an `(Int,Int)` by reading two lines from the console.
val readInts: IO[(Int,Int)] = readInt ** readInt
// Repeat `converter` 5 times, discarding the results (which are
// just `Unit`). We can replace `converter` here with any `IO`
// action we wished to repeat 5 times (ex: `echo` or `readInts`).
val prompts: IO[Unit] = replicateM_(5)(converter)
// An `IO[List[String]]` that will read 10 lines from the console and
// return the list of results.
val lines: IO[List[String]] = replicateM(10)(ReadLine)
/*
Larger example using various monadic combinators. Sample run:
The Amazing Factorial REPL, v2.0
q - quit
<number> - compute the factorial of the given number
<anything else> - bomb with horrible error
3
factorial: 6
7
factorial: 5040
q
*/
val helpstring = """
| The Amazing Factorial REPL, v2.0
| q - quit
| <number> - compute the factorial of the given number
| <anything else> - bomb with horrible error
""".trim.stripMargin
def factorial(n: Int): IO[Int] = for {
acc <- ref(1)
_ <- foreachM (1 to n toStream) (i => acc.modify(_ * i).skip)
result <- acc.get
} yield result
val factorialREPL: IO[Unit] = sequence_(
IO { println(helpstring) },
doWhile { IO { readLine } } { line =>
val ok = line != "q"
when (ok) { for {
n <- factorial(line.toInt)
_ <- IO { println("factorial: " + n) }
} yield () }
}
)
}
object IO2a {
/*
The previous IO representation overflows the stack for some programs.
  The problem is that `run` calls itself recursively, which means that
an infinite or long running IO computation will have a chain of regular
calls to `run`, eventually overflowing the stack.
The general solution is to make the `IO` type into a data type that we
interpret using a tail recursive loop, using pattern matching.
*/
sealed trait IO[A] {
def flatMap[B](f: A => IO[B]): IO[B] =
FlatMap(this, f) // we do not interpret the `flatMap` here, just return it as a value
def map[B](f: A => B): IO[B] =
flatMap(f andThen (Return(_)))
}
case class Return[A](a: A) extends IO[A]
case class Suspend[A](resume: () => A) extends IO[A]
case class FlatMap[A,B](sub: IO[A], k: A => IO[B]) extends IO[B]
object IO extends Monad[IO] { // Notice that none of these operations DO anything
def unit[A](a: => A): IO[A] = Return(a)
def flatMap[A,B](a: IO[A])(f: A => IO[B]): IO[B] = a flatMap f
}
def printLine(s: String): IO[Unit] =
Suspend(() => Return(println(s)))
val p = IO.forever(printLine("Still going..."))
val actions: Stream[IO[Unit]] =
Stream.fill(100000)(printLine("Still going..."))
val composite: IO[Unit] =
actions.foldLeft(IO.unit(())) { (acc, a) => acc flatMap { _ => a } }
  // There is only one sensible way to implement this as a
  // tail-recursive function; the one tricky case is left-nested
  // flatMaps, as in `((a flatMap f) flatMap g)`, which we
  // reassociate to the right as `a flatMap (ar => f(ar) flatMap g)`
@annotation.tailrec def run[A](io: IO[A]): A = io match {
case Return(a) => a
case Suspend(r) => r()
case FlatMap(x, f) => x match {
case Return(a) => run(f(a))
case Suspend(r) => run(f(r()))
case FlatMap(y, g) => run(y flatMap (a => g(a) flatMap f))
}
}
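  // Hedged illustration (not part of the original text): because `run` above is a
  // tail-recursive interpreter, evaluating the 100000 chained actions in `composite`
  // uses constant stack space, where IO1's recursive interpreter would overflow.
  def runCompositeExample(): Unit = run(composite)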
}
object IO2b {
/*
* As it turns out, there's nothing about this data type that is specific
   * to I/O; it's just a general-purpose data type for optimizing tail calls.
* Here it is, renamed to `TailRec`. This type is also sometimes called
* `Trampoline`, because of the way interpreting it bounces back and forth
* between the main `run` loop and the functions contained in the `TailRec`.
*/
sealed trait TailRec[A] {
def flatMap[B](f: A => TailRec[B]): TailRec[B] =
FlatMap(this, f)
def map[B](f: A => B): TailRec[B] =
flatMap(f andThen (Return(_)))
}
case class Return[A](a: A) extends TailRec[A]
case class Suspend[A](resume: () => A) extends TailRec[A]
case class FlatMap[A,B](sub: TailRec[A], k: A => TailRec[B]) extends TailRec[B]
object TailRec extends Monad[TailRec] {
def unit[A](a: => A): TailRec[A] = Return(a)
def flatMap[A,B](a: TailRec[A])(f: A => TailRec[B]): TailRec[B] = a flatMap f
}
@annotation.tailrec def run[A](t: TailRec[A]): A = t match {
case Return(a) => a
case Suspend(r) => r()
case FlatMap(x, f) => x match {
case Return(a) => run(f(a))
case Suspend(r) => run(f(r()))
case FlatMap(y, g) => run(y flatMap (a => g(a) flatMap f))
}
}
}
object IO2c {
import fpinscala.parallelism.Nonblocking._
/*
* We've solved our first problem of ensuring stack safety, but we're still
* being very inexplicit about what sort of effects can occur, and we also
* haven't found a way of describing asynchronous computations. Our `Suspend`
* thunks will just block the current thread when run by the interpreter.
* We could fix that by changing the signature of `Suspend` to take a `Par`.
* We'll call this new type `Async`.
*/
sealed trait Async[A] { // will rename this type to `Async`
def flatMap[B](f: A => Async[B]): Async[B] =
FlatMap(this, f)
def map[B](f: A => B): Async[B] =
flatMap(f andThen (Return(_)))
}
case class Return[A](a: A) extends Async[A]
case class Suspend[A](resume: Par[A]) extends Async[A] // notice this is a `Par`
case class FlatMap[A,B](sub: Async[A], k: A => Async[B]) extends Async[B]
object Async extends Monad[Async] {
def unit[A](a: => A): Async[A] = Return(a)
def flatMap[A,B](a: Async[A])(f: A => Async[B]): Async[B] = a flatMap f
}
// return either a `Suspend`, a `Return`, or a right-associated `FlatMap`
@annotation.tailrec def step[A](async: Async[A]): Async[A] = async match {
case FlatMap(FlatMap(x, f), g) => step(x flatMap (a => f(a) flatMap g))
case FlatMap(Return(x), f) => step(f(x))
case _ => async
}
def run[A](async: Async[A]): Par[A] = step(async) match {
case Return(a) => Par.unit(a)
case Suspend(r) => r
case FlatMap(x, f) => x match {
case Suspend(r) => Par.flatMap(r)(a => run(f(a)))
case _ => sys.error("Impossible, since `step` eliminates these cases")
}
}
// The fact that `run` only uses the `unit` and `flatMap` functions of
  // `Par` is a clue that choosing `Par` was too specific a choice:
  // this interpreter could be generalized to work with any monad.
}
object IO3 {
/*
We can generalize `TailRec` and `Async` to the type `Free`, which is
a `Monad` for any choice of `F`.
*/
sealed trait Free[F[_],A] {
def flatMap[B](f: A => Free[F,B]): Free[F,B] =
FlatMap(this, f)
def map[B](f: A => B): Free[F,B] =
flatMap(f andThen (Return(_)))
}
case class Return[F[_],A](a: A) extends Free[F, A]
case class Suspend[F[_],A](s: F[A]) extends Free[F, A]
case class FlatMap[F[_],A,B](s: Free[F, A],
f: A => Free[F, B]) extends Free[F, B]
// Exercise 1: Implement the free monad
def freeMonad[F[_]]: Monad[({type f[a] = Free[F,a]})#f] =
new Monad[({type f[a] = Free[F,a]})#f] {
def unit[A](a: => A) = Return(a)
def flatMap[A,B](fa: Free[F, A])(f: A => Free[F, B]) = fa flatMap f
}
// Exercise 2: Implement a specialized `Function0` interpreter.
@annotation.tailrec
def runTrampoline[A](a: Free[Function0,A]): A = (a) match {
case Return(a) => a
case Suspend(r) => r()
case FlatMap(x,f) => x match {
case Return(a) => runTrampoline { f(a) }
case Suspend(r) => runTrampoline { f(r()) }
case FlatMap(a0,g) => runTrampoline { a0 flatMap { a0 => g(a0) flatMap f } }
}
}
// Exercise 3: Implement a `Free` interpreter which works for any `Monad`
def run[F[_],A](a: Free[F,A])(implicit F: Monad[F]): F[A] = step(a) match {
case Return(a) => F.unit(a)
case Suspend(r) => r
case FlatMap(Suspend(r), f) => F.flatMap(r)(a => run(f(a)))
case _ => sys.error("Impossible, since `step` eliminates these cases")
}
// return either a `Suspend`, a `Return`, or a right-associated `FlatMap`
@annotation.tailrec
def step[F[_],A](a: Free[F,A]): Free[F,A] = a match {
case FlatMap(FlatMap(x, f), g) => step(x flatMap (a => f(a) flatMap g))
case FlatMap(Return(x), f) => step(f(x))
case _ => a
}
/*
The type constructor `F` lets us control the set of external requests our
program is allowed to make. For instance, here is a type that allows for
only console I/O effects.
*/
import fpinscala.parallelism.Nonblocking.Par
sealed trait Console[A] {
def toPar: Par[A]
def toThunk: () => A
// other interpreters
def toState: ConsoleState[A]
def toReader: ConsoleReader[A]
}
case object ReadLine extends Console[Option[String]] {
def toPar = Par.lazyUnit(run)
def toThunk = () => run
def run: Option[String] =
try Some(readLine())
catch { case e: Exception => None }
def toState = ConsoleState { bufs =>
bufs.in match {
case List() => (None, bufs)
case h :: t => (Some(h), bufs.copy(in = t))
}
}
def toReader = ConsoleReader { in => Some(in) }
}
case class PrintLine(line: String) extends Console[Unit] {
def toPar = Par.lazyUnit(println(line))
def toThunk = () => println(line)
def toReader = ConsoleReader { s => () } // noop
def toState = ConsoleState { bufs => ((), bufs.copy(out = bufs.out :+ line)) } // append to the output
}
object Console {
type ConsoleIO[A] = Free[Console, A]
def readLn: ConsoleIO[Option[String]] =
Suspend(ReadLine)
def printLn(line: String): ConsoleIO[Unit] =
Suspend(PrintLine(line))
}
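  // Hedged example (not part of the original text): a tiny `ConsoleIO` program built
  // only from the `readLn`/`printLn` constructors above; the value name and the
  // prompt strings are illustrative, not from the book.
  import Console.{ConsoleIO, printLn, readLn}
  val greet: ConsoleIO[Unit] = for {
    _    <- printLn("What is your name?")
    name <- readLn
    _    <- printLn(s"Hello, ${name.getOrElse("stranger")}!")
  } yield ()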
/*
How do we actually _run_ a `ConsoleIO` program? We don't have a `Monad[Console]`
for calling `run`, and we can't use `runTrampoline` either since we have `Console`,
not `Function0`. We need a way to translate from `Console` to `Function0`
(if we want to evaluate it sequentially) or a `Par`.
We introduce the following type to do this translation:
*/
/* Translate between any `F[A]` to `G[A]`. */
trait Translate[F[_], G[_]] { def apply[A](f: F[A]): G[A] }
type ~>[F[_], G[_]] = Translate[F,G] // gives us infix syntax `F ~> G` for `Translate[F,G]`
implicit val function0Monad = new Monad[Function0] {
def unit[A](a: => A) = () => a
def flatMap[A,B](a: Function0[A])(f: A => Function0[B]) =
() => f(a())()
}
implicit val parMonad = new Monad[Par] {
def unit[A](a: => A) = Par.unit(a)
def flatMap[A,B](a: Par[A])(f: A => Par[B]) = Par.fork { Par.flatMap(a)(f) }
}
def runFree[F[_],G[_],A](free: Free[F,A])(t: F ~> G)(
implicit G: Monad[G]): G[A] =
step(free) match {
case Return(a) => G.unit(a)
case Suspend(r) => t(r)
case FlatMap(Suspend(r), f) => G.flatMap(t(r))(a => runFree(f(a))(t))
case _ => sys.error("Impossible, since `step` eliminates these cases")
}
val consoleToFunction0 =
new (Console ~> Function0) { def apply[A](a: Console[A]) = a.toThunk }
val consoleToPar =
new (Console ~> Par) { def apply[A](a: Console[A]) = a.toPar }
def runConsoleFunction0[A](a: Free[Console,A]): () => A =
runFree[Console,Function0,A](a)(consoleToFunction0)
def runConsolePar[A](a: Free[Console,A]): Par[A] =
runFree[Console,Par,A](a)(consoleToPar)
/*
The `runConsoleFunction0` implementation is unfortunately not stack safe,
  because it relies on the stack safety of the underlying monad, and the
`Function0` monad we gave is not stack safe. To see the problem, try
running: `freeMonad.forever(Console.printLn("Hello"))`.
*/
// Exercise 4 (optional, hard): Implement `runConsole` using `runFree`,
// without going through `Par`. Hint: define `translate` using `runFree`.
def translate[F[_],G[_],A](f: Free[F,A])(fg: F ~> G): Free[G,A] = {
type FreeG[A] = Free[G,A]
val t = new (F ~> FreeG) {
def apply[A](a: F[A]): Free[G,A] = Suspend { fg(a) }
}
runFree(f)(t)(freeMonad[G])
}
def runConsole[A](a: Free[Console,A]): A =
runTrampoline { translate(a)(new (Console ~> Function0) {
def apply[A](c: Console[A]) = c.toThunk
})}
/*
There is nothing about `Free[Console,A]` that requires we interpret
`Console` using side effects. Here are two pure ways of interpreting
a `Free[Console,A]`.
*/
import Console._
case class Buffers(in: List[String], out: Vector[String])
// A specialized state monad
case class ConsoleState[A](run: Buffers => (A, Buffers)) {
def map[B](f: A => B): ConsoleState[B] =
ConsoleState { s =>
val (a, s1) = run(s)
(f(a), s1)
}
def flatMap[B](f: A => ConsoleState[B]): ConsoleState[B] =
ConsoleState { s =>
val (a, s1) = run(s)
f(a).run(s1)
}
}
object ConsoleState {
implicit val monad = new Monad[ConsoleState] {
def unit[A](a: => A) = ConsoleState(bufs => (a,bufs))
def flatMap[A,B](ra: ConsoleState[A])(f: A => ConsoleState[B]) = ra flatMap f
}
}
// A specialized reader monad
case class ConsoleReader[A](run: String => A) {
def map[B](f: A => B): ConsoleReader[B] =
ConsoleReader(r => f(run(r)))
def flatMap[B](f: A => ConsoleReader[B]): ConsoleReader[B] =
ConsoleReader(r => f(run(r)).run(r))
}
object ConsoleReader {
implicit val monad = new Monad[ConsoleReader] {
def unit[A](a: => A) = ConsoleReader(_ => a)
def flatMap[A,B](ra: ConsoleReader[A])(f: A => ConsoleReader[B]) = ra flatMap f
}
}
val consoleToState =
new (Console ~> ConsoleState) { def apply[A](a: Console[A]) = a.toState }
val consoleToReader =
new (Console ~> ConsoleReader) { def apply[A](a: Console[A]) = a.toReader }
  /* Can interpret these as before to convert our `ConsoleIO` to a pure value that does no I/O! */
def runConsoleReader[A](io: ConsoleIO[A]): ConsoleReader[A] =
runFree[Console,ConsoleReader,A](io)(consoleToReader)
def runConsoleState[A](io: ConsoleIO[A]): ConsoleState[A] =
runFree[Console,ConsoleState,A](io)(consoleToState)
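  // Hedged illustration (not part of the original text): interpreting a ConsoleIO
  // program purely by feeding it canned input, so no real console I/O happens.
  def pureReadExample: (Option[String], Buffers) =
    runConsoleState(readLn).run(Buffers(in = List("canned input"), out = Vector.empty))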
// So `Free[F,A]` is not really an I/O type. The interpreter `runFree` gets
// to choose how to interpret these `F` requests, and whether to do "real" I/O
// or simply convert to some pure value!
  // NB: These interpretations are not stack safe for the same reason;
  // we can instead work with `case class ConsoleReader[A](run: String => Trampoline[A])`,
  // which gives us a stack-safe monad
// We conclude that a good representation of an `IO` monad is this:
type IO[A] = Free[Par, A]
/*
* Exercise 5: Implement a non-blocking read from an asynchronous file channel.
* We'll just give the basic idea - here, we construct a `Future`
* by reading from an `AsynchronousFileChannel`, a `java.nio` class
* which supports asynchronous reads.
*/
import java.nio._
import java.nio.channels._
  // Provides the syntax `Async { k => ... }` for asynchronous IO blocks.
def Async[A](cb: (A => Unit) => Unit): IO[A] =
Suspend(Par.async(cb))
// Provides the `IO { ... }` syntax for synchronous IO blocks.
def IO[A](a: => A): IO[A] = Suspend { Par.delay(a) }
def read(file: AsynchronousFileChannel,
fromPosition: Long,
numBytes: Int): Par[Either[Throwable, Array[Byte]]] =
Par.async { (cb: Either[Throwable, Array[Byte]] => Unit) =>
val buf = ByteBuffer.allocate(numBytes)
file.read(buf, fromPosition, (), new CompletionHandler[Integer, Unit] {
def completed(bytesRead: Integer, ignore: Unit) = {
val arr = new Array[Byte](bytesRead)
buf.slice.get(arr, 0, bytesRead)
cb(Right(arr))
}
def failed(err: Throwable, ignore: Unit) =
cb(Left(err))
})
}
}
| ardlema/fpinscala | exercises/src/main/scala/fpinscala/iomonad/IO.scala | Scala | mit | 18,850 |
/*
* Copyright (c) 2021, salesforce.com, inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
package com.krux.hyperion.activity
import com.krux.hyperion.HyperionContext
import com.krux.hyperion.adt.{ HBoolean, HString }
import com.krux.hyperion.common.{ BaseFields, PipelineObjectId, S3Uri }
import com.krux.hyperion.expression.RunnableObject
import com.krux.hyperion.resource.{ Ec2Resource, Resource }
case class SendFlowdockMessageActivity private (
baseFields: BaseFields,
activityFields: ActivityFields[Ec2Resource],
shellCommandActivityFields: ShellCommandActivityFields,
jarUri: HString,
mainClass: HString,
flowApiToken: HString,
message: HString,
user: HString,
continueOnError: HBoolean,
tags: Seq[HString]
) extends BaseShellCommandActivity {
type Self = SendFlowdockMessageActivity
def updateBaseFields(fields: BaseFields) = copy(baseFields = fields)
def updateActivityFields(fields: ActivityFields[Ec2Resource]) = copy(activityFields = fields)
def updateShellCommandActivityFields(fields: ShellCommandActivityFields) = copy(shellCommandActivityFields = fields)
def continuingOnError = copy(continueOnError = HBoolean.True)
def withUser(user: HString) = copy(user = user)
def withTags(tag: HString*) = copy(tags = this.tags ++ tag)
private def arguments: Seq[HString] = Seq(
continueOnError.exists("--fail-on-error": HString).toSeq,
Seq[HString]("--api-key", flowApiToken),
Seq[HString]("--user", user),
if (tags.isEmpty) Seq.empty else Seq[HString]("--tags", tags.mkString(",")),
Seq[HString](message)
).flatten
override def scriptArguments = (jarUri.serialize: HString) +: mainClass +: arguments
}
object SendFlowdockMessageActivity extends RunnableObject {
def apply(flowApiToken: HString, message: HString)(runsOn: Resource[Ec2Resource])(implicit hc: HyperionContext): SendFlowdockMessageActivity =
new SendFlowdockMessageActivity(
baseFields = BaseFields(PipelineObjectId(SendFlowdockMessageActivity.getClass)),
activityFields = ActivityFields(runsOn),
shellCommandActivityFields = ShellCommandActivityFields(S3Uri(s"${hc.scriptUri}activities/run-jar.sh")),
jarUri = s"${hc.scriptUri}activities/hyperion-notification-activity-current-assembly.jar",
mainClass = "com.krux.hyperion.contrib.activity.notification.SendFlowdockMessage",
flowApiToken = flowApiToken,
message = message,
user = "hyperion",
continueOnError = HBoolean.False,
tags = Seq.empty
)
}
| realstraw/hyperion | contrib/activity/definition/src/main/scala/com/krux/hyperion/activity/SendFlowdockMessageActivity.scala | Scala | bsd-3-clause | 2,647 |
import sbt._
import Keys._
import sbtassembly.AssemblyKeys._
object Deploy {
val deployPackTask = TaskKey[Unit]("deploy-pack")
val deployPack = deployPackTask <<= (assembly, jarName in assembly, target, baseDirectory) map { (_, jarName, target, base) =>
IO.delete(target / "deploy")
IO.createDirectory(target / "deploy")
IO.copyFile(target / jarName, target / "deploy" / jarName)
if (base / "server.conf" exists)
IO.copyFile(base / "server.conf", target / "deploy" / "server.conf")
IO.write(target / "deploy" / "start.sh",
"""#!/bin/bash
|DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd)"
|cd $DIR
|java -Dfile.encoding=UTF8 -jar %s
|""" format (jarName) stripMargin)
target / "deploy" / "start.sh" setExecutable true
}
val deployHost = SettingKey[Option[String]]("deployHost", "default host, on which deployed files will be pushed")
val deployDest = SettingKey[Option[String]]("deployDest", "default destination on that host")
val deploySsh = InputKey[Unit]("deploy-ssh") <<= inputTask { (argTask: TaskKey[Seq[String]]) =>
(argTask, deployPackTask, deployHost, deployDest, jarName in assembly, target) map { (args, _, deployHost, deployDest, jarName, target) =>
val (host, dest) = if (args.size < 2) {
(for {
host <- deployHost
dest <- deployDest
} yield (host, dest)).getOrElse(sys.error("Destination was not provided on command line - and there was no default"))
} else (args(0), args(1))
val cmd = "rsync" +: "-avz" +: IO.listFiles(target / "deploy").map(_.toString) :+ (host+":"+dest)
println("Copying files: " + cmd.mkString(" "))
if (Process(cmd).! == 0) {
val startCmd = List("ssh", host, "-x", "cd " + dest + "; (nohup ./start.sh > server.log 2>&1 &)")
println("Starting process: " + startCmd.mkString(" "))
Process(startCmd).!
}
}
}
lazy val deploySettings = Seq(deployPack, deployHost := None, deployDest := None, deploySsh)
}
| circlespainter/FrameworkBenchmarks | frameworks/Scala/lift-stateless/project/Deploy.scala | Scala | bsd-3-clause | 2,042 |
package com.lucidchart.open.cashy.config
import play.api.Play.{configuration, current}
case class UploadFeatures(
krakenEnabled: Boolean,
compressJsEnabled: Boolean,
compressCssEnabled: Boolean
)
trait UploadFeatureConfig {
private val krakenEnabled = configuration.getBoolean("kraken.enabled").getOrElse(false)
private val jsCompressionEnabled = configuration.getBoolean("upload.jsCompression.enabled").getOrElse(false)
private val cssCompressionEnabled = configuration.getBoolean("upload.cssCompression.enabled").getOrElse(false)
implicit val uploadFeatures = UploadFeatures(krakenEnabled, jsCompressionEnabled, cssCompressionEnabled)
} | lucidsoftware/cashy | app/com/lucidchart/open/cashy/config/UploadFeatureConfig.scala | Scala | apache-2.0 | 649 |
package org.jetbrains.plugins.scala.worksheet.ammonite.runconfiguration
import com.intellij.execution.configurations.{ConfigurationFactory, ConfigurationType, RunConfiguration}
import com.intellij.openapi.project.Project
/**
* User: Dmitry.Naydanov
* Date: 13.09.17.
*/
class AmmoniteRunConfigurationFactory(tpe: ConfigurationType) extends ConfigurationFactory(tpe) {
override def createTemplateConfiguration(project: Project): RunConfiguration =
new AmmoniteRunConfiguration(project, this)
}
| jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/worksheet/ammonite/runconfiguration/AmmoniteRunConfigurationFactory.scala | Scala | apache-2.0 | 508 |
package com.getjenny.starchat.services.esclient
/**
* Created by Angelo Leto <[email protected]> on 01/07/16.
*/
object BayesOperatorCacheElasticClient extends SystemElasticClient {
override val indexSuffix: String = "bayes_operator_cache"
override val mappingPath = "/index_management/json_index_spec/system/bayes_operator_cache.json"
override val updateMappingPath = "/index_management/json_index_spec/system/update/bayes_operator_cache.json"
}
| GetJenny/starchat | src/main/scala/com/getjenny/starchat/services/esclient/BayesOperatorCacheElasticClient.scala | Scala | gpl-2.0 | 462 |
package opencl.generator
import ir._
import ir.ast._
import lift.arithmetic.SizeVar
import opencl.executor.{Compile, TestWithExecutor, Utils}
import opencl.ir._
import opencl.ir.pattern._
import org.junit.Assert._
import org.junit.Test
object TestInject extends TestWithExecutor
class TestInject {
@Test def injectExactlyOneIterationVariable(): Unit = {
val N = SizeVar("N")
val f = fun(
ArrayTypeWSWC(Float, N),
in => MapWrg( MapLcl(id)) o Split(128) $ in
)
val code = Compile(f, 128,1,1, N, 1, 1, collection.immutable.Map())
assertEquals(0, "for\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertEquals(0, "if\\\\s*\\\\(".r.findAllMatchIn(code).length)
}
@Test def injectExactlyOneIteration(): Unit = {
val inputSize = 1024
val input = Array.tabulate(inputSize)(_.toFloat)
val f = fun(
ArrayTypeWSWC(Float, SizeVar("N")),
in => MapWrg( MapLcl(id)) o Split(128) $ in
)
val inputs = Seq(input)
val (output, runtime, code) = Utils.execute(f, inputs, 128, inputSize, (true, false))
println("output.size = " + output.length)
println("output(0) = " + output(0))
println("runtime = " + runtime)
assertEquals(1, "for\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertEquals(0, "if\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertArrayEquals(input, output, 0.0f)
}
@Test def injectLessThanOneIteration(): Unit ={
val inputSize = 1024
val input = Array.tabulate(inputSize)(_.toFloat)
val f = fun(
ArrayTypeWSWC(Float, SizeVar("N")),
in => MapWrg( MapLcl(id)) o Split(64) $ in
)
val inputs = Seq(input)
val (output, _, code) = Utils.execute(f, inputs, 128, inputSize, (true, false))
assertEquals(1, "for\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertEquals(1, "if\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertArrayEquals(input, output, 0.0f)
}
@Test def injectMoreThanOneIteration(): Unit ={
val inputSize = 1024
val input = Array.tabulate(inputSize)(_.toFloat)
val f = fun(
ArrayTypeWSWC(Float, SizeVar("N")),
in => MapWrg( MapLcl(id)) o Split(256) $ in
)
val inputs = Seq(input)
val (output, _, code) = Utils.execute(f, inputs, 128, inputSize, (true, false))
assertEquals(2, "for\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertEquals(0, "if\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertArrayEquals(input, output, 0.0f)
}
@Test def injectGroupExactlyOneIteration(): Unit = {
val inputSize = 1024
val input = Array.tabulate(inputSize)(_.toFloat)
val f = fun(
ArrayTypeWSWC(Float, SizeVar("N")),
in => MapWrg( MapLcl(id)) o Split(128) $ in
)
val inputs = Seq(input)
val (output, _, code) = Utils.execute(f, inputs, 128, inputSize, (true, true))
assertEquals(0, "for\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertEquals(0, "if\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertArrayEquals(input, output, 0.0f)
}
@Test def injectGroupLessThanOneIteration(): Unit ={
val inputSize = 1024
val input = Array.tabulate(inputSize)(_.toFloat)
val f = fun(
ArrayTypeWSWC(Float, SizeVar("N")),
in => MapWrg( MapLcl(id)) o Split(128) $ in
)
val inputs = Seq(input)
val (output, _, code) = Utils.execute(f, inputs, 128, inputSize*2, (true, true))
assertEquals(0, "for\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertEquals(1, "if\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertArrayEquals(input, output, 0.0f)
}
@Test def injectGroupMoreThanOneIteration(): Unit ={
val inputSize = 1024
val input = Array.tabulate(inputSize)(_.toFloat)
val f = fun(
ArrayTypeWSWC(Float, SizeVar("N")),
in => MapWrg( MapLcl(id)) o Split(128) $ in
)
val inputs = Seq(input)
val (output, _, code) = Utils.execute(f, inputs, 128, inputSize/2, (true, true))
assertEquals(1, "for\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertEquals(0, "if\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertArrayEquals(input, output, 0.0f)
}
@Test def injectGroupSameExpressionTwice(): Unit = {
val inputSize = 1024
val input = Array.tabulate(inputSize)(_.toFloat)
val f = fun(
ArrayTypeWSWC(Float, SizeVar("N")),
in => MapWrg( MapLcl(id)) o Split(128) $ in
)
val inputs = Seq(input)
val (output, _, code) = Utils.execute(f, inputs, 128, inputSize, (true, true))
assertEquals(0, "for\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertEquals(0, "if\\\\s*\\\\(".r.findAllMatchIn(code).length)
assertArrayEquals(input, output, 0.0f)
val (output2, _, code2) = Utils.execute(f, inputs, 128, inputSize, (true, true))
assertEquals(0, "for\\\\s*\\\\(".r.findAllMatchIn(code2).length)
assertEquals(0, "if\\\\s*\\\\(".r.findAllMatchIn(code2).length)
assertArrayEquals(input, output2, 0.0f)
}
}
| lift-project/lift | src/test/opencl/generator/TestInject.scala | Scala | mit | 4,838 |
package clients
import io.flow.token.v0.interfaces.{Client => TokenClient}
import play.api.{Environment, Configuration, Mode}
import play.api.inject.Module
class TokenClientModule extends Module {
def bindings(env: Environment, conf: Configuration) = {
env.mode match {
case Mode.Prod | Mode.Dev => Seq(
bind[TokenClient].to[DefaultTokenClient]
)
case Mode.Test => Seq(
// TODO: Add mock
bind[TokenClient].to[DefaultTokenClient]
)
}
}
}
| flowvault/proxy | app/clients/Bindings.scala | Scala | mit | 501 |
/*
* Copyright (c) 2014-2018 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.observables
import monix.execution.{Ack, Cancelable}
import monix.reactive.Observable
import monix.reactive.observers.Subscriber
import monix.execution.atomic.Atomic
import scala.annotation.tailrec
import scala.concurrent.Future
/** A `RefCountObservable` is an observable that wraps a
* [[ConnectableObservable]], initiating the connection on the first
* `subscribe()` and then staying connected as long as there is at least
* one subscription active.
*
* @param source - the connectable observable we are wrapping
*/
final class RefCountObservable[+A] private (source: ConnectableObservable[A])
extends Observable[A] {
private[this] val refs = Atomic(-1)
private[this] lazy val connection: Cancelable =
source.connect()
@tailrec
def unsafeSubscribeFn(subscriber: Subscriber[A]): Cancelable = {
val current = refs.get
val update = current match {
case x if x < 0 => 1
case 0 => 0
case x => x + 1
}
if (update == 0) {
source.unsafeSubscribeFn(subscriber)
}
else if (!refs.compareAndSet(current, update)) {
// retry
unsafeSubscribeFn(subscriber)
} else {
implicit val s = subscriber.scheduler
// Protecting the countdown call is important, otherwise canceling this
// subscription can be concurrent with a downstream stop.
val countdown = Cancelable(() => countDownToConnectionCancel())
// Subscribing and triggering connect() if this is the first subscription
val ret = source.unsafeSubscribeFn(wrap(subscriber, countdown))
if (current == -1) connection // triggers connect()
// A composite that both cancels this subscription and does the countdown
Cancelable { () => try ret.cancel() finally countdown.cancel() }
}
}
private def wrap[U >: A](downstream: Subscriber[U], subscription: Cancelable): Subscriber[U] =
new Subscriber[U] {
implicit val scheduler = downstream.scheduler
def onNext(elem: U): Future[Ack] = {
downstream.onNext(elem)
.syncOnStopOrFailure(_ => subscription.cancel())
}
def onError(ex: Throwable): Unit = {
try downstream.onError(ex) finally
subscription.cancel()
}
def onComplete(): Unit = {
try downstream.onComplete() finally
subscription.cancel()
}
}
@tailrec
private[this] def countDownToConnectionCancel(): Unit = refs.get match {
case x if x > 0 =>
val update = x-1
if (!refs.compareAndSet(x, update))
countDownToConnectionCancel()
else if (update == 0)
connection.cancel()
case 0 => ()
case negative =>
throw new IllegalStateException(s"refs=$negative (after init)")
}
}
object RefCountObservable {
/** Builder for [[RefCountObservable]] */
def apply[A](connectable: ConnectableObservable[A]): Observable[A] =
new RefCountObservable(connectable)
}
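/** Hedged usage sketch (not part of the original file): share a single upstream
  * subscription among all downstream subscribers. Assumes the standard
  * `Observable.publish` operator and an implicit `Scheduler` in scope.
  */
private[observables] object RefCountObservableExample {
  import monix.execution.Scheduler.Implicits.global
  def shared[A](source: Observable[A]): Observable[A] =
    RefCountObservable(source.publish)
}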
| Wogan/monix | monix-reactive/shared/src/main/scala/monix/reactive/observables/RefCountObservable.scala | Scala | apache-2.0 | 3,609 |
package bynull.hackerrank.swap_nodes
/**
* Created by null on 7/17/16.
*/
object SwapNodesApp extends App {
val oldIn = System.in
val dataUrl = ClassLoader.getSystemResource("swap_nodes.txt")
try {
System.setIn(dataUrl.openStream())
Solution.main(Array())
} finally {
System.setIn(oldIn)
}
}
object Solution {
def main(args: Array[String]) {
val lines = io.Source.stdin.getLines()
val nodes = lines.take(1).toSeq.head.toInt
//val data = lines.take(nodes).toList
var tree = Node(1)
tree = funcFill(tree, readValues() /*v(2,3)*/)
println(tree)
def funcFill(parent: Node[Int], lr: (Int, Int)): Node[Int] = {
lr match {
case (l, r) if l == -1 && r == -1 =>
parent
case (l, r) if r == -1 =>
println("Only left: " + l)
val left = parent.left(l)
funcFill(left, readValues())
case (l, r) if l == -1 =>
println("Only right: " + r)
val right = parent.right(r)
funcFill(right, readValues())
case (l: Int, r: Int) => //s1
println("Two nodes: " + l + ", " + r)
val p = parent.leftRight(l, r)
val leftSubtree = readValues() //v(4, -1)
val rightSubtree = readValues() //v(5, -1)
p.leftRightNode(p.l.map(ll => funcFill(ll, leftSubtree)), p.r.map(rr => funcFill(rr, rightSubtree)))
}
}
def readValues(): (Int, Int) = {
val values = lines.take(1).toSeq.head.split(' ')
values(0).toInt -> values(1).toInt
}
}
trait Tree[A]
case class Node[A](value: A, l: Option[Node[A]] = None, r: Option[Node[A]] = None) extends Tree[A] {
def left(v: A) = Node(value, newNode(v), r)
def right(v: A) = Node(value, l, newNode(v))
def leftRight(l: A, r: A): Node[A] = Node(value, newNode(l), newNode(r))
def leftRightNode(lNode: Option[Node[A]], rNode: Option[Node[A]]): Node[A] = Node(value, lNode, rNode)
private def newNode(v: A): Option[Node[A]] = if (v == -1) None else Some(Node(v))
override def toString: String = {
s"*\\n${toStr(0)}"
}
private def toStr(offset: Int): String = {
val offsetStr = if (offset == 0) "" else (1 to offset).map(x => " ").mkString
val vStr = s"|$offsetStr${if (offset == 0) "" else "|"}-$value"
val lStr = s"${l.map(n => n.toStr(offset + 2)).getOrElse("")}"
val rStr = s"${r.map(n => n.toStr(offset + 2)).getOrElse("")}"
s"$vStr\\n$lStr\\n$rStr"
}
}
} | xnull/programming | scala/src/main/scala/bynull/hackerrank/swap_nodes/Solution.scala | Scala | apache-2.0 | 2,486 |
package todo
import com.github.plokhotnyuk.jsoniter_scala.core.{JsonReader, JsonValueCodec, JsonWriter}
import com.github.plokhotnyuk.jsoniter_scala.macros.JsonCodecMaker
import loci.transmitter.IdenticallyTransmittable
import rescala.extra.lattices.delta.JsoniterCodecs._
import rescala.extra.lattices.delta.crdt.reactive
import rescala.extra.lattices.delta.crdt.reactive.{LWWRegister, RGA}
import todo.Todolist.replicaId
object Codecs {
implicit val taskRefCodec: JsonValueCodec[TaskRef] = JsonCodecMaker.make
implicit val codecState: JsonValueCodec[RGA.State[TaskRef]] = RGAStateCodec
implicit val codecRGA: JsonValueCodec[RGA[TaskRef]] =
new JsonValueCodec[RGA[TaskRef]] {
override def decodeValue(
in: JsonReader,
default: RGA[TaskRef]
): RGA[TaskRef] = {
val state = codecState.decodeValue(in, default.state)
new RGA[TaskRef](state, replicaId, List())
}
override def encodeValue(x: RGA[TaskRef], out: JsonWriter): Unit =
codecState.encodeValue(x.state, out)
override def nullValue: RGA[TaskRef] = RGA[TaskRef](replicaId)
}
implicit val transmittableList: IdenticallyTransmittable[RGA.State[TaskRef]] =
IdenticallyTransmittable()
implicit val todoTaskCodec: JsonValueCodec[TaskData] = JsonCodecMaker.make
implicit val codecLwwState: JsonValueCodec[LWWRegister.State[TaskData]] = JsonCodecMaker.make
implicit val transmittableLWW: IdenticallyTransmittable[LWWRegister.State[TaskData]] =
IdenticallyTransmittable()
type LwC = LWWRegister[TaskData]
implicit val codecLww: JsonValueCodec[LwC] =
new JsonValueCodec[LwC] {
override def decodeValue(in: JsonReader, default: LwC): LwC = {
val state: reactive.LWWRegister.State[TaskData] = codecLwwState.decodeValue(in, default.state)
new LWWRegister[TaskData](state, replicaId, List())
}
override def encodeValue(x: LwC, out: JsonWriter): Unit = codecLwwState.encodeValue(x.state, out)
override def nullValue: LwC = {
println(s"reading null")
LWWRegister[TaskData](replicaId)
}
}
}
| guidosalva/REScala | Code/Examples/Todolist/src/main/scala/todo/Codecs.scala | Scala | apache-2.0 | 2,117 |
package stronghold.massSpectrometry
/**
* problem description: http://rosalind.info/problems/conv/
*/
object SpectralConvolution {
object SampleData {
val sample: List[String] =
List(
"186.07931 287.12699 548.20532 580.18077 681.22845 706.27446 782.27613 968.35544 968.35544",
"101.04768 158.06914 202.09536 318.09979 419.14747 463.17369"
)
}
import SampleData.sample
import utils.UtilityFunctions.readInputData
val inputFileName: String = "/stronghold/datasets/rosalind_conv.txt"
def getData(isPractice: Boolean): List[List[Double]] = {
val data: List[String] = if (isPractice) sample else readInputData(inputFileName)
data.map(_.split(" ").map(_.toDouble).toList)
}
def round(number: Double, digits: Int): Double = math.round(number * math.pow(10, digits)) / math.pow(10, digits)
def calcConvolutionOfSpectra(spectrum1: List[Double], spectrum2: List[Double]): (Int, Double) = {
val convolution: Map[String, Int] = (for {
mass1 <- spectrum1
mass2 <- spectrum2
} yield round(mass1 - mass2, 4).toString).groupBy(identity).mapValues(_.length)
val (shift, multiplicity): (String, Int) = convolution.maxBy{ case (_, count) => count }
(multiplicity, math.abs(shift.toDouble))
}
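  // Hedged illustration (not part of the original solution): run the convolution on the
  // bundled sample spectra; the returned pair is (multiplicity, shift) for that sample.
  def sampleRun(): (Int, Double) = {
    val List(spectrum1, spectrum2): List[List[Double]] = getData(isPractice = true)
    calcConvolutionOfSpectra(spectrum1, spectrum2)
  }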
def main(args: Array[String]): Unit = {
val List(spectrum1, spectrum2): List[List[Double]] = getData(isPractice = false)
val (multiplicity, shift): (Int, Double) = calcConvolutionOfSpectra(spectrum1, spectrum2)
println(multiplicity)
println(shift)
}
}
| ghostrider77/Bioinformatics | Bioinformatics/src/main/scala-2.11/stronghold/massSpectrometry/SpectralConvolution.scala | Scala | mit | 1,547 |
/**
* Copyright 2009 Robey Pointer <[email protected]>
* Copyright 2012-2013 Alexey Aksenov <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.digimead.configgy
import java.net.InetAddress
import scala.collection.{ immutable, mutable }
import scala.collection.JavaConversions._
import java.lang.UnsupportedOperationException
import org.slf4j.LoggerFactory
/**
* A ConfigMap that wraps the system environment. This is used as a
* fallback when looking up "$(...)" substitutions in config files.
*/
object EnvironmentAttributes extends ConfigMap {
protected val log = LoggerFactory.getLogger(getClass)
private val env = immutable.Map.empty[String, String] ++ (System.getenv()).iterator
// deal with java.util.Properties extending
// java.util.Hashtable[Object, Object] and not
// java.util.Hashtable[String, String]
private def getSystemProperties(): mutable.HashMap[String, String] = {
val map = new mutable.HashMap[String, String]
for (entry <- System.getProperties().iterator) {
entry match {
case (k: String, v: String) => map.put(k, v)
case _ =>
}
}
map
}
def getName() = ""
def getString(key: String): Option[String] = {
getSystemProperties().get(key).orElse(env.get(key))
}
def getConfigMap(key: String): Option[ConfigMap] = None
def configMap(key: String): ConfigMap = throw new UnsupportedOperationException("not implemented")
def getList(key: String): Seq[String] = getString(key) match {
case None => Array[String]()
case Some(x) => Array[String](x)
}
def setString(key: String, value: String): Unit = throw new UnsupportedOperationException("read-only attributes")
def setList(key: String, value: Seq[String]): Unit = throw new UnsupportedOperationException("read-only attributes")
def setConfigMap(key: String, value: ConfigMap): Unit = throw new UnsupportedOperationException("read-only attributes")
def contains(key: String): Boolean = {
env.contains(key) || getSystemProperties().contains(key)
}
def remove(key: String): Boolean = throw new UnsupportedOperationException("read-only attributes")
def clear() = throw new UnsupportedOperationException("not implemented")
def keys: Iterator[String] = (getSystemProperties().keySet ++ env.keySet).iterator
def asMap(): Map[String, String] = throw new UnsupportedOperationException("not implemented")
def toConfigString = throw new UnsupportedOperationException("not implemented")
def subscribe(subscriber: Subscriber): SubscriptionKey = throw new UnsupportedOperationException("not implemented")
def copy(): ConfigMap = this
def copyInto[T <: ConfigMap](m: T) = m
def inheritFrom: Option[ConfigMap] = None
def inheritFrom_=(config: Option[ConfigMap]) = throw new UnsupportedOperationException("not implemented")
def dump(): String = throw new UnsupportedOperationException("not implemented")
}
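/**
 * Hedged usage sketch (not part of the original file): system properties take
 * precedence over the process environment when both define the same key.
 */
private object EnvironmentAttributesExample {
  def javaVersion: Option[String] = EnvironmentAttributes.getString("java.version")
  def path: Option[String] = EnvironmentAttributes.getString("PATH")
}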
| ezh/digi-configgy | src/main/scala/org/digimead/configgy/EnvironmentAttributes.scala | Scala | apache-2.0 | 3,434 |
package com.gu.pandomainauth
import java.util.concurrent.atomic.AtomicReference
import java.util.concurrent.{Executors, ScheduledExecutorService, TimeUnit}
import com.amazonaws.services.s3.AmazonS3
import com.gu.pandomainauth.model.PanDomainAuthSettings
import org.slf4j.LoggerFactory
import scala.language.postfixOps
/**
* PanDomainAuthSettingsRefresher will periodically refresh the pan domain settings and expose them via the "settings" method
*
* @param domain the domain you are authenticating against
* @param system the identifier for your app, typically the same as the subdomain your app runs on
* @param bucketName the bucket where the settings are stored
* @param settingsFileKey the name of the file that contains the private settings for the given domain
* @param s3Client the AWS S3 client that will be used to download the settings from the bucket
* @param scheduler optional scheduler that will be used to run the code that updates the bucket
*/
class PanDomainAuthSettingsRefresher(
val domain: String,
val system: String,
val bucketName: String,
settingsFileKey: String,
val s3Client: AmazonS3,
scheduler: ScheduledExecutorService = Executors.newScheduledThreadPool(1)
) {
private val logger = LoggerFactory.getLogger(this.getClass)
// This is deliberately designed to throw an exception during construction if we cannot immediately read the settings
private val authSettings: AtomicReference[PanDomainAuthSettings] = new AtomicReference[PanDomainAuthSettings](loadSettings() match {
case Right(settings) => PanDomainAuthSettings(settings)
case Left(err) => throw Settings.errorToThrowable(err)
})
scheduler.scheduleAtFixedRate(() => refresh(), 1, 1, TimeUnit.MINUTES)
def settings: PanDomainAuthSettings = authSettings.get()
private def loadSettings(): Either[SettingsFailure, Map[String, String]] = {
Settings.fetchSettings(settingsFileKey, bucketName, s3Client).flatMap(Settings.extractSettings)
}
private def refresh(): Unit = {
loadSettings() match {
case Right(settings) =>
logger.debug(s"Updated pan-domain settings for $domain")
authSettings.set(PanDomainAuthSettings(settings))
case Left(err) =>
logger.error(s"Failed to update pan-domain settings for $domain")
Settings.logError(err, logger)
}
}
}
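/**
 * Hedged construction sketch (not from the original source): the domain, system, bucket
 * and settings key below are illustrative placeholders, and the default AWS client is
 * only one possible way to obtain an AmazonS3 instance.
 */
private object PanDomainAuthSettingsRefresherExample {
  import com.amazonaws.services.s3.AmazonS3ClientBuilder
  def build(): PanDomainAuthSettingsRefresher =
    new PanDomainAuthSettingsRefresher(
      domain = "example.com",
      system = "example-app",
      bucketName = "pan-domain-auth-settings",
      settingsFileKey = "example.com.settings",
      s3Client = AmazonS3ClientBuilder.defaultClient()
    )
}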
| guardian/pan-domain-authentication | pan-domain-auth-core/src/main/scala/com/gu/pandomainauth/PanDomainAuthSettingsRefresher.scala | Scala | apache-2.0 | 2,354 |
package models.oauth2
import play.api.libs.ws.WS
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import play.api.libs.json.Json
import play.api.libs.json.JsObject
import scala.concurrent.Future
object GoogleOAuth2 extends OAuth2 {
  // defined at:
// https://developers.google.com/
// https://console.developers.google.com/project
//
  override val clientID = "---> PUT YOUR CLIENT ID HERE <---"
  override val clientSecret = "---> PUT YOUR CLIENT SECRET HERE <---"
override val callbackURL = "http://localhost:9000/callback"
override val loginURL = s"https://accounts.google.com/o/oauth2/auth?client_id=$clientID&response_type=code&scope=openid email&redirect_uri=$callbackURL"
def usuario(code: String): Future[Option[Usuario]] = {
for (
optAccessToken <- obtenhaAccessToken(code);
optDados <- obtenhaDadosDoUsuario(optAccessToken);
optUsuario <- obtenhaUsuario(optDados)
) yield optUsuario
}
private def obtenhaUsuario(optDados: Option[JsObject]) = {
optDados match {
case None => Future.successful(None)
case Some(dados) => {
val nome = (dados \ "nome").as[String]
val at = (dados \ "at").as[String]
val pic = (dados \ "pic").as[String]
val email = (dados \ "email").as[String]
        val chave = email // could be an encoded piece of information
Future.successful(Some(Usuario(chave, at, nome, pic, email)))
}
}
}
private def obtenhaDadosDoUsuario(optAccessToken: Option[String]): Future[Option[JsObject]] = {
optAccessToken match {
case None => Future.successful(None)
case Some(accessToken) => {
        import play.api.Play.current // needed because of WS
val url = s"https://www.googleapis.com/oauth2/v2/userinfo?access_token=$accessToken"
WS.url(url).get.map(resposta => {
val respJson = resposta.json
Some(Json.obj("at" -> accessToken, "nome" -> respJson \ "name", "pic" -> respJson \ "picture", "email" -> respJson \ "email"))
})
}
}
}
private def obtenhaAccessToken(code: String): Future[Option[String]] = {
import play.api.Play.current
val params = Map("code" -> Seq(code),
"client_id" -> Seq(clientID),
"client_secret" -> Seq(clientSecret),
"redirect_uri" -> Seq(callbackURL),
"grant_type" -> Seq("authorization_code"))
WS.url("https://accounts.google.com/o/oauth2/token").post(params).map(resposta => (resposta.json \ "access_token").asOpt[String])
}
} | w2srobinho/ine5646 | origem_destino/app/models/oauth2/GoogleOAuth2.scala | Scala | mit | 2,610 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.security
import java.util
import scala.collection.JavaConversions._
/**
* AuthorizationsProvider that wraps another provider and ensures that the auths returned do not exceed a pre-set list
*/
class FilteringAuthorizationsProvider (val wrappedProvider: AuthorizationsProvider)
extends AuthorizationsProvider {
private var filter: Option[Array[String]] = None
override def getAuthorizations: util.List[String] =
filter match {
case None => wrappedProvider.getAuthorizations
case Some(f) => wrappedProvider.getAuthorizations.intersect(f)
}
override def configure(params: java.util.Map[String, java.io.Serializable]): Unit = {
filter = AuthsParam.lookupOpt(params).filterNot(_.isEmpty).map(_.split(","))
wrappedProvider.configure(params)
}
}
| jahhulbert-ccri/geomesa | geomesa-security/src/main/scala/org/locationtech/geomesa/security/FilteringAuthorizationsProvider.scala | Scala | apache-2.0 | 1,288 |
class A
object A
| sbt/sbt | sbt-app/src/sbt-test/tests/bak/p1/src/config-9/scala/A.scala | Scala | apache-2.0 | 17 |
package com.github.bruneli.scalaopt.core.discrete
import com.github.bruneli.scalaopt.core.constraint.{CP, Constraint}
import com.github.bruneli.scalaopt.core.function.ObjectiveFunction
import com.github.bruneli.scalaopt.core.linalg.DenseVectorLike
import com.github.bruneli.scalaopt.core.variable.{ContinuousVariable, DiscreteVariable, Variable, Variables}
/**
* Mixed Integer Program
*
* @tparam A objective function type
* @tparam B constraint type
* @tparam R relaxation program type
* @author bruneli
*/
trait MIP[A <: ObjectiveFunction[Variable],
B <: Constraint[Variable],
R <: CP[ContinuousVariable, _, _]] extends CP[Variable, A, B] {
self =>
val matchingPrecision: Double
lazy val discreteVariableIndices = getDiscreteVariableIndices()
def variables: Variables[Variable]
def relaxationProgram: R
def adaptVariablesTo(solution: DenseVectorLike[Variable]): Variables[Variable] = {
variables.mapWithIndex((x, i) => solution.coordinate(i))
}
def acceptsAsFeasibleSolution(solution: DenseVectorLike[ContinuousVariable]): Boolean = {
discreteVariableIndices.forall(
i => isDiscrete(solution(i), variables(i).asInstanceOf[DiscreteVariable]))
}
private def isDiscrete(candidate: Variable,
variable: DiscreteVariable): Boolean = {
val dxLow = variable.floor(candidate).map(candidate.x - _.x)
val dxHigh = variable.ceil(candidate).map(_.x - candidate.x)
dxLow.exists(_ < matchingPrecision) || dxHigh.exists(_ < matchingPrecision)
}
private def getDiscreteVariableIndices(): Seq[Int] = {
for {(variable, index) <- variables.zipWithIndex
if variable.isInstanceOf[DiscreteVariable]} yield {
index
}
}
}
| bruneli/scalaopt | core/src/main/scala/com/github/bruneli/scalaopt/core/discrete/MIP.scala | Scala | apache-2.0 | 1,744 |
package com.nulabinc.backlog.r2b.exporter.core
import java.io.PrintStream
import com.google.inject.Guice
import com.nulabinc.backlog.migration.common.conf.ExcludeOption
import com.nulabinc.backlog.migration.common.domain.{BacklogProjectKey, BacklogTextFormattingRule}
import com.nulabinc.backlog.migration.common.dsl.ConsoleDSL
import com.nulabinc.backlog.migration.common.utils.{ConsoleOut, Logging}
import com.nulabinc.backlog.r2b.exporter.conf.ExportConfig
import com.nulabinc.backlog.r2b.exporter.modules.RedmineModule
import com.nulabinc.backlog.r2b.exporter.service.ProjectExporter
import com.nulabinc.backlog.r2b.mapping.core.MappingContainer
import com.nulabinc.backlog.r2b.redmine.conf.RedmineApiConfiguration
import com.osinka.i18n.Messages
import monix.eval.Task
import monix.execution.Scheduler
/**
* @author
* uchida
*/
object Boot extends Logging {
def execute(
apiConfig: RedmineApiConfiguration,
mappingContainer: MappingContainer,
backlogProjectKey: BacklogProjectKey,
backlogTextFormattingRule: BacklogTextFormattingRule,
exclude: ExcludeOption
)(implicit s: Scheduler, consoleDSL: ConsoleDSL[Task]): PrintStream = {
try {
val injector =
Guice.createInjector(
new RedmineModule(
apiConfig,
mappingContainer,
backlogProjectKey,
backlogTextFormattingRule,
ExportConfig(exclude)
)
)
ConsoleOut.println(
s"""
|${Messages("export.start")}
|--------------------------------------------------""".stripMargin
)
val projectExporter = injector.getInstance(classOf[ProjectExporter])
projectExporter.boot(mappingContainer)
ConsoleOut.println(s"""--------------------------------------------------
|${Messages("export.finish")}""".stripMargin)
} catch {
case e: Throwable =>
ConsoleOut.error(s"${Messages("cli.error.unknown")}:${e.getMessage}")
throw e
}
}
}
| nulab/BacklogMigration-Redmine | src/main/scala/com/nulabinc/backlog/r2b/exporter/core/Boot.scala | Scala | mit | 2,071 |
/**
* A simple text based RPG
*
* @package simplerpg
* @copyright 2015
*/
package simplerpg
case class NPC(name: String, health: Long, damage: Long) extends Character {
def isPlayerControlled(): Boolean = false
def askForCommands(): Array[String] = Array.empty[String]
}
| mcross1882/SimpleRPG | src/main/scala/simplerpg/player/NPC.scala | Scala | mit | 289 |
package cgta.otest
package runner
import sbt.testing.{Task, SubclassFingerprint, TaskDef, Logger, EventHandler}
import cgta.otest.runner.TestResults.{FailedFatalException, Ignored, FailedBad, Passed, FailedAssertion, FailedUnexpectedException}
import org.scalajs.testinterface.TestUtils
//////////////////////////////////////////////////////////////
// Copyright (c) 2014 Ben Jackman, Jeff Gomberg
// All Rights Reserved
// please contact [email protected] or [email protected]
// for licensing inquiries
// Created by bjackman @ 5/28/14 12:00 PM
//////////////////////////////////////////////////////////////
class OtestTask(
val taskDef: TaskDef,
tracker: TestResultTracker,
testClassLoader: ClassLoader) extends sbt.testing.Task {
override def tags(): Array[String] = Array()
def execute(eventHandler: EventHandler, loggers: Array[Logger], continuation: (Array[Task]) => Unit): Unit = {
continuation(execute(eventHandler, loggers))
}
override def execute(eventHandler: EventHandler, loggers: Array[Logger]): Array[Task] = {
tracker.begin()
val name = taskDef.fullyQualifiedName()
taskDef.fingerprint() match {
case fingerprint: SubclassFingerprint if fingerprint.superclassName() == FrameworkHelp.funSuiteName =>
if (fingerprint.isModule) {
TestUtils.loadModule(name, testClassLoader) match {
case m: FunSuite =>
runSuite(eventHandler, m, loggers)(taskDef)
case x =>
sys.error(s"Cannot test $taskDef of type: $x")
}
} else {
sys.error("FunSuite only works on objects, classes don't work.")
}
case _ =>
}
Array()
}
def runSuite(eventHandler: EventHandler, s: FunSuite, loggers: Array[Logger])(implicit taskDef: TaskDef) {
val st = tracker.newSuiteTracker(taskDef, eventHandler)
try {
val hasOnly = s.SuiteImpl.tests.exists(_.only)
for (test <- s.SuiteImpl.tests) {
runTest(test, st, hasOnly)
}
tracker.Suites.completed += 1
} finally {
st.logResults(s.SuiteImpl.simpleName, loggers)
}
}
def runTest(test: TestWrapper, st: TestResultTracker#SuiteTracker, hasOnly: Boolean)(implicit taskDef: TaskDef) = {
val startUtcMs = System.currentTimeMillis()
def durMs = System.currentTimeMillis() - startUtcMs
if (hasOnly && !test.only) {
st.addResult(Ignored(test.name, becauseOnly = true))
} else if (test.ignored) {
st.addResult(Ignored(test.name, becauseOnly = false))
} else {
try {
test.body()
st.addResult(if (test.bad) FailedBad(test.name, durMs) else Passed(test.name, durMs))
} catch {
case e: AssertionFailureException =>
st.addResult(if (test.bad) Passed(test.name, durMs) else FailedAssertion(test.name, e, durMs))
case e if CatchableThrowable(e) =>
st.addResult(FailedUnexpectedException(test.name, e, durMs))
case e: Throwable =>
st.addResult(FailedFatalException(test.name, e, durMs))
tracker.Suites.aborted += 1
throw e
}
}
}
} | cgta/otest | otest/shared/src/main/scala/cgta/otest/runner/OtestTask.scala | Scala | mit | 3,112 |
/*
* Copyright 2014 Frugal Mechanic (http://frugalmechanic.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fm.lazyseq
/**
* Used for LazySeq.++
*/
final private class AppendedLazySeq[A, B >: A](left: LazySeq[A], right: LazySeq[B]) extends LazySeq[B] {
final def foreach[U](f: B => U): Unit = {
left foreach f
right foreach f
}
}
| frugalmechanic/fm-lazyseq | src/main/scala/fm/lazyseq/AppendedLazySeq.scala | Scala | apache-2.0 | 872 |
package tutorial.webapp
import org.scalajs.dom
import dom.document
import org.scalajs.jquery.jQuery
import scala.scalajs.js.annotation.JSExportTopLevel
object TutorialApp {
/*def main(args: Array[String]): Unit = {
appendPar(document.body, "Hello, World")
}*/
def appendPar(targetNode: dom.Node, text: String): Unit = {
val parNode = document.createElement("p")
val textNode = document.createTextNode(text)
parNode.appendChild(textNode)
targetNode.appendChild(parNode)
}
@JSExportTopLevel("addClickedMessage")
def addClickedMessage(): Unit = {
//appendPar(document.body, "You clicked the button!")
jQuery("body").append("<p>[message]</p>")
}
}
| abdheshkumar/scalajs-practices | client/src/main/scala/tutorial/webapp/TutorialApp.scala | Scala | apache-2.0 | 690 |
package io.scalajs.npm.mongodb
import scala.scalajs.js
import scala.scalajs.js.annotation.JSGlobal
/**
* Unordered Bulk Operation
* @author [email protected]
*/
@js.native
@JSGlobal("UnorderedBulkOperation")
class UnorderedBulkOperation() extends js.Object {
/**
* Execute the ordered bulk operation
* @param options Optional settings.
* @param callback The command result callback
* @example execute(options, callback)
*/
def execute(options: js.Any, callback: js.Function): Unit = js.native
/**
* Initiate a find operation for an update/updateOne/remove/removeOne/replaceOne
* @param selector The selector for the bulk operation.
* @throws MongoError ()
* @example find(selector)
*/
def find(selector: js.Any): Unit = js.native
/**
* Add a single insert document to the bulk operation
* @param doc the document to insert
* @throws MongoError ()
* @example insert(doc)
*/
def insert(doc: js.Any): Unit = js.native
}
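// Hedged usage sketch, not part of the facade. It assumes a `collection: Collection` value and
// that this library mirrors the Node.js driver's `initializeUnorderedBulkOp()` factory -- both
// are assumptions for illustration only:
//   val bulk: UnorderedBulkOperation = collection.initializeUnorderedBulkOp()
//   bulk.insert(js.Dynamic.literal(name = "a"))
//   bulk.insert(js.Dynamic.literal(name = "b"))
//   bulk.execute(js.Dynamic.literal(), (err: js.Any, result: js.Any) => ())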
| scalajs-io/mongodb | src/main/scala/io/scalajs/npm/mongodb/UnorderedBulkOperation.scala | Scala | apache-2.0 | 1,011 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package xml
package factory
import parsing.ValidatingMarkupHandler
/**
* @author Burak Emir
*/
abstract class Binder(val preserveWS: Boolean) extends ValidatingMarkupHandler {
var result: NodeBuffer = new NodeBuffer()
def reportSyntaxError(pos: Int, str: String) = {}
final def procInstr(pos: Int, target: String, txt: String) =
ProcInstr(target, txt)
final def comment(pos: Int, txt: String) =
Comment(txt)
final def entityRef(pos: Int, n: String) =
EntityRef(n)
final def text(pos: Int, txt: String) =
Text(txt)
final def traverse(n: Node): Unit = n match {
case x: ProcInstr =>
result &+ procInstr(0, x.target, x.text)
case x: Comment =>
result &+ comment(0, x.text)
case x: Text =>
result &+ text(0, x.data)
case x: EntityRef =>
result &+ entityRef(0, x.entityName)
case x: Elem =>
elemStart(0, x.prefix, x.label, x.attributes, x.scope)
val old = result
result = new NodeBuffer()
for (m <- x.child) traverse(m)
result = old &+ elem(0, x.prefix, x.label, x.attributes, x.scope, x.minimizeEmpty, NodeSeq.fromSeq(result)).toList
elemEnd(0, x.prefix, x.label)
}
final def validate(n: Node): Node = {
this.rootLabel = n.label
traverse(n)
result(0)
}
}
| som-snytt/scala-xml | src/main/scala/scala/xml/factory/Binder.scala | Scala | bsd-3-clause | 1,832 |
package actor.actor_mes
import scala.actors.Actor
/**
* Created by fqc on 2016/7/27.
*/
object Server extends Actor {
override def act(): Unit = {
loop(react {
case Person(name, age) => {
println(s"name = $name ,age = $age")
        sender ! "copy that" // reply to the sender through its handle
}
      case _ => println("unknown") // a default case is required, otherwise unmatched messages pile up in the actor's mailbox and never get processed
})
}
}
| fqc/Scala_sidepro | src/actor/actor_mes/Server.scala | Scala | mit | 478 |
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.std.register.model
import java.time.Instant
import org.beangle.data.model.LongId
import org.beangle.data.model.pojo.Remark
import org.openurp.code.std.model.{UncheckinReason, UnregisteredReason}
import org.openurp.base.edu.model.Semester
import org.openurp.base.edu.model.Student
class Register extends LongId with Remark {
var std: Student = _
var semester: Semester = _
var registerAt: Option[Instant] = None
var registered: Option[Boolean] = None
var checkin: Option[Boolean] = None
  /** Whether tuition has been paid */
var tuitionPaid: Option[Boolean] = None
  /** Reason for not registering */
var unregisteredReason: Option[UnregisteredReason] = None
  /** Reason for not checking in */
var uncheckinReason: Option[UncheckinReason] = None
  /** Operator */
var operateBy: String = _
  /** Operator IP address */
var operateIp: String = _
}
| openurp/api | std/src/main/scala/org/openurp/std/register/model/Register.scala | Scala | lgpl-3.0 | 1,574 |
package dsmoq.exceptions
/**
 * Exception thrown when input validation fails.
 *
 * @param errors list of validation errors as (name, message) pairs
*/
class InputValidationException(errors: Iterable[(String, String)]) extends RuntimeException {
val validationErrors = errors
/**
   * Gets the error messages.
*/
def getErrorMessage(): Iterable[InputValidationError] = {
validationErrors.map {
case (name, message) => {
InputValidationError(
name = name,
message = message
)
}
}
}
}
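// Usage sketch (illustrative field names, not from the original project):
//   throw new InputValidationException(Seq("name" -> "must not be empty", "size" -> "must be positive"))
// Callers can then convert the failure into API-friendly objects via getErrorMessage().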
/**
 * Case class representing a single input validation error.
 *
 * @param name name of the target field
 * @param message error message
*/
case class InputValidationError(
name: String,
message: String
)
| nkawa/dsmoq | server/apiServer/src/main/scala/dsmoq/exceptions/InputValidationException.scala | Scala | apache-2.0 | 788 |
package org.scalaide.refactoring.internal
import org.eclipse.ltk.ui.refactoring.RefactoringWizard
/**
* Wraps the `ScalaIdeRefactoring` instance in a wizard and adds
* the pages from the refactoring to the wizard.
*/
class ScalaRefactoringWizard(
refactoring: ScalaIdeRefactoring,
flags: Int = RefactoringWizard.DIALOG_BASED_USER_INTERFACE)
extends RefactoringWizard(refactoring, flags) {
def addUserInputPages(): Unit = {
refactoring.getPages foreach addPage
}
}
| Kwestor/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/refactoring/internal/ScalaRefactoringWizard.scala | Scala | bsd-3-clause | 494 |
package org.mitlware
import scalaz._
import scalaz.MonadState._
// TODO: replace parametric views with proper lenses
trait OrderView[T] {
def getOrder : OrderImpl[T]
def setOrder( order : OrderImpl[T] ) : this.type
}
trait OrderImpl[T] {
def better( x : T, y : T ) : T
}
object Order {
def better[T,Env <: OrderView[T]]( x : T, y : T ) : State[Env,T] = State { s => (s,s.getOrder.better(x,y)) }
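  // Hedged usage sketch; the environment below is illustrative and not part of the library:
  //   class MinEnv extends OrderView[Int] {
  //     private var impl: OrderImpl[Int] = new OrderImpl[Int] { def better(x: Int, y: Int) = math.min(x, y) }
  //     def getOrder = impl
  //     def setOrder(order: OrderImpl[Int]): this.type = { impl = order; this }
  //   }
  //   Order.better[Int, MinEnv](3, 5).run(new MinEnv)   // (env, 3): the env is threaded through, 3 is "better"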
} | MitLware/MitLware-scala | src/org/mitlware/Order.scala | Scala | bsd-3-clause | 411 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime
import java.io.File
import Predef.{ any2stringadd => _, _ => _ }
import org.ensime.api._
import org.ensime.util.file._
package object config {
implicit class RichEnsimeConfig(val c: EnsimeConfig) extends AnyVal {
def scalaSourceFiles: Set[File] = for {
module: EnsimeModule <- c.modules.values.toSet
root <- module.sourceRoots
file <- root.tree
if file.isFile && file.getName.endsWith(".scala")
} yield file
}
}
| d6y/ensime-server | core/src/main/scala/org/ensime/config/package.scala | Scala | gpl-3.0 | 600 |
package qidong.statics
import org.scalatest.FunSuite
import scala.concurrent.Future
import util.Try
class AutoFlattenEffectTest extends FunSuite {
import qidong.pipeline.ops._
test("") {
}
}
| chenharryhua/qidong | src/test/scala/qidong/statics/AutoFlattenEffectTest.scala | Scala | apache-2.0 | 202 |
package org.jetbrains.plugins.scala
package lang
package psi
package types
import com.intellij.ide.highlighter.JavaFileType
import com.intellij.psi._
import com.intellij.psi.util.MethodSignatureUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameters
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunction, ScTypeAlias, ScTypeAliasDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
import org.jetbrains.plugins.scala.lang.psi.types.api.{Any, TypeSystem}
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.{TypeParameter, TypeParameterExt}
import scala.collection.mutable.ArrayBuffer
case class TypeAliasSignature(name: String, typeParams: List[TypeParameter], lowerBound: ScType,
upperBound: ScType, isDefinition: Boolean, ta: ScTypeAlias) {
def this(ta: ScTypeAlias) {
this(ta.name, ta.typeParameters.map(new TypeParameter(_)).toList, ta.lowerBound.getOrNothing,
ta.upperBound.getOrAny, ta.isDefinition, ta)
}
def updateTypes(fun: ScType => ScType, withCopy: Boolean = true): TypeAliasSignature = {
def updateTypeParam(tp: TypeParameter): TypeParameter = {
new TypeParameter(tp.name, tp.typeParams.map(updateTypeParam), {
val res = fun(tp.lowerType())
() => res
}, {
val res = fun(tp.upperType())
() => res
}, tp.ptp)
}
val res = TypeAliasSignature(name, typeParams.map(updateTypeParam), fun(lowerBound), fun(upperBound), isDefinition, ta)
if (withCopy) res.copy(ta = ScTypeAlias.getCompoundCopy(res, ta))
else res
}
def updateTypesWithVariance(fun: (ScType, Int) => ScType, variance: Int, withCopy: Boolean = true): TypeAliasSignature = {
def updateTypeParam(tp: TypeParameter): TypeParameter = {
new TypeParameter(tp.name, tp.typeParams.map(updateTypeParam), () => fun(tp.lowerType(), variance),
() => fun(tp.upperType(), -variance), tp.ptp)
}
val res = TypeAliasSignature(name, typeParams.map(updateTypeParam), fun(lowerBound, variance),
fun(upperBound, -variance), isDefinition, ta)
if (withCopy) res.copy(ta = ScTypeAlias.getCompoundCopy(res, ta))
else res
}
def canEqual(other: Any): Boolean = other.isInstanceOf[TypeAliasSignature]
override def equals(other: Any): Boolean = other match {
case that: TypeAliasSignature =>
(that canEqual this) &&
name == that.name &&
typeParams == that.typeParams &&
lowerBound == that.lowerBound &&
upperBound == that.upperBound &&
isDefinition == that.isDefinition
case _ => false
}
override def hashCode(): Int = {
val state = Seq(name, typeParams, lowerBound, upperBound, isDefinition)
state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
def getType = ta match {
case definition: ScTypeAliasDefinition => definition.aliasedType.toOption
case _ => None
}
}
class Signature(val name: String, private val typesEval: List[Seq[() => ScType]], val paramLength: List[Int],
private val tParams: Array[TypeParameter], val substitutor: ScSubstitutor,
val namedElement: PsiNamedElement, val hasRepeatedParam: Seq[Int] = Seq.empty)
(implicit val typeSystem: TypeSystem) {
def this(name: String, stream: Seq[() => ScType], paramLength: Int, substitutor: ScSubstitutor,
namedElement: PsiNamedElement)
(implicit typeSystem: TypeSystem) =
this(name, List(stream), List(paramLength), Array.empty, substitutor, namedElement)
private def types: List[Seq[() => ScType]] = typesEval
def substitutedTypes: List[Seq[() => ScType]] = types.map(_.map(f => () => substitutor.subst(f())))
def typeParams: Array[TypeParameter] = tParams.map(_.update(substitutor.subst))
def equiv(other: Signature): Boolean = {
def fieldCheck(other: Signature): Boolean = {
def isField(s: Signature) = s.namedElement.isInstanceOf[PsiField]
!isField(this) ^ isField(other)
}
ScalaPsiUtil.convertMemberName(name) == ScalaPsiUtil.convertMemberName(other.name) &&
((typeParams.length == other.typeParams.length && paramTypesEquiv(other)) ||
(paramLength == other.paramLength && javaErasedEquiv(other))) && fieldCheck(other)
}
def javaErasedEquiv(other: Signature): Boolean = {
(this, other) match {
case (ps1: PhysicalSignature, ps2: PhysicalSignature) if ps1.isJava && ps2.isJava =>
val psiSub1 = ScalaPsiUtil.getPsiSubstitutor(ps1.substitutor, ps1.method.getProject, ps1.method.getResolveScope)
val psiSub2 = ScalaPsiUtil.getPsiSubstitutor(ps2.substitutor, ps2.method.getProject, ps2.method.getResolveScope)
val psiSig1 = ps1.method.getSignature(psiSub1)
val psiSig2 = ps2.method.getSignature(psiSub2)
MethodSignatureUtil.METHOD_PARAMETERS_ERASURE_EQUALITY.equals(psiSig1, psiSig2)
case _ => false
}
}
def paramTypesEquiv(other: Signature): Boolean = {
paramTypesEquivExtended(other, new ScUndefinedSubstitutor, falseUndef = true)._1
}
def paramTypesEquivExtended(other: Signature, uSubst: ScUndefinedSubstitutor,
falseUndef: Boolean): (Boolean, ScUndefinedSubstitutor) = {
import org.jetbrains.plugins.scala.lang.psi.types.Signature._
var undefSubst = uSubst
if (paramLength != other.paramLength && !(paramLength.sum == 0 && other.paramLength.sum == 0)) return (false, undefSubst)
if (hasRepeatedParam != other.hasRepeatedParam) return (false, undefSubst)
val unified1 = unify(substitutor, typeParams, typeParams)
val unified2 = unify(other.substitutor, typeParams, other.typeParams)
val clauseIterator = substitutedTypes.iterator
val otherClauseIterator = other.substitutedTypes.iterator
while (clauseIterator.hasNext && otherClauseIterator.hasNext) {
val clause1 = clauseIterator.next()
val clause2 = otherClauseIterator.next()
val typesIterator = clause1.iterator
val otherTypesIterator = clause2.iterator
while (typesIterator.hasNext && otherTypesIterator.hasNext) {
val t1 = typesIterator.next()
val t2 = otherTypesIterator.next()
val tp2 = unified2.subst(t2())
val tp1 = unified1.subst(t1())
var t = tp2.equiv(tp1, undefSubst, falseUndef)
if (!t._1 && tp1.equiv(api.AnyRef) && this.isJava) {
t = tp2.equiv(Any, undefSubst, falseUndef)
}
if (!t._1 && tp2.equiv(api.AnyRef) && other.isJava) {
t = Any.equiv(tp1, undefSubst, falseUndef)
}
if (!t._1) {
return (false, undefSubst)
}
undefSubst = t._2
}
}
(true, undefSubst)
}
override def equals(that: Any) = that match {
case s: Signature => equiv(s) && parameterlessKind == s.parameterlessKind
case _ => false
}
def parameterlessKind: Int = {
namedElement match {
case f: ScFunction if !f.hasParameterClause => 1
case p: PsiMethod => 2
case _ => 3
}
}
override def hashCode: Int = {
simpleHashCode * 31 + parameterlessKind
}
/**
   * Use it while building the class hierarchy,
   * because for the class hierarchy `def foo(): Int` is the same thing as `def foo: Int` and `val foo: Int`.
*/
def simpleHashCode: Int = {
ScalaPsiUtil.convertMemberName(name).hashCode
}
def isJava: Boolean = false
def parameterlessCompatible(other: Signature): Boolean = {
(namedElement, other.namedElement) match {
case (f1: ScFunction, f2: ScFunction) =>
!f1.hasParameterClause ^ f2.hasParameterClause
case (f1: ScFunction, p: PsiMethod) => f1.hasParameterClause
case (p: PsiMethod, f2: ScFunction) => f2.hasParameterClause
case (p1: PsiMethod, p2: PsiMethod) => true
case (p: PsiMethod, _) => false
case (_, f: ScFunction) => !f.hasParameterClause
case (_, f: PsiMethod) => false
case _ => true
}
}
}
object Signature {
def apply(function: ScFunction)(implicit typeSystem: TypeSystem) = new Signature(
function.name,
PhysicalSignature.typesEval(function),
PhysicalSignature.paramLength(function),
TypeParameter.fromArray(function.getTypeParameters),
ScSubstitutor.empty,
function,
PhysicalSignature.hasRepeatedParam(function)
)
def getter(definition: ScTypedDefinition)(implicit typeSystem: TypeSystem) = new Signature(
definition.name,
Seq.empty,
0,
ScSubstitutor.empty,
definition
)
def setter(definition: ScTypedDefinition)(implicit typeSystem: TypeSystem) = new Signature(
s"$definition.name_=",
Seq(() => definition.getType().getOrAny),
1,
ScSubstitutor.empty,
definition
)
def unify(subst: ScSubstitutor, tps1: Array[TypeParameter], tps2: Array[TypeParameter]) = {
var res = subst
val iterator1 = tps1.iterator
val iterator2 = tps2.iterator
while (iterator1.hasNext && iterator2.hasNext) {
val (tp1, tp2) = (iterator1.next(), iterator2.next())
res = res bindT(tp2.nameAndId, tp1.toType)
}
res
}
}
import com.intellij.psi.PsiMethod
object PhysicalSignature {
def typesEval(method: PsiMethod): List[Seq[() => ScType]] = method match {
case fun: ScFunction =>
fun.effectiveParameterClauses.map(clause => ScalaPsiUtil.mapToLazyTypesSeq(clause.effectiveParameters)).toList
case _ => List(ScalaPsiUtil.mapToLazyTypesSeq(method.getParameterList match {
case p: ScParameters => p.params
case p => p.getParameters.toSeq
}))
}
def paramLength(method: PsiMethod): List[Int] = method match {
case fun: ScFunction => fun.effectiveParameterClauses.map(_.effectiveParameters.length).toList
case _ => List(method.getParameterList.getParametersCount)
}
def hasRepeatedParam(method: PsiMethod): Seq[Int] = {
method.getParameterList match {
case p: ScParameters =>
val params = p.params
val res = new ArrayBuffer[Int]()
var i = 0
while (i < params.length) {
if (params(i).isRepeatedParameter) res += i
i += 1
}
res
case p =>
val parameters = p.getParameters
if (parameters.isEmpty) return Seq.empty
if (parameters(parameters.length - 1).isVarArgs) return Seq(parameters.length - 1)
Seq.empty
}
}
def unapply(signature: PhysicalSignature): Option[(PsiMethod, ScSubstitutor)] = {
Some(signature.method, signature.substitutor)
}
}
class PhysicalSignature(val method: PsiMethod, override val substitutor: ScSubstitutor)
(implicit override val typeSystem: TypeSystem)
extends Signature(method.name, PhysicalSignature.typesEval(method), PhysicalSignature.paramLength(method),
TypeParameter.fromArray(method.getTypeParameters), substitutor, method, PhysicalSignature.hasRepeatedParam(method)) {
override def isJava = method.getLanguage == JavaFileType.INSTANCE.getLanguage
} | katejim/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/types/Signature.scala | Scala | apache-2.0 | 11,036 |
package fpinscala
package monads
import parsing._
import testing._
import parallelism._
import state._
import parallelism.Par._
trait Functor[F[_]] {
def map[A,B](fa: F[A])(f: A => B): F[B]
def distribute[A,B](fab: F[(A, B)]): (F[A], F[B]) =
(map(fab)(_._1), map(fab)(_._2))
def codistribute[A,B](e: Either[F[A], F[B]]): F[Either[A, B]] = e match {
case Left(fa) => map(fa)(Left(_))
case Right(fb) => map(fb)(Right(_))
}
}
object Functor {
val listFunctor = new Functor[List] {
def map[A,B](as: List[A])(f: A => B): List[B] = as map f
}
}
trait Monad[M[_]] extends Functor[M] { self =>
def unit[A](a: => A): M[A]
def flatMap[A,B](ma: M[A])(f: A => M[B]): M[B]
def map[A,B](ma: M[A])(f: A => B): M[B] =
flatMap(ma)(a => unit(f(a)))
def map2[A,B,C](ma: M[A], mb: M[B])(f: (A, B) => C): M[C] =
flatMap(ma)(a => map(mb)(b => f(a, b)))
def sequence[A](lma: List[M[A]]): M[List[A]] =
lma.foldRight(unit(List[A]()))((ma, mla) => map2(ma, mla)(_ :: _))
def traverse[A,B](la: List[A])(f: A => M[B]): M[List[B]] =
la.foldRight(unit(List[B]()))((a, mlb) => map2(f(a), mlb)(_ :: _))
def replicateM[A](n: Int, ma: M[A]): M[List[A]] =
// sequence(List.fill(n)(ma))
// if (n <= 0) unit(List[A]()) else map2(ma, replicateM(n - 1, ma))(_ :: _)
sequence(List.fill(n)(ma))
def product[A,B](ma: M[A], mb: M[B]): M[(A, B)] = map2(ma, mb)((_, _))
// def filterM[A](la: List[A])(f: A => M[Boolean]): M[List[A]] = {
// map2(unit(la), sequence(la.map(f))) { (as,bs) =>
// (as zip bs) filter(_._2) map(_._1)
// }
// }
// translated from http://hackage.haskell.org/package/base-4.7.0.1/docs/src/Control-Monad.html
def filterM[A](la: List[A])(f: A => M[Boolean]): M[List[A]] = la match {
case Nil => unit(Nil)
case x :: xs => for {
flg <- f(x)
ys <- filterM(xs)(f)
} yield if (flg) x::ys else ys
}
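  // Illustrative note (not part of the original exercises): with the List monad, filterM over a
  // predicate that returns List(true, false) for every element enumerates all sublists:
  //   Monad.listMonad.filterM(List(1, 2, 3))(_ => List(true, false))
  //   // List(List(1,2,3), List(1,2), List(1,3), List(1), List(2,3), List(2), List(3), List())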
def compose[A,B,C](f: A => M[B], g: B => M[C]): A => M[C] =
(a: A) => flatMap(f(a))(g)
def composeViaJoinAndMap[A,B,C](f: A => M[B], g: B => M[C]): A => M[C] =
(a: A) => join(map(f(a))(g))
// Implement in terms of `compose`:
def flatMapViaCompose[A,B](ma: M[A])(f: A => M[B]): M[B] =
// any dummy input value will do here
// compose((_:Unit) => ma, f)(())
compose((_:Boolean) => ma, f)(true)
def flatMapViaJoinAndMap[A,B](ma: M[A])(f: A => M[B]): M[B] =
join(map(ma)(f))
def join[A](mma: M[M[A]]): M[A] =
mma.flatMap(ma => ma)
implicit def toMonadOps[A](ma: M[A]): MonadOps[A] = MonadOps[A](ma)
case class MonadOps[A](ma: M[A]) {
def unit(a: => A) = self.unit(a)
def flatMap[B](f: A => M[B]) = self.flatMap(ma)(f)
def map[B](f: A => B) = self.map(ma)(f)
}
}
object Monad {
val genMonad = new Monad[Gen] {
def unit[A](a: => A): Gen[A] = Gen.unit(a)
override def flatMap[A,B](ma: Gen[A])(f: A => Gen[B]): Gen[B] =
ma flatMap f
}
val parMonad: Monad[Par] = new Monad[Par] {
override def unit[A](a: => A): Par[A] = Par.unit(a)
override def flatMap[A,B](ma: Par[A])(f: A => Par[B]): Par[B] =
// ma flatMap f
Par.flatMap(ma)(f)
}
def parserMonad[P[+_]](p: Parsers[P]): Monad[P] = new Monad[P] {
import p._
override def unit[A](a: => A): P[A] = p.succeed(a)
override def flatMap[A,B](ma: P[A])(f: A => P[B]): P[B] =
// ma flatMap(f)
p.flatMap(ma)(f)
}
val optionMonad: Monad[Option] = new Monad[Option] {
override def unit[A](a: => A): Option[A] = Option(a)
override def flatMap[A,B](ma: Option[A])(f: A => Option[B]): Option[B] =
ma flatMap f
}
val streamMonad: Monad[Stream] = new Monad[Stream] {
override def unit[A](a: => A): Stream[A] = Stream(a)
override def flatMap[A,B](ma: Stream[A])(f: A => Stream[B]): Stream[B] =
ma flatMap f
}
val listMonad: Monad[List] = new Monad[List] {
override def unit[A](a: => A): List[A] = List(a)
override def flatMap[A,B](ma: List[A])(f: A => List[B]): List[B] =
ma flatMap f
}
def stateMonad[S] = new Monad[({type lambda[x] = State[S, x]})#lambda] {
def unit[A](a: => A): State[S, A] = State(s => (a, s))
override def flatMap[A,B](st: State[S, A])(f: A => State[S, B]): State[S, B] =
st flatMap f
}
val idMonad: Monad[Id] = new Monad[Id] {
def unit[A](a: => A) = Id(a)
override def flatMap[A,B](ida: Id[A])(f: A => Id[B]): Id[B] = ida flatMap f
}
def getState[S]: State[S,S] = State(s => (s,s))
def setState[S](s: S): State[S,Unit] = State(_ => ((),s))
val F = stateMonad[Int]
def zipWithIndex[A](as: List[A]): List[(Int,A)] =
as.foldLeft(F.unit(List[(Int, A)]()))((acc,a) => for {
xs <- acc
n <- getState
_ <- setState(n + 1)
} yield (n, a) :: xs).run(0)._1.reverse
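  // Illustrative examples (not part of the original exercises):
  //   listMonad.replicateM(2, List(1, 2))   // List(List(1,1), List(1,2), List(2,1), List(2,2))
  //   zipWithIndex(List("a", "b", "c"))     // List((0,"a"), (1,"b"), (2,"c"))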
def readerMonad[R] = new Monad[({type f[x] = Reader[R,x]})#f] {
def unit[A](a: => A): Reader[R,A] = Reader(_ => a)
override def flatMap[A,B](st: Reader[R,A])(f: A => Reader[R,B]): Reader[R,B] =
Reader(r => f(st.run(r)).run(r))
}
}
case class Id[A](value: A) {
def map[B](f: A => B): Id[B] = Id(f(value))
def flatMap[B](f: A => Id[B]): Id[B] = f(value)
}
case class Reader[R, A](run: R => A)
object Reader {
def ask[R]: Reader[R, R] = Reader(r => r)
}
| fpinscala-muc/fpinscala-abo64 | exercises/src/main/scala/fpinscala/monads/Monad.scala | Scala | mit | 5,251 |
/*
*************************************************************************************
* Copyright 2016 Normation SAS
*************************************************************************************
*
* This file is part of Rudder.
*
* Rudder is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU General Public License version 3, the copyright holders add
* the following Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU General
* Public License version 3, when you create a Related Module, this
* Related Module is not considered as a part of the work and may be
* distributed under the license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* Rudder is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Rudder. If not, see <http://www.gnu.org/licenses/>.
*
*************************************************************************************
*/
package com.normation.rudder.repository.jdbc
import com.normation.BoxSpecMatcher
import com.normation.rudder.db.DB
import com.normation.rudder.domain.nodes.NodeGroupCategoryId
import com.normation.rudder.domain.nodes.NodeGroupId
import com.normation.rudder.domain.policies.Rule
import com.normation.rudder.db.DBCommon
import com.normation.rudder.repository.FullNodeGroupCategory
import com.normation.rudder.services.eventlog.HistorizationServiceImpl
import com.normation.rudder.services.policies.NodeConfigData
import org.junit.runner.RunWith
import org.specs2.mutable._
import org.specs2.runner.JUnitRunner
/**
*
* Test on database.
*
*/
@RunWith(classOf[JUnitRunner])
class HistorizationRepositoryTest extends DBCommon with BoxSpecMatcher {
lazy val repos = new HistorizationJdbcRepository(doobie)
lazy val service = new HistorizationServiceImpl(repos)
sequential
"Basic add and close for nodes" should {
"found nothing at begining" in {
repos.getAllOpenedNodes must haveSize[Seq[DB.SerializedNodes[Long]]](0)
}
"be able to add and found" in {
val op1 = repos.updateNodes(Seq(NodeConfigData.node1), Seq())
val op2 = repos.getAllOpenedNodes
(op1 === ()) and (op2.size === 1) and (op2.head.nodeId === "node1")
}
"be able to close and found new ones" in {
val op1 = service.updateNodes(Set(NodeConfigData.node2)).openOrThrowException("that test should not throw")
val op2 = repos.getAllOpenedNodes
(op1 === ()) and (op2.size === 1) and (op2.head.nodeId === "node2")
}
"check that policy servers are ignored (not sure why)" in {
val op1 = service.updateNodes(Set(NodeConfigData.root)).openOrThrowException("that test should not throw")
val op2 = repos.getAllOpenedNodes
(op1 === ()) and (op2.size === 0)
}
}
"Basic add and close for groups" should {
    // build a full category based on the group ids from NodeConfigData
def buildCategory(groups: List[NodeGroupId]) = FullNodeGroupCategory(NodeGroupCategoryId("test_root"), "", "", Nil
      , groups.map(g => NodeConfigData.fullRuleTargetInfos.getOrElse(g, throw new Exception(s"Missing group with ID '${g}' in NodeConfigData, for tests")))
)
"found nothing at begining" in {
repos.getAllOpenedGroups() must haveSize[Seq[(DB.SerializedGroups[Long], Seq[DB.SerializedGroupsNodes])]](0)
}
"be able to add and found" in {
val op1 = repos.updateGroups(Seq(NodeConfigData.g1), Seq())
val op2 = repos.getAllOpenedGroups()
(op1 === ()) and (op2.size === 1) and (op2.head._1.groupId === "1")
}
"be able to close and found new ones" in {
val op1 = service.updateGroups(buildCategory(NodeConfigData.g2.id :: NodeConfigData.g3.id :: Nil)).openOrThrowException("that test should not throw")
val op2 = repos.getAllOpenedGroups()
(op1 === ()) and (op2.size === 2) and (op2.head._1.groupId === "2")
}
}
"Basic add and close for directives" should {
"found nothing at begining" in {
repos.getAllOpenedDirectives() must haveSize[Seq[DB.SerializedDirectives[Long]]](0)
}
"be able to add and found" in {
val op1 = repos.updateDirectives(Seq((NodeConfigData.d1, NodeConfigData.fat1.toActiveTechnique, NodeConfigData.t1)), Seq())
val op2 = repos.getAllOpenedDirectives()
(op1 === ()) and (op2.size === 1) and (op2.head.directiveId === "d1")
}
"be able to close and found new ones" in {
val op1 = service.updateDirectiveNames(NodeConfigData.directives).openOrThrowException("that test should not throw")
val op2 = repos.getAllOpenedDirectives()
(op1 === ()) and (op2.size === 2) and (op2.sortBy(_.directiveId).last.directiveId === "d2")
}
}
"Basic add and close for rules" should {
"found nothing at begining" in {
repos.getAllOpenedRules() must haveSize[Seq[Rule]](0)
}
"be able to add and found" in {
val op1 = repos.updateRules(Seq(NodeConfigData.r1), Seq())
val op2 = repos.getAllOpenedRules()
(op1 === ()) and (op2.size === 1) and (op2.head.id.value === "r1")
}
"be able to close and found new ones" in {
val op1 = service.updatesRuleNames(NodeConfigData.r2 :: Nil).openOrThrowException("that test should not throw")
val op2 = repos.getAllOpenedRules()
(op1 === ()) and (op2.size === 1) and (op2.head.id.value === "r2")
}
}
}
| armeniaca/rudder | rudder-core/src/test/scala/com/normation/rudder/repository/jdbc/HistorizationRepositoryTest.scala | Scala | gpl-3.0 | 6,117 |
package com.scalaAsm.x86
package Instructions
package x87
// Description: Load Constant +1.0
// Category: general/ldconst
trait FLD1 extends InstructionDefinition {
val mnemonic = "FLD1"
}
object FLD1 extends ZeroOperands[FLD1] with FLD1Impl
trait FLD1Impl extends FLD1 {
implicit object _0 extends NoOp{
val opcode: OneOpcode = 0xD9 /+ 5
override def hasImplicitOperand = true
}
}
| bdwashbu/scala-x86-inst | src/main/scala/com/scalaAsm/x86/Instructions/x87/FLD1.scala | Scala | apache-2.0 | 404 |
package uk.gov.gds.ier.transaction.ordinary.address
import com.google.inject.{Inject, Singleton}
import uk.gov.gds.ier.config.Config
import uk.gov.gds.ier.model.{
Addresses,
PossibleAddress}
import uk.gov.gds.ier.security.EncryptionService
import uk.gov.gds.ier.serialiser.JsonSerialiser
import uk.gov.gds.ier.service.{ScotlandService, AddressService}
import uk.gov.gds.ier.step.{GoTo, OrdinaryStep, Routes}
import uk.gov.gds.ier.validation.ErrorTransformForm
import uk.gov.gds.ier.transaction.ordinary.{OrdinaryControllers, InprogressOrdinary}
import uk.gov.gds.ier.controller.routes.ExitController
import uk.gov.gds.ier.assets.RemoteAssets
@Singleton
class AddressSelectStep @Inject() (
val serialiser: JsonSerialiser,
val config: Config,
val encryptionService: EncryptionService,
val addressService: AddressService,
val scotlandService: ScotlandService,
val remoteAssets: RemoteAssets,
val ordinary: OrdinaryControllers
) extends OrdinaryStep
with AddressSelectMustache
with AddressForms {
val validation = addressForm
val routing = Routes(
get = routes.AddressSelectStep.get,
post = routes.AddressSelectStep.post,
editGet = routes.AddressSelectStep.editGet,
editPost = routes.AddressSelectStep.editPost
)
def nextStep(currentState: InprogressOrdinary) = {
currentState.address.map(_.postcode) match {
case Some(postcode) if postcode.trim.toUpperCase.startsWith("BT") => GoTo (ExitController.northernIreland)
case _ => ordinary.OtherAddressStep
}
}
override val onSuccess = TransformApplication { currentState =>
val addressWithAddressLine = currentState.address.map {
addressService.fillAddressLine(_)
}
val currentStateNEW = currentState.copy(
address = addressWithAddressLine,
possibleAddresses = None
)
    // Before moving on, check that the noDOB range option is still accurate for the newly selected address.
    // Wipe the selected noDOB range option if it is NOT acceptable for the new address
    // (i.e. the noDOB option must be a selection appropriate for this address's country).
if(scotlandService.resetNoDOBRange(currentState)) {
val currentStateRESET = currentStateNEW.copy(
dob = None
)
currentStateRESET
}
else currentStateNEW
} andThen GoToNextIncompleteStep()
}
| alphagov/ier-frontend | app/uk/gov/gds/ier/transaction/ordinary/address/AddressSelectStep.scala | Scala | mit | 2,352 |
package org.broadinstitute.dsde.vault.datamanagement.controller
case class EntityType(databaseKey: String, endpoint: String)
object EntityType {
val UNMAPPED_BAM = EntityType("unmappedBAM", "ubam")
val ANALYSIS = EntityType("analysis", "analyses")
val UBAM_COLLECTION = EntityType("uBAMCollection", "ubamcollection")
val TYPES = Seq(UNMAPPED_BAM, ANALYSIS, UBAM_COLLECTION)
}
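// Lookup sketch: resolving an API endpoint segment back to its database key.
//   EntityType.TYPES.find(_.endpoint == "ubam").map(_.databaseKey)   // Some("unmappedBAM")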
| broadinstitute/vault-datamanagement | src/main/scala/org/broadinstitute/dsde/vault/datamanagement/controller/EntityType.scala | Scala | bsd-3-clause | 386 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gearpump.cluster.worker
import java.io.File
import scala.sys.process.Process
import com.typesafe.config.Config
import org.slf4j.{Logger, LoggerFactory}
import org.apache.gearpump.cluster.scheduler.Resource
import org.apache.gearpump.util.{ProcessLogRedirector, RichProcess}
/**
* CGroupProcessLauncher is used to launch a process for Executor with CGroup.
* For more details, please refer to
* http://gearpump.apache.org/releases/latest/deployment-resource-isolation.html
*/
class CGroupProcessLauncher(val config: Config) extends ExecutorProcessLauncher {
private val APP_MASTER = -1
private val cgroupManager: Option[CGroupManager] = CGroupManager.getInstance(config)
private val LOG: Logger = LoggerFactory.getLogger(getClass)
override def cleanProcess(appId: Int, executorId: Int): Unit = {
if (executorId != APP_MASTER) {
cgroupManager.foreach(_.shutDownExecutor(appId, executorId))
}
}
override def createProcess(
appId: Int, executorId: Int, resource: Resource, appConfig: Config, options: Array[String],
classPath: Array[String], mainClass: String, arguments: Array[String]): RichProcess = {
val cgroupCommand = if (executorId != APP_MASTER) {
cgroupManager.map(_.startNewExecutor(appConfig, resource.slots, appId,
executorId)).getOrElse(List.empty)
} else List.empty
LOG.info(s"Launch executor $executorId with CGroup ${cgroupCommand.mkString(" ")}, " +
s"classpath: ${classPath.mkString(File.pathSeparator)}")
val java = System.getProperty("java.home") + "/bin/java"
val command = cgroupCommand ++ List(java) ++ options ++ List("-cp", classPath
.mkString(File.pathSeparator), mainClass) ++ arguments
LOG.info(s"Starting executor process java $mainClass ${arguments.mkString(" ")}; " +
s"options: ${options.mkString(" ")}")
val logger = new ProcessLogRedirector()
val process = Process(command).run(logger)
new RichProcess(process, logger)
}
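  // Illustrative shape of the launched command (an assumption for readability -- the exact cgroup
  // prefix comes from CGroupManager and the configured hierarchy):
  //   cgexec -g cpu:/gearpump/app1_executor0 \
  //     <java.home>/bin/java <options> -cp <classpath> <mainClass> <arguments>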
}
| manuzhang/incubator-gearpump | experiments/cgroup/src/main/scala/org/apache/gearpump/cluster/worker/CGroupProcessLauncher.scala | Scala | apache-2.0 | 2,796 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution
import java.util.concurrent.TimeUnit._
import scala.collection.mutable.HashMap
import org.apache.commons.lang3.StringUtils
import org.apache.hadoop.fs.Path
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.{InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.catalog.BucketSpec
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical.{HashPartitioning, Partitioning, UnknownPartitioning}
import org.apache.spark.sql.catalyst.util.truncatedString
import org.apache.spark.sql.connector.expressions.Aggregation
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.datasources.parquet.{ParquetFileFormat => ParquetSource}
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.{BaseRelation, Filter}
import org.apache.spark.sql.types.StructType
import org.apache.spark.sql.vectorized.ColumnarBatch
import org.apache.spark.util.Utils
import org.apache.spark.util.collection.BitSet
trait DataSourceScanExec extends LeafExecNode {
def relation: BaseRelation
def tableIdentifier: Option[TableIdentifier]
protected val nodeNamePrefix: String = ""
override val nodeName: String = {
s"Scan $relation ${tableIdentifier.map(_.unquotedString).getOrElse("")}"
}
// Metadata that describes more details of this scan.
protected def metadata: Map[String, String]
protected val maxMetadataValueLength = conf.maxMetadataStringLength
override def simpleString(maxFields: Int): String = {
val metadataEntries = metadata.toSeq.sorted.map {
case (key, value) =>
key + ": " + StringUtils.abbreviate(redact(value), maxMetadataValueLength)
}
val metadataStr = truncatedString(metadataEntries, " ", ", ", "", maxFields)
redact(
s"$nodeNamePrefix$nodeName${truncatedString(output, "[", ",", "]", maxFields)}$metadataStr")
}
override def verboseStringWithOperatorId(): String = {
val metadataStr = metadata.toSeq.sorted.filterNot {
case (_, value) if (value.isEmpty || value.equals("[]")) => true
case (key, _) if (key.equals("DataFilters") || key.equals("Format")) => true
case (_, _) => false
}.map {
case (key, value) => s"$key: ${redact(value)}"
}
s"""
|$formattedNodeName
|${ExplainUtils.generateFieldString("Output", output)}
|${metadataStr.mkString("\\n")}
|""".stripMargin
}
/**
* Shorthand for calling redactString() without specifying redacting rules
*/
protected def redact(text: String): String = {
Utils.redact(conf.stringRedactionPattern, text)
}
/**
* The data being read in. This is to provide input to the tests in a way compatible with
* [[InputRDDCodegen]] which all implementations used to extend.
*/
def inputRDDs(): Seq[RDD[InternalRow]]
}
/** Physical plan node for scanning data from a relation. */
case class RowDataSourceScanExec(
output: Seq[Attribute],
requiredSchema: StructType,
filters: Set[Filter],
handledFilters: Set[Filter],
aggregation: Option[Aggregation],
rdd: RDD[InternalRow],
@transient relation: BaseRelation,
tableIdentifier: Option[TableIdentifier])
extends DataSourceScanExec with InputRDDCodegen {
override lazy val metrics =
Map("numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"))
protected override def doExecute(): RDD[InternalRow] = {
val numOutputRows = longMetric("numOutputRows")
rdd.mapPartitionsWithIndexInternal { (index, iter) =>
val proj = UnsafeProjection.create(schema)
proj.initialize(index)
iter.map( r => {
numOutputRows += 1
proj(r)
})
}
}
// Input can be InternalRow, has to be turned into UnsafeRows.
override protected val createUnsafeProjection: Boolean = true
override def inputRDD: RDD[InternalRow] = rdd
override val metadata: Map[String, String] = {
def seqToString(seq: Seq[Any]): String = seq.mkString("[", ", ", "]")
val (aggString, groupByString) = if (aggregation.nonEmpty) {
(seqToString(aggregation.get.aggregateExpressions),
seqToString(aggregation.get.groupByColumns))
} else {
("[]", "[]")
}
val markedFilters = if (filters.nonEmpty) {
for (filter <- filters) yield {
if (handledFilters.contains(filter)) s"*$filter" else s"$filter"
}
} else {
handledFilters
}
Map(
"ReadSchema" -> requiredSchema.catalogString,
"PushedFilters" -> seqToString(markedFilters.toSeq),
"PushedAggregates" -> aggString,
"PushedGroupby" -> groupByString)
}
// Don't care about `rdd` and `tableIdentifier` when canonicalizing.
override def doCanonicalize(): SparkPlan =
copy(
output.map(QueryPlan.normalizeExpressions(_, output)),
rdd = null,
tableIdentifier = None)
}
/**
* Physical plan node for scanning data from HadoopFsRelations.
*
* @param relation The file-based relation to scan.
* @param output Output attributes of the scan, including data attributes and partition attributes.
* @param requiredSchema Required schema of the underlying relation, excluding partition columns.
* @param partitionFilters Predicates to use for partition pruning.
* @param optionalBucketSet Bucket ids for bucket pruning.
* @param optionalNumCoalescedBuckets Number of coalesced buckets.
* @param dataFilters Filters on non-partition columns.
* @param tableIdentifier Identifier for the table in the metastore.
* @param disableBucketedScan Disable bucketed scan based on physical query plan, see rule
* [[DisableUnnecessaryBucketedScan]] for details.
*/
case class FileSourceScanExec(
@transient relation: HadoopFsRelation,
output: Seq[Attribute],
requiredSchema: StructType,
partitionFilters: Seq[Expression],
optionalBucketSet: Option[BitSet],
optionalNumCoalescedBuckets: Option[Int],
dataFilters: Seq[Expression],
tableIdentifier: Option[TableIdentifier],
disableBucketedScan: Boolean = false)
extends DataSourceScanExec {
  // Note that some vals referring to the file-based relation are intentionally lazy
  // so that this plan can be canonicalized on the executor side too. See SPARK-23731.
override lazy val supportsColumnar: Boolean = {
relation.fileFormat.supportBatch(relation.sparkSession, schema)
}
private lazy val needsUnsafeRowConversion: Boolean = {
if (relation.fileFormat.isInstanceOf[ParquetSource]) {
conf.parquetVectorizedReaderEnabled
} else {
false
}
}
override def vectorTypes: Option[Seq[String]] =
relation.fileFormat.vectorTypes(
requiredSchema = requiredSchema,
partitionSchema = relation.partitionSchema,
relation.sparkSession.sessionState.conf)
private lazy val driverMetrics: HashMap[String, Long] = HashMap.empty
/**
* Send the driver-side metrics. Before calling this function, selectedPartitions has
* been initialized. See SPARK-26327 for more details.
*/
private def sendDriverMetrics(): Unit = {
driverMetrics.foreach(e => metrics(e._1).add(e._2))
val executionId = sparkContext.getLocalProperty(SQLExecution.EXECUTION_ID_KEY)
SQLMetrics.postDriverMetricUpdates(sparkContext, executionId,
metrics.filter(e => driverMetrics.contains(e._1)).values.toSeq)
}
private def isDynamicPruningFilter(e: Expression): Boolean =
e.find(_.isInstanceOf[PlanExpression[_]]).isDefined
@transient lazy val selectedPartitions: Array[PartitionDirectory] = {
val optimizerMetadataTimeNs = relation.location.metadataOpsTimeNs.getOrElse(0L)
val startTime = System.nanoTime()
val ret =
relation.location.listFiles(
partitionFilters.filterNot(isDynamicPruningFilter), dataFilters)
setFilesNumAndSizeMetric(ret, true)
val timeTakenMs = NANOSECONDS.toMillis(
(System.nanoTime() - startTime) + optimizerMetadataTimeNs)
driverMetrics("metadataTime") = timeTakenMs
ret
}.toArray
// We can only determine the actual partitions at runtime when a dynamic partition filter is
// present. This is because such a filter relies on information that is only available at run
// time (for instance the keys used in the other side of a join).
@transient private lazy val dynamicallySelectedPartitions: Array[PartitionDirectory] = {
val dynamicPartitionFilters = partitionFilters.filter(isDynamicPruningFilter)
if (dynamicPartitionFilters.nonEmpty) {
val startTime = System.nanoTime()
// call the file index for the files matching all filters except dynamic partition filters
val predicate = dynamicPartitionFilters.reduce(And)
val partitionColumns = relation.partitionSchema
val boundPredicate = Predicate.create(predicate.transform {
case a: AttributeReference =>
val index = partitionColumns.indexWhere(a.name == _.name)
BoundReference(index, partitionColumns(index).dataType, nullable = true)
}, Nil)
val ret = selectedPartitions.filter(p => boundPredicate.eval(p.values))
setFilesNumAndSizeMetric(ret, false)
val timeTakenMs = (System.nanoTime() - startTime) / 1000 / 1000
driverMetrics("pruningTime") = timeTakenMs
ret
} else {
selectedPartitions
}
}
/**
* [[partitionFilters]] can contain subqueries whose results are available only at runtime so
* accessing [[selectedPartitions]] should be guarded by this method during planning
*/
private def hasPartitionsAvailableAtRunTime: Boolean = {
partitionFilters.exists(ExecSubqueryExpression.hasSubquery)
}
private def toAttribute(colName: String): Option[Attribute] =
output.find(_.name == colName)
// exposed for testing
lazy val bucketedScan: Boolean = {
if (relation.sparkSession.sessionState.conf.bucketingEnabled && relation.bucketSpec.isDefined
&& !disableBucketedScan) {
val spec = relation.bucketSpec.get
val bucketColumns = spec.bucketColumnNames.flatMap(n => toAttribute(n))
bucketColumns.size == spec.bucketColumnNames.size
} else {
false
}
}
override lazy val (outputPartitioning, outputOrdering): (Partitioning, Seq[SortOrder]) = {
if (bucketedScan) {
// For bucketed columns:
// -----------------------
// `HashPartitioning` would be used only when:
// 1. ALL the bucketing columns are being read from the table
//
// For sorted columns:
// ---------------------
// Sort ordering should be used when ALL these criteria's match:
// 1. `HashPartitioning` is being used
// 2. A prefix (or all) of the sort columns are being read from the table.
//
// Sort ordering would be over the prefix subset of `sort columns` being read
// from the table.
// e.g.
// Assume (col0, col2, col3) are the columns read from the table
// If sort columns are (col0, col1), then sort ordering would be considered as (col0)
// If sort columns are (col1, col0), then sort ordering would be empty as per rule #2
// above
val spec = relation.bucketSpec.get
val bucketColumns = spec.bucketColumnNames.flatMap(n => toAttribute(n))
val numPartitions = optionalNumCoalescedBuckets.getOrElse(spec.numBuckets)
val partitioning = HashPartitioning(bucketColumns, numPartitions)
val sortColumns =
spec.sortColumnNames.map(x => toAttribute(x)).takeWhile(x => x.isDefined).map(_.get)
val shouldCalculateSortOrder =
conf.getConf(SQLConf.LEGACY_BUCKETED_TABLE_SCAN_OUTPUT_ORDERING) &&
sortColumns.nonEmpty &&
!hasPartitionsAvailableAtRunTime
val sortOrder = if (shouldCalculateSortOrder) {
        // In case of bucketing, it's possible to have multiple files belonging to the
// same bucket in a given relation. Each of these files are locally sorted
// but those files combined together are not globally sorted. Given that,
// the RDD partition will not be sorted even if the relation has sort columns set
// Current solution is to check if all the buckets have a single file in it
val files = selectedPartitions.flatMap(partition => partition.files)
val bucketToFilesGrouping =
files.map(_.getPath.getName).groupBy(file => BucketingUtils.getBucketId(file))
val singleFilePartitions = bucketToFilesGrouping.forall(p => p._2.length <= 1)
// TODO SPARK-24528 Sort order is currently ignored if buckets are coalesced.
if (singleFilePartitions && optionalNumCoalescedBuckets.isEmpty) {
          // TODO Currently Spark does not support writing columns sorted in descending order,
          // so Ascending order is used here. This can be fixed in the future.
sortColumns.map(attribute => SortOrder(attribute, Ascending))
} else {
Nil
}
} else {
Nil
}
(partitioning, sortOrder)
} else {
(UnknownPartitioning(0), Nil)
}
}
@transient
private lazy val pushedDownFilters = {
val supportNestedPredicatePushdown = DataSourceUtils.supportNestedPredicatePushdown(relation)
dataFilters.flatMap(DataSourceStrategy.translateFilter(_, supportNestedPredicatePushdown))
}
override lazy val metadata: Map[String, String] = {
def seqToString(seq: Seq[Any]) = seq.mkString("[", ", ", "]")
val location = relation.location
val locationDesc =
location.getClass.getSimpleName +
Utils.buildLocationMetadata(location.rootPaths, maxMetadataValueLength)
val metadata =
Map(
"Format" -> relation.fileFormat.toString,
"ReadSchema" -> requiredSchema.catalogString,
"Batched" -> supportsColumnar.toString,
"PartitionFilters" -> seqToString(partitionFilters),
"PushedFilters" -> seqToString(pushedDownFilters),
"DataFilters" -> seqToString(dataFilters),
"Location" -> locationDesc)
// TODO(SPARK-32986): Add bucketed scan info in explain output of FileSourceScanExec
if (bucketedScan) {
relation.bucketSpec.map { spec =>
val numSelectedBuckets = optionalBucketSet.map { b =>
b.cardinality()
} getOrElse {
spec.numBuckets
}
metadata + ("SelectedBucketsCount" ->
(s"$numSelectedBuckets out of ${spec.numBuckets}" +
optionalNumCoalescedBuckets.map { b => s" (Coalesced to $b)"}.getOrElse("")))
} getOrElse {
metadata
}
} else {
metadata
}
}
override def verboseStringWithOperatorId(): String = {
val metadataStr = metadata.toSeq.sorted.filterNot {
case (_, value) if (value.isEmpty || value.equals("[]")) => true
case (key, _) if (key.equals("DataFilters") || key.equals("Format")) => true
case (_, _) => false
}.map {
case (key, _) if (key.equals("Location")) =>
val location = relation.location
val numPaths = location.rootPaths.length
val abbreviatedLocation = if (numPaths <= 1) {
location.rootPaths.mkString("[", ", ", "]")
} else {
"[" + location.rootPaths.head + s", ... ${numPaths - 1} entries]"
}
s"$key: ${location.getClass.getSimpleName} ${redact(abbreviatedLocation)}"
case (key, value) => s"$key: ${redact(value)}"
}
s"""
|$formattedNodeName
|${ExplainUtils.generateFieldString("Output", output)}
|${metadataStr.mkString("\\n")}
|""".stripMargin
}
lazy val inputRDD: RDD[InternalRow] = {
val readFile: (PartitionedFile) => Iterator[InternalRow] =
relation.fileFormat.buildReaderWithPartitionValues(
sparkSession = relation.sparkSession,
dataSchema = relation.dataSchema,
partitionSchema = relation.partitionSchema,
requiredSchema = requiredSchema,
filters = pushedDownFilters,
options = relation.options,
hadoopConf = relation.sparkSession.sessionState.newHadoopConfWithOptions(relation.options))
val readRDD = if (bucketedScan) {
createBucketedReadRDD(relation.bucketSpec.get, readFile, dynamicallySelectedPartitions,
relation)
} else {
createReadRDD(readFile, dynamicallySelectedPartitions, relation)
}
sendDriverMetrics()
readRDD
}
override def inputRDDs(): Seq[RDD[InternalRow]] = {
inputRDD :: Nil
}
/** SQL metrics generated only for scans using dynamic partition pruning. */
private lazy val staticMetrics = if (partitionFilters.exists(isDynamicPruningFilter)) {
Map("staticFilesNum" -> SQLMetrics.createMetric(sparkContext, "static number of files read"),
"staticFilesSize" -> SQLMetrics.createSizeMetric(sparkContext, "static size of files read"))
} else {
Map.empty[String, SQLMetric]
}
/** Helper for computing total number and size of files in selected partitions. */
private def setFilesNumAndSizeMetric(
partitions: Seq[PartitionDirectory],
static: Boolean): Unit = {
val filesNum = partitions.map(_.files.size.toLong).sum
val filesSize = partitions.map(_.files.map(_.getLen).sum).sum
if (!static || !partitionFilters.exists(isDynamicPruningFilter)) {
driverMetrics("numFiles") = filesNum
driverMetrics("filesSize") = filesSize
} else {
driverMetrics("staticFilesNum") = filesNum
driverMetrics("staticFilesSize") = filesSize
}
if (relation.partitionSchemaOption.isDefined) {
driverMetrics("numPartitions") = partitions.length
}
}
override lazy val metrics = Map(
"numOutputRows" -> SQLMetrics.createMetric(sparkContext, "number of output rows"),
"numFiles" -> SQLMetrics.createMetric(sparkContext, "number of files read"),
"metadataTime" -> SQLMetrics.createTimingMetric(sparkContext, "metadata time"),
"filesSize" -> SQLMetrics.createSizeMetric(sparkContext, "size of files read")
) ++ {
    // Tracking scan time has overhead; we can't afford to do it for each row, so we only track
    // it per batch.
if (supportsColumnar) {
Some("scanTime" -> SQLMetrics.createTimingMetric(sparkContext, "scan time"))
} else {
None
}
} ++ {
if (relation.partitionSchemaOption.isDefined) {
Map(
"numPartitions" -> SQLMetrics.createMetric(sparkContext, "number of partitions read"),
"pruningTime" ->
SQLMetrics.createTimingMetric(sparkContext, "dynamic partition pruning time"))
} else {
Map.empty[String, SQLMetric]
}
} ++ staticMetrics
protected override def doExecute(): RDD[InternalRow] = {
val numOutputRows = longMetric("numOutputRows")
if (needsUnsafeRowConversion) {
inputRDD.mapPartitionsWithIndexInternal { (index, iter) =>
val toUnsafe = UnsafeProjection.create(schema)
toUnsafe.initialize(index)
iter.map { row =>
numOutputRows += 1
toUnsafe(row)
}
}
} else {
inputRDD.mapPartitionsInternal { iter =>
iter.map { row =>
numOutputRows += 1
row
}
}
}
}
protected override def doExecuteColumnar(): RDD[ColumnarBatch] = {
val numOutputRows = longMetric("numOutputRows")
val scanTime = longMetric("scanTime")
inputRDD.asInstanceOf[RDD[ColumnarBatch]].mapPartitionsInternal { batches =>
new Iterator[ColumnarBatch] {
override def hasNext: Boolean = {
// The `FileScanRDD` returns an iterator which scans the file during the `hasNext` call.
val startNs = System.nanoTime()
val res = batches.hasNext
scanTime += NANOSECONDS.toMillis(System.nanoTime() - startNs)
res
}
override def next(): ColumnarBatch = {
val batch = batches.next()
numOutputRows += batch.numRows()
batch
}
}
}
}
override val nodeNamePrefix: String = "File"
/**
* Create an RDD for bucketed reads.
* The non-bucketed variant of this function is [[createReadRDD]].
*
* The algorithm is pretty simple: each RDD partition being returned should include all the files
* with the same bucket id from all the given Hive partitions.
*
* @param bucketSpec the bucketing spec.
* @param readFile a function to read each (part of a) file.
   * @param selectedPartitions Hive-style partitions that are part of the read.
* @param fsRelation [[HadoopFsRelation]] associated with the read.
*/
private def createBucketedReadRDD(
bucketSpec: BucketSpec,
readFile: (PartitionedFile) => Iterator[InternalRow],
selectedPartitions: Array[PartitionDirectory],
fsRelation: HadoopFsRelation): RDD[InternalRow] = {
logInfo(s"Planning with ${bucketSpec.numBuckets} buckets")
val filesGroupedToBuckets =
selectedPartitions.flatMap { p =>
p.files.map { f =>
PartitionedFileUtil.getPartitionedFile(f, f.getPath, p.values)
}
}.groupBy { f =>
BucketingUtils
.getBucketId(new Path(f.filePath).getName)
.getOrElse(throw new IllegalStateException(s"Invalid bucket file ${f.filePath}"))
}
val prunedFilesGroupedToBuckets = if (optionalBucketSet.isDefined) {
val bucketSet = optionalBucketSet.get
filesGroupedToBuckets.filter {
f => bucketSet.get(f._1)
}
} else {
filesGroupedToBuckets
}
val filePartitions = optionalNumCoalescedBuckets.map { numCoalescedBuckets =>
logInfo(s"Coalescing to ${numCoalescedBuckets} buckets")
val coalescedBuckets = prunedFilesGroupedToBuckets.groupBy(_._1 % numCoalescedBuckets)
Seq.tabulate(numCoalescedBuckets) { bucketId =>
val partitionedFiles = coalescedBuckets.get(bucketId).map {
_.values.flatten.toArray
}.getOrElse(Array.empty)
FilePartition(bucketId, partitionedFiles)
}
}.getOrElse {
Seq.tabulate(bucketSpec.numBuckets) { bucketId =>
FilePartition(bucketId, prunedFilesGroupedToBuckets.getOrElse(bucketId, Array.empty))
}
}
new FileScanRDD(fsRelation.sparkSession, readFile, filePartitions)
}
/**
* Create an RDD for non-bucketed reads.
* The bucketed variant of this function is [[createBucketedReadRDD]].
*
* @param readFile a function to read each (part of a) file.
   * @param selectedPartitions Hive-style partitions that are part of the read.
* @param fsRelation [[HadoopFsRelation]] associated with the read.
*/
private def createReadRDD(
readFile: (PartitionedFile) => Iterator[InternalRow],
selectedPartitions: Array[PartitionDirectory],
fsRelation: HadoopFsRelation): RDD[InternalRow] = {
val openCostInBytes = fsRelation.sparkSession.sessionState.conf.filesOpenCostInBytes
val maxSplitBytes =
FilePartition.maxSplitBytes(fsRelation.sparkSession, selectedPartitions)
logInfo(s"Planning scan with bin packing, max size: $maxSplitBytes bytes, " +
s"open cost is considered as scanning $openCostInBytes bytes.")
// Filter files with bucket pruning if possible
val bucketingEnabled = fsRelation.sparkSession.sessionState.conf.bucketingEnabled
val shouldProcess: Path => Boolean = optionalBucketSet match {
case Some(bucketSet) if bucketingEnabled =>
// Do not prune the file if bucket file name is invalid
filePath => BucketingUtils.getBucketId(filePath.getName).forall(bucketSet.get)
case _ =>
_ => true
}
val splitFiles = selectedPartitions.flatMap { partition =>
partition.files.flatMap { file =>
// getPath() is very expensive so we only want to call it once in this block:
val filePath = file.getPath
if (shouldProcess(filePath)) {
val isSplitable = relation.fileFormat.isSplitable(
relation.sparkSession, relation.options, filePath)
PartitionedFileUtil.splitFiles(
sparkSession = relation.sparkSession,
file = file,
filePath = filePath,
isSplitable = isSplitable,
maxSplitBytes = maxSplitBytes,
partitionValues = partition.values
)
} else {
Seq.empty
}
}
}.sortBy(_.length)(implicitly[Ordering[Long]].reverse)
val partitions =
FilePartition.getFilePartitions(relation.sparkSession, splitFiles, maxSplitBytes)
new FileScanRDD(fsRelation.sparkSession, readFile, partitions)
}
  // Filters out unused DynamicPruningExpression expressions, i.e. ones that have been replaced
  // with DynamicPruningExpression(Literal.TrueLiteral) during physical planning.
private def filterUnusedDynamicPruningExpressions(
predicates: Seq[Expression]): Seq[Expression] = {
predicates.filterNot(_ == DynamicPruningExpression(Literal.TrueLiteral))
}
override def doCanonicalize(): FileSourceScanExec = {
FileSourceScanExec(
relation,
output.map(QueryPlan.normalizeExpressions(_, output)),
requiredSchema,
QueryPlan.normalizePredicates(
filterUnusedDynamicPruningExpressions(partitionFilters), output),
optionalBucketSet,
optionalNumCoalescedBuckets,
QueryPlan.normalizePredicates(dataFilters, output),
None,
disableBucketedScan)
}
}
| hvanhovell/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/DataSourceScanExec.scala | Scala | apache-2.0 | 26,226 |
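// Editor's note: the following is an illustrative sketch added for clarity; it is not part of
// the DataSourceScanExec.scala entry above. It mimics, with plain Scala collections, the idea
// behind createBucketedReadRDD: files are grouped by the bucket id parsed from their file name,
// and bucket ids are then folded together modulo the coalesced bucket count. The file names and
// the bucketId regex below are hypothetical stand-ins for Spark's BucketingUtils.getBucketId.
object BucketGroupingSketch {
  private val bucketIdPattern = "_(\\d+)\\.".r

  // Hypothetical parser: extracts the first "_NNNNN." group from a bucket file name.
  def bucketId(fileName: String): Option[Int] =
    bucketIdPattern.findFirstMatchIn(fileName).map(_.group(1).toInt)

  def main(args: Array[String]): Unit = {
    val files = Seq(
      "part-r-00000_00000.parquet", "part-r-00001_00001.parquet",
      "part-r-00002_00002.parquet", "part-r-00003_00003.parquet")

    val grouped: Map[Int, Seq[String]] =
      files.groupBy(f => bucketId(f).getOrElse(sys.error(s"Invalid bucket file $f")))

    // Coalesce 4 buckets down to 2 by folding bucket ids modulo 2, as the real code does
    // when optionalNumCoalescedBuckets is set.
    val numCoalescedBuckets = 2
    val coalesced: Map[Int, Seq[String]] =
      grouped.groupBy { case (id, _) => id % numCoalescedBuckets }
        .map { case (id, group) => id -> group.values.flatten.toSeq }

    coalesced.toSeq.sortBy(_._1).foreach { case (id, fs) =>
      println(s"coalesced bucket $id -> ${fs.mkString(", ")}")
    }
  }
}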
package org.ucombinator.jaam.tools.missingreturns
import java.io.FileInputStream
import scala.collection.mutable
import soot.{SootMethod => RealSootMethod} // TODO: fix class name collision
import soot.jimple.{Stmt => SootStmt, _}
import org.ucombinator.jaam.serializer._
object MissingReturns {
def missingReturns(jaamFile : String) = {
var states = List[State]()
val stateSet = mutable.Set[(RealSootMethod, Int, String, String)]()
val stream = new FileInputStream(jaamFile)
val pi = new PacketInput(stream)
var packet: Packet = null
while ({packet = pi.read(); !packet.isInstanceOf[EOF]}) {
packet match {
case s@State(id, stmt, framePointer, kontStack) =>
states +:= s
stateSet.add((stmt.method, stmt.index, framePointer, kontStack))
case _ => {}
}
}
pi.close()
for (State(id, stmt, framePointer, kontStack) <- states) {
val isCall = stmt.stmt match {
case sootStmt : InvokeStmt => true
case sootStmt : DefinitionStmt =>
sootStmt.getRightOp() match {
case rhs : InvokeExpr => true
case _ => false
}
case _ => false
}
      // We can't use the next stmt, because Soot will complain that there is no
      // active body present. Fortunately, we can use the stmt index instead.
//val nextStmt = stmt.method.getActiveBody().getUnits().getSuccOf(stmt.stmt).asInstanceOf[SootStmt]
if (isCall && !stateSet.contains((stmt.method, stmt.index + 1, framePointer, kontStack))) {
println("Missing return for call in state " + id)
println(" method: " + stmt.method)
println(" index: " + stmt.index)
println(" stmt: " + stmt.stmt)
println(" fp: " + framePointer)
println(" kontStack: " + kontStack)
}
}
}
}
| Ucombinator/jaam | src/main/scala/org/ucombinator/jaam/tools/MissingReturn.scala | Scala | bsd-2-clause | 1,844 |
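// Editor's note: the following is an illustrative sketch added for clarity; it is not part of
// the MissingReturn.scala entry above. It reproduces the core check in MissingReturns on toy
// data: a "call" state at statement index i is flagged when no state exists at index i + 1 in
// the same method and frame. ToyState is a simplified stand-in for the jaam serializer's State.
object MissingReturnSketch {
  final case class ToyState(method: String, index: Int, framePointer: String, isCall: Boolean)

  def main(args: Array[String]): Unit = {
    val states = Seq(
      ToyState("foo", 3, "fp0", isCall = true),
      ToyState("foo", 4, "fp0", isCall = false), // return site was reached: not reported
      ToyState("bar", 7, "fp1", isCall = true)   // no state at index 8: reported
    )
    val seen = states.map(s => (s.method, s.index, s.framePointer)).toSet
    for (s <- states if s.isCall && !seen.contains((s.method, s.index + 1, s.framePointer))) {
      println(s"Missing return for call in ${s.method} at index ${s.index} (fp = ${s.framePointer})")
    }
  }
}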
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.geotools
import java.util.{List => jList}
import com.typesafe.config._
import org.locationtech.geomesa.utils.geotools.AttributeSpec.{GeomAttributeSpec, ListAttributeSpec, MapAttributeSpec, SimpleAttributeSpec}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.Configs._
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes.InternalConfigs._
import org.opengis.feature.simple.SimpleFeatureType
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
/**
* SimpleFeatureSpec parsing from/to typesafe config
*/
object SimpleFeatureSpecConfig {
val TypeNamePath = "type-name"
val AttributesPath = "attributes"
val UserDataPath = "user-data"
val TypePath = "type"
val NamePath = "name"
// config keys that are not attribute options - all other fields are assumed to be options
private val NonOptions = Seq(TypePath, NamePath)
/**
* Parse a SimpleFeatureType spec from a typesafe Config
*
* @param conf config
* @param path instead of parsing the root config, parse the nested config at this path
* @return
*/
def parse(conf: Config, path: Option[String]): (Option[String], SimpleFeatureSpec) = {
import org.locationtech.geomesa.utils.conf.ConfConversions._
val toParse = path match {
case Some(p) => conf.getConfigOpt(p).map(conf.withFallback).getOrElse(conf)
case None => conf
}
parse(toParse)
}
/**
* Convert a simple feature type to a typesafe config
*
* @param sft simple feature type
* @param includeUserData include user data
* @param includePrefix include the geomesa.sfts.XXX prefix
* @return
*/
def toConfig(sft: SimpleFeatureType, includeUserData: Boolean, includePrefix: Boolean): Config = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
// Update "default" options (dtg and geom)
val defaults = sft.getDtgField.toSeq ++ Option(sft.getGeomField)
val attributes = sft.getAttributeDescriptors.map { ad =>
val config = AttributeSpec(sft, ad).toConfigMap
if (defaults.contains(ad.getLocalName)) {
config.updated("default", "true").asJava
} else {
config.asJava
}
}
val base = ConfigFactory.empty()
.withValue(TypeNamePath, ConfigValueFactory.fromAnyRef(sft.getTypeName))
.withValue(AttributesPath, ConfigValueFactory.fromIterable(attributes))
val updated = if (includeUserData) {
val prefixes = sft.getUserDataPrefixes
// special handling for keywords delimiter
val keywords = Map(KEYWORDS_KEY -> sft.getKeywords.asJava).filter(_._2.nonEmpty)
val toConvert = keywords ++ sft.getUserData.collect {
case (k, v) if v != null && prefixes.exists(k.toString.startsWith) && k != KEYWORDS_KEY => (k.toString, v)
}
val userData = ConfigValueFactory.fromMap(toConvert)
base.withValue(UserDataPath, userData)
} else {
base
}
if (includePrefix) {
updated.atPath(s"${ConfigSftParsing.path}.${sft.getTypeName}")
} else {
updated
}
}
/**
* Convert a simple feature type to a typesafe config and renders it as a string
*
* @param sft simple feature type
* @param includeUserData include user data
* @param concise concise or verbose string
* @return
*/
def toConfigString(sft: SimpleFeatureType,
includeUserData: Boolean,
concise: Boolean,
includePrefix: Boolean,
json: Boolean): String = {
val opts = if (concise) {
ConfigRenderOptions.concise.setJson(json)
} else {
ConfigRenderOptions.defaults().setFormatted(true).setComments(false).setOriginComments(false).setJson(json)
}
toConfig(sft, includeUserData, includePrefix).root().render(opts)
}
private def parse(conf: Config): (Option[String], SimpleFeatureSpec) = {
import org.locationtech.geomesa.utils.conf.ConfConversions._
val name = conf.getStringOpt(TypeNamePath)
val attributes = conf.getConfigListOpt("fields").getOrElse(conf.getConfigList(AttributesPath)).map(buildField)
val opts = getOptions(conf.getConfigOpt(UserDataPath).getOrElse(ConfigFactory.empty))
(name, SimpleFeatureSpec(attributes, opts))
}
private def buildField(conf: Config): AttributeSpec = {
val attribute = SimpleFeatureSpecParser.parseAttribute(s"${conf.getString(NamePath)}:${conf.getString(TypePath)}")
val options = getOptions(conf)
attribute match {
case s: SimpleAttributeSpec => s.copy(options = options)
case s: GeomAttributeSpec => s.copy(options = options)
case s: ListAttributeSpec => s.copy(options = options)
case s: MapAttributeSpec => s.copy(options = options)
}
}
def normalizeKey(k: String): String = ConfigUtil.splitPath(k).mkString(".")
private def getOptions(conf: Config): Map[String, String] = {
val asMap = conf.entrySet().map(e => normalizeKey(e.getKey) -> e.getValue.unwrapped()).toMap
asMap.filterKeys(!NonOptions.contains(_)).map {
// Special case to handle adding keywords
case (KEYWORDS_KEY, v: jList[String]) => KEYWORDS_KEY -> v.mkString(KEYWORDS_DELIMITER)
case (k, v: jList[String]) => k -> v.mkString(",")
case (k, v) => k -> s"$v"
}
}
}
| ronq/geomesa | geomesa-utils/src/main/scala/org/locationtech/geomesa/utils/geotools/SimpleFeatureSpecConfig.scala | Scala | apache-2.0 | 5,877 |
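// Editor's note: the following is an illustrative sketch added for clarity; it is not part of
// the SimpleFeatureSpecConfig.scala entry above. It shows the HOCON layout that
// SimpleFeatureSpecConfig.parse expects (type-name, attributes, user-data), using only the
// typesafe-config API already imported by the original file. The feature type name and the
// attribute definitions are made up for the example.
object SpecConfigSketch {
  import com.typesafe.config.ConfigFactory
  import scala.collection.JavaConverters._

  def main(args: Array[String]): Unit = {
    val conf = ConfigFactory.parseString(
      """
        |type-name = "observation"
        |attributes = [
        |  { name = "dtg", type = "Date", default = "true" }
        |  { name = "geom", type = "Point", srid = "4326", default = "true" }
        |]
        |user-data = { "geomesa.table.sharing" = "false" }
      """.stripMargin)

    println(s"type-name: ${conf.getString("type-name")}")
    conf.getConfigList("attributes").asScala.foreach { attr =>
      println(s"  ${attr.getString("name")}: ${attr.getString("type")}")
    }
  }
}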
package org.hammerlab.guacamole.pileup
import htsjdk.samtools.{CigarElement, CigarOperator}
import org.bdgenomics.adam.util.PhredUtils.phredToSuccessProbability
import org.hammerlab.genomics.reference.{ContigSequence, Locus}
import org.hammerlab.guacamole.reads.MappedRead
import org.hammerlab.guacamole.util.CigarUtils
import org.hammerlab.guacamole.variants.Allele
import scala.annotation.tailrec
/**
* A [[PileupElement]] represents the bases sequenced by a particular read at a particular reference locus.
*
* @param read The read this [[PileupElement]] is coming from.
* @param locus The reference locus.
* @param readPosition The offset into the sequence of bases in the read that this element corresponds to.
* @param cigarElementIndex The index in the read's sequence of cigar elements ([[org.hammerlab.guacamole.reads.MappedRead.cigarElements]])
* of the element that contains the current readPosition.
 * @param cigarElementLocus The reference START position of the current cigar element.
 *                          If the element is an INSERTION, this is the PRECEDING reference base.
* @param indexWithinCigarElement The offset of this element within the current cigar element.
*/
case class PileupElement(
read: MappedRead,
locus: Locus,
readPosition: Int,
cigarElementIndex: Int,
cigarElementLocus: Locus,
indexWithinCigarElement: Int,
contigSequence: ContigSequence) {
assume(locus >= read.start)
assume(locus < read.end)
val referenceBase: Byte = contigSequence(locus.toInt)
def cigarElement = read.cigarElements(cigarElementIndex)
def nextCigarElement =
if (cigarElementIndex + 1 < read.cigarElements.size) {
Some(read.cigarElements(cigarElementIndex + 1))
} else {
None
}
def referenceStringIndex =
(cigarElementLocus - read.start).toInt +
(if (cigarElement.getOperator.consumesReferenceBases()) indexWithinCigarElement else 0)
def cigarElementReadLength = CigarUtils.getReadLength(cigarElement)
def cigarElementReferenceLength = CigarUtils.getReferenceLength(cigarElement)
def cigarElementEndLocus = cigarElementLocus + cigarElementReferenceLength
/*
* True if this is the last base of the current cigar element.
*/
def isFinalCigarBase: Boolean = indexWithinCigarElement == cigarElement.getLength - 1
lazy val alignment: Alignment = {
val cigarOperator = cigarElement.getOperator
val nextBaseCigarElement = if (isFinalCigarBase) nextCigarElement else Some(cigarElement)
val nextBaseCigarOperator = nextBaseCigarElement.map(_.getOperator)
def makeInsertion(cigarElem: CigarElement) =
Insertion(
read.sequence.view(
readPosition,
readPosition + CigarUtils.getReadLength(cigarElem) + 1
),
read.baseQualities.view(
readPosition,
readPosition + CigarUtils.getReadLength(cigarElem) + 1
)
)
(cigarOperator, nextBaseCigarOperator) match {
// Since insertions by definition have no corresponding reference loci, there is a choice in whether we "attach"
// them to the preceding or following locus. Here we attach them to the preceding base, since that seems to be the
// conventional choice. That is, if we have a match followed by an insertion, the final base of the match will
// get combined with the insertion into one Alignment, at the match's reference locus.
case (CigarOperator.M, Some(CigarOperator.I)) | (CigarOperator.EQ, Some(CigarOperator.I)) =>
makeInsertion(nextCigarElement.get)
// The exception to the above is insertion at the start of a contig, where there is no preceding reference base to
// anchor to; in this case, the spec calls for including the reference base immediately after the insertion (the
// first reference base of the contig).
case (CigarOperator.I, Some(_)) if cigarElementLocus == 0 =>
makeInsertion(cigarElement)
// In general, a PileupElement pointing at an Insertion cigar-element is an error.
case (CigarOperator.I, _) => throw InvalidCigarElementException(this)
case (CigarOperator.M | CigarOperator.EQ | CigarOperator.X, Some(CigarOperator.D)) =>
val deletedBases = contigSequence.slice(locus.toInt, locus.toInt + nextCigarElement.get.getLength + 1)
val anchorBaseSequenceQuality = read.baseQualities(readPosition)
Deletion(deletedBases, anchorBaseSequenceQuality)
case (CigarOperator.D, _) =>
MidDeletion(referenceBase)
case (op, Some(CigarOperator.D)) =>
throw new AssertionError(
"Found deletion preceded by cigar operator %s at PileupElement for read %s at locus %d".format(
op, read.toString, locus)
)
case (CigarOperator.M, _) | (CigarOperator.EQ, _) | (CigarOperator.X, _) =>
val base: Byte = read.sequence(readPosition)
val quality = read.baseQualities(readPosition)
if (base == referenceBase) {
Match(base, quality)
} else {
Mismatch(base, quality, referenceBase)
}
case (CigarOperator.S, _) | (CigarOperator.N, _) | (CigarOperator.H, _) => Clipped
case (CigarOperator.P, _) =>
throw new AssertionError("`P` CIGAR-ops should have been ignored earlier in `findNextCigarElement`")
}
}
/* If you only care about what kind of CigarOperator is at this position, but not its associated sequence, then you
* can use these state variables.
*/
def isInsertion = alignment match { case Insertion(_, _) => true; case _ => false }
def isDeletion = alignment match { case Deletion(_, _) => true; case _ => false }
def isClipped = alignment match { case Clipped => true; case _ => false }
def isMidDeletion = alignment match { case MidDeletion(_) => true; case _ => false }
def isMismatch = alignment match { case Mismatch(_, _, _) => true; case _ => false }
def isMatch = alignment match { case Match(_, _) => true; case _ => false }
/**
* The sequenced nucleotides at this element.
*
* If the current element is a deletion, then this is the empty array. If it's
* an insertion, then this will be an array of length >= 1: the contents of
* the inserted sequence starting at the current locus. Otherwise, this is
* an array of length 1.
*/
def sequencedBases: Seq[Byte] = alignment.sequencedBases
def referenceBases: Seq[Byte] = alignment.referenceBases
lazy val allele: Allele = Allele(referenceBases, sequencedBases)
/*
* Base quality score, phred-scaled.
*
* For matches and mismatches this is the base quality score of the current base.
   * For insertions this is the minimum base quality score of all the bases in the insertion.
* For deletions this is the mapping quality as there are no base quality scores available.
*/
def qualityScore: Int = alignment match {
case Clipped | MidDeletion(_) => read.alignmentQuality
case Deletion(_, qs) => qs
case MatchOrMisMatch(_, qs) => qs
case Insertion(_, qss) => qss.min
}
def probabilityCorrectIgnoringAlignment: Double =
phredToSuccessProbability(qualityScore)
def probabilityCorrectIncludingAlignment: Double =
phredToSuccessProbability(qualityScore) * read.alignmentLikelihood
/**
* Returns a new [[PileupElement]] of the same read, advanced by one cigar element.
*/
def advanceToNextCigarElement: PileupElement = {
val readPositionOffset =
if (cigarElement.getOperator.consumesReadBases()) {
// If this [[CigarElement]] consumes read bases then [[readPosition]] will advance by the rest of this
// [[CigarElement]].
cigarElement.getLength - indexWithinCigarElement
} else {
// Otherwise, [[readPosition]] will stay the same.
0
}
val nextLocus = locus + (cigarElementReferenceLength - indexWithinCigarElement)
PileupElement(
read,
nextLocus,
readPosition + readPositionOffset,
cigarElementIndex + 1,
cigarElementLocus + cigarElementReferenceLength,
// Even if we are somewhere in the middle of the current cigar element, lock to the beginning of the next one.
indexWithinCigarElement = 0,
contigSequence
)
}
/**
* Returns whether the current cigar element of this [[org.hammerlab.guacamole.reads.MappedRead]] contains the given reference locus.
*
* Can only return true if the cigar element consumes reference bases.
*/
def currentCigarElementContainsLocus(referenceLocus: Locus): Boolean = {
cigarElementLocus <= referenceLocus && referenceLocus < cigarElementEndLocus
}
/**
* Returns a new [[PileupElement]] of the same read at a different locus.
*
* To enable an efficient implementation, newLocus must be greater than the current locus.
*
* @param newLocus The desired locus of the new [[PileupElement]]. It must be greater than the current locus, and
* not past the end of the current read.
* @return A new [[PileupElement]] at the given locus.
*/
@tailrec
final def advanceToLocus(newLocus: Locus): PileupElement = {
assume(newLocus >= locus, s"Can't rewind to locus $newLocus from $locus. Pileups only advance. $read")
assume(newLocus < read.end, "This read stops at position %d. Can't advance to %d".format(read.end, newLocus))
if (currentCigarElementContainsLocus(newLocus)) {
// Aside: the current cigar element must consume reference bases if we've gotten here.
val readPositionOffset =
if (cigarElement.getOperator.consumesReadBases()) {
(newLocus - cigarElementLocus - indexWithinCigarElement).toInt
} else {
// If this cigar doesn't consume read bases, then advancing within it will not consume [[readPosition]]
0
}
this.copy(
locus = newLocus,
readPosition = readPosition + readPositionOffset,
indexWithinCigarElement = (newLocus - cigarElementLocus).toInt
)
} else if (newLocus == 0 && cigarElement.getOperator == CigarOperator.I) {
// NOTE(ryan): this is the rare case where we allow a [[PileupElement]] to exist at a non-reference-consuming
// CigarElement (namely, an Insertion at the start of a contig). It is correct for us to emit an Insertion
// alignment in such a case, where typically an Insertion [[CigarElement]] would get skipped over by subsequent
// [[advanceToLocus]] calls, since it represents a zero-width interval of reference bases.
this
} else
advanceToNextCigarElement.advanceToLocus(newLocus)
}
/**
   * Distance from the sequencing end of the read.
   * If the read is on the positive strand, the sequencing end corresponds to the read's end position;
   * if it is on the negative strand, the sequencing end is the mapped start position.
*/
def distanceFromSequencingEnd = if (read.isPositiveStrand) read.end - locus else locus - read.start
}
object PileupElement {
/**
* Create a new [[PileupElement]] backed by the given read at the specified locus. The read must overlap the locus.
*/
def apply(read: MappedRead, locus: Locus, contigSequence: ContigSequence): PileupElement =
PileupElement(
read = read,
locus = read.start,
readPosition = 0,
cigarElementIndex = 0,
cigarElementLocus = read.start,
indexWithinCigarElement = 0,
contigSequence
).advanceToLocus(locus)
}
case class InvalidCigarElementException(elem: PileupElement)
extends Exception(
"Should not have a PileupElement at non-reference-consuming cigar-operator I. " +
"Locus: %d, readPosition: %d, cigar: %s (elem idx %d)".format(
elem.locus,
elem.readPosition,
elem.read.cigar.toString, elem.cigarElementIndex
)
)
| hammerlab/guacamole | src/main/scala/org/hammerlab/guacamole/pileup/PileupElement.scala | Scala | apache-2.0 | 11,813 |
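// Editor's note: the following is an illustrative sketch added for clarity; it is not part of
// the PileupElement.scala entry above. It walks a toy CIGAR (3M 2I 4M 1D 2M) and prints how the
// read offset and reference locus advance, mirroring the bookkeeping in
// PileupElement.advanceToNextCigarElement: M/X/= consume both read and reference bases, I
// consumes read bases only, and D consumes reference bases only. No htsjdk types are used, and
// the alignment start of 100 is arbitrary.
object CigarWalkSketch {
  final case class Op(op: Char, length: Int) {
    def consumesRead: Boolean = op == 'M' || op == 'I' || op == 'X' || op == '='
    def consumesRef: Boolean = op == 'M' || op == 'D' || op == 'X' || op == '='
  }

  def main(args: Array[String]): Unit = {
    val cigar = Seq(Op('M', 3), Op('I', 2), Op('M', 4), Op('D', 1), Op('M', 2))
    var readPosition = 0
    var locus = 100L
    cigar.foreach { elem =>
      println(s"${elem.op}${elem.length}: readPosition = $readPosition, locus = $locus")
      if (elem.consumesRead) readPosition += elem.length
      if (elem.consumesRef) locus += elem.length
    }
    println(s"end of read: readPosition = $readPosition, locus = $locus")
  }
}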
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples.mllib
import org.apache.spark.mllib.fpm.FPGrowth
import org.apache.spark.{SparkConf, SparkContext}
import scopt.OptionParser
/**
* Example for mining frequent itemsets using FP-growth.
 * Example usage: ./bin/run-example mllib.FPGrowthExample \
* --minSupport 0.8 --numPartition 2 ./data/mllib/sample_fpgrowth.txt
*/
object FPGrowthExample {
case class Params(
input: String = null,
minSupport: Double = 0.3,
numPartition: Int = -1) extends AbstractParams[Params]
def main(args: Array[String]) {
val defaultParams = Params()
val parser = new OptionParser[Params]("FPGrowthExample") {
head("FPGrowth: an example FP-growth app.")
opt[Double]("minSupport")
.text(s"minimal support level, default: ${defaultParams.minSupport}")
.action((x, c) => c.copy(minSupport = x))
opt[Int]("numPartition")
.text(s"number of partition, default: ${defaultParams.numPartition}")
.action((x, c) => c.copy(numPartition = x))
arg[String]("<input>")
.text("input paths to input data set, whose file format is that each line " +
"contains a transaction with each item in String and separated by a space")
.required()
.action((x, c) => c.copy(input = x))
}
parser.parse(args, defaultParams) match {
case Some(params) => run(params)
case _ => sys.exit(1)
}
}
def run(params: Params): Unit = {
val conf = new SparkConf().setAppName(s"FPGrowthExample with $params")
val sc = new SparkContext(conf)
val transactions = sc.textFile(params.input).map(_.split(" ")).cache()
println(s"Number of transactions: ${transactions.count()}")
val model = new FPGrowth()
.setMinSupport(params.minSupport)
.setNumPartitions(params.numPartition)
.run(transactions)
println(s"Number of frequent itemsets: ${model.freqItemsets.count()}")
model.freqItemsets.collect().foreach { itemset =>
println(itemset.items.mkString("[", ",", "]") + ", " + itemset.freq)
}
sc.stop()
}
}
// scalastyle:on println
| chgm1006/spark-app | src/main/scala/org/apache/spark/examples/mllib/FPGrowthExample.scala | Scala | apache-2.0 | 3,193 |
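// Editor's note: the following is an illustrative sketch added for clarity; it is not part of
// the FPGrowthExample.scala entry above. It shows what the --minSupport threshold means by
// counting single-item frequencies over a few hard-coded transactions, instead of running
// Spark's FP-growth. The transactions and the 0.6 threshold are made up.
object MinSupportSketch {
  def main(args: Array[String]): Unit = {
    val transactions = Seq(
      Seq("r", "z", "h"), Seq("z", "y", "x"), Seq("s", "x", "o"),
      Seq("r", "x", "n"), Seq("y", "r", "x", "z"))

    val minSupport = 0.6
    val minCount = math.ceil(minSupport * transactions.size).toLong

    val frequentItems = transactions.flatten
      .groupBy(identity)
      .map { case (item, occurrences) => item -> occurrences.size.toLong }
      .filter { case (_, count) => count >= minCount }

    println(s"an item is frequent if it appears in at least $minCount of ${transactions.size} transactions")
    frequentItems.toSeq.sortBy(-_._2).foreach { case (item, count) => println(s"[$item], $count") }
  }
}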
package com.lynbrookrobotics.potassium.commons.drivetrain.twoSided
import com.lynbrookrobotics.potassium.commons.drivetrain.ForwardVelocityGains
import com.lynbrookrobotics.potassium.commons.drivetrain.unicycle.UnicycleProperties
import com.lynbrookrobotics.potassium.control.PIDConfig
import com.lynbrookrobotics.potassium.units._
import squants.motion.{MetersPerSecond, MetersPerSecondSquared}
import squants.space.{Length, Meters}
import squants.{Each, Percent, Velocity}
trait TwoSidedDriveProperties extends UnicycleProperties {
val maxLeftVelocity: Velocity
val maxRightVelocity: Velocity
val leftVelocityGains: ForwardVelocityGains
val rightVelocityGains: ForwardVelocityGains
lazy val leftVelocityGainsFull: ForwardVelocityGains#Full =
leftVelocityGains.withF(Percent(100) / maxLeftVelocity)
lazy val rightVelocityGainsFull: ForwardVelocityGains#Full =
rightVelocityGains.withF(Percent(100) / maxRightVelocity)
lazy val maxForwardVelocity: Velocity = maxLeftVelocity min maxRightVelocity
val forwardVelocityGains: ForwardVelocityGains = PIDConfig(
Each(0) / MetersPerSecond(1),
Each(0) / Meters(1),
Each(0) / MetersPerSecondSquared(1)
)
val track: Length
val blendExponent: Double
}
| Team846/potassium | commons/src/main/scala/com/lynbrookrobotics/potassium/commons/drivetrain/twoSided/TwoSidedDriveProperties.scala | Scala | mit | 1,241 |
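// Editor's note: the following is an illustrative sketch added for clarity; it is not part of
// the TwoSidedDriveProperties.scala entry above. It shows, with plain doubles instead of squants
// quantities, what withF(Percent(100) / maxLeftVelocity) amounts to: a feed-forward gain that
// maps a velocity target to percent output and reaches 100% when the target equals the side's
// maximum velocity. The 4.5 m/s maximum is made up.
object FeedForwardSketch {
  def main(args: Array[String]): Unit = {
    val maxLeftVelocityMps = 4.5          // hypothetical maximum left-side speed, in m/s
    val kF = 100.0 / maxLeftVelocityMps   // percent output per (m/s), as in leftVelocityGainsFull
    Seq(0.0, 2.25, 4.5).foreach { target =>
      println(f"target $target%.2f m/s -> feed-forward output ${kF * target}%.1f%%")
    }
  }
}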