code | repo_name | path | language | license | size
---|---|---|---|---|---
stringlengths 5-1M | stringlengths 5-109 | stringlengths 6-208 | stringclasses 1 value | stringclasses 15 values | int64 5-1M
object g_c_d_euclid_recursive {
  def main(args: Array[String]): Unit = {
// Put code here
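    // Illustrative completion (not in the original stub): Euclid's recursive
    // GCD, matching the object name; the sample values below are made up.
    def gcd(a: Int, b: Int): Int = if (b == 0) a else gcd(b, a % b)
    println(gcd(36, 24)) // prints 12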
}
}
| LoyolaChicagoBooks/introcs-scala-examples | g_c_d_euclid_recursive/g_c_d_euclid_recursive.scala | Scala | gpl-3.0 | 93 |
package org.jetbrains.plugins.scala
package debugger.evaluateExpression
import org.jetbrains.plugins.scala.debugger._
/**
* Nikolay.Tropin
* 8/5/13
*/
class VariablesFromPatternsEvaluationTest extends VariablesFromPatternsEvaluationTestBase {
override implicit val version: ScalaVersion = Scala_2_11
}
class VariablesFromPatternsEvaluationTest_212 extends VariablesFromPatternsEvaluationTestBase {
override implicit val version: ScalaVersion = Scala_2_12
}
abstract class VariablesFromPatternsEvaluationTestBase extends ScalaDebuggerTestCase{
addFileWithBreakpoints("Match.scala",
s"""
|object Match {
| val name = "name"
| def main(args: Array[String]) {
| val x = (List(1, 2), Some("z"), None)
| x match {
| case all @ (list @ List(q, w), some @ Some(z), _) =>
| ""$bp
| case _ =>
| }
| }
|}
""".stripMargin.trim()
)
def testMatch() {
runDebugger() {
waitForBreakpoint()
evalEquals("all", "(List(1, 2),Some(z),None)")
evalEquals("list", "List(1, 2)")
evalEquals("x", "(List(1, 2),Some(z),None)")
evalEquals("name", "name")
evalEquals("q", "1")
evalEquals("z", "z")
evalEquals("some", "Some(z)")
evalEquals("args", "[]")
}
}
addFileWithBreakpoints("MatchInForStmt.scala",
s"""
|object MatchInForStmt {
| val name = "name"
| def main(args: Array[String]) {
| for (s <- List("a", "b"); if s == "a"; ss = s + s; i <- List(1,2); if i == 1; si = s + i) {
| val x = (List(1, 2), Some("z"), ss :: i :: Nil)
| x match {
| case all @ (q :: qs, some @ Some(z), list @ List(m, _)) =>
| ""$bp
| case _ =>
| }
| }
| }
|}
""".stripMargin.trim()
)
def testMatchInForStmt() {
runDebugger() {
waitForBreakpoint()
evalEquals("all", "(List(1, 2),Some(z),List(aa, 1))")
evalEquals("x", "(List(1, 2),Some(z),List(aa, 1))")
evalEquals("name", "name")
evalEquals("q", "1")
evalEquals("qs", "List(2)")
evalEquals("z", "z")
evalEquals("list", "List(aa, 1)")
evalEquals("m", "aa")
evalEquals("some", "Some(z)")
evalEquals("ss", "aa")
evalEquals("i", "1")
evalEquals("args", "[]")
}
}
addFileWithBreakpoints("RegexMatch.scala",
{
val pattern = """"(-)?(\\d+)(\\.\\d*)?".r"""
s"""
|object RegexMatch {
| val name = "name"
| def main(args: Array[String]) {
| val Decimal = $pattern
| "-2.5" match {
| case number @ Decimal(sign, _, dec) =>
| ""$bp
| case _ =>
| }
| }
|}
""".stripMargin.trim()
}
)
def testRegexMatch() {
runDebugger() {
waitForBreakpoint()
evalEquals("number", "-2.5")
evalEquals("sign", "-")
evalEquals("dec", ".5")
evalEquals("name", "name")
}
}
addFileWithBreakpoints("Multilevel.scala",
s"""
|object Multilevel {
| val name = "name"
| def main(args: Array[String]) {
| List(None, Some(1 :: 2 :: Nil)) match {
| case List(none, some) =>
| some match {
| case Some(seq) =>
| seq match {
| case Seq(1, two) =>
| ""$bp
| case _ =>
| }
| case _ =>
| }
| case _ =>
| }
| }
|}
""".stripMargin.trim()
)
def testMultilevel() {
runDebugger() {
waitForBreakpoint()
evalEquals("name", "name")
evalEquals("args", "[]")
evalEquals("none", "None")
evalEquals("some", "Some(List(1, 2))")
evalEquals("seq", "List(1, 2)")
evalEquals("two", "2")
}
}
addFileWithBreakpoints("LocalInMatch.scala",
s"""
|object LocalInMatch {
| val name = "name"
| def main(args: Array[String]) {
| Option("a") match {
| case None =>
| case some @ Some(a) =>
| def foo(i: Int) {
| ""$bp
| }
| foo(10)
| }
| }
|}
""".stripMargin.trim()
)
def testLocalInMatch() {
runDebugger() {
waitForBreakpoint()
evalEquals("name", "name")
evalEquals("args", "[]")
evalEquals("some", "Some(a)")
evalEquals("a", "a")
evalEquals("i", "10")
}
}
addFileWithBreakpoints("AnonymousInMatch.scala",
s"""
|object AnonymousInMatch {
| val name = "name"
| def main(args: Array[String]) {
| Option("a") match {
| case None =>
| case some @ Some(a) =>
| List(10) foreach { i =>
| ""$bp
| }
| }
| }
|}
""".stripMargin.trim()
)
def testAnonymousInMatch() {
runDebugger() {
waitForBreakpoint()
evalEquals("name", "name")
evalEquals("args", "[]")
evalEquals("some", "Some(a)")
evalEquals("a", "a")
evalEquals("i", "10")
}
}
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/debugger/evaluateExpression/VariablesFromPatternsEvaluationTest.scala | Scala | apache-2.0 | 5,292 |
/**
* Copyright 2011-2017 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.util.cache
import io.gatling.BaseSpec
class CacheSpec extends BaseSpec {
"ImmutableCache.get" should "return the value wrapped in an Option if key present in cache" in {
val cache = Cache.newImmutableCache[String, String](1)
val cacheWithValue = cache + ("key" -> "value")
cacheWithValue.get("key") shouldBe Some("value")
}
it should "return None if the key is not present in cache" in {
val cache = Cache.newImmutableCache[String, String](1)
cache.get("key") shouldBe None
}
"ImmutableCache.+" should "return the same instance when adding a key already in cache" in {
val cache = Cache.newImmutableCache[String, String](1)
val cacheWithValue = cache + ("key" -> "value")
val cacheWithSameValue = cacheWithValue + ("key" -> "value")
cacheWithSameValue should be theSameInstanceAs cacheWithValue
}
it should "overwrite the key first put in the cache when max capacity has been reached" in {
val cache = Cache.newImmutableCache[String, String](2)
val cacheWithFirstValue = cache + ("key" -> "value")
val cacheWithSecondValue = cacheWithFirstValue + ("key2" -> "value2")
val cacheWithThirdValue = cacheWithSecondValue + ("key3" -> "value3")
cacheWithThirdValue.get("key") shouldBe None
cacheWithThirdValue.get("key2") shouldBe Some("value2")
cacheWithThirdValue.get("key3") shouldBe Some("value3")
}
"ImmutableCache.-" should "remove a key from the cache " in {
val cache = Cache.newImmutableCache[String, String](1)
val cacheWithValue = cache + ("key" -> "value")
val cacheWithValueRemoved = cacheWithValue - "key"
cacheWithValueRemoved.get("key") shouldBe None
}
it should "return the same instance when removing a key absent from cache" in {
val cache = Cache.newImmutableCache[String, String](1)
val cacheWithValue = cache + ("key" -> "value")
val cacheWithValueRemoved = cacheWithValue - "key"
val cacheWithSameValueRemoved = cacheWithValueRemoved - "key"
cacheWithValueRemoved should be theSameInstanceAs cacheWithSameValueRemoved
}
}
| MykolaB/gatling | gatling-core/src/test/scala/io/gatling/core/util/cache/CacheSpec.scala | Scala | apache-2.0 | 2,719 |
package vexriscv
import vexriscv.plugin._
import spinal.core._
import scala.collection.mutable.ArrayBuffer
import scala.collection.Seq
object VexRiscvConfig{
def apply(withMemoryStage : Boolean, withWriteBackStage : Boolean, plugins : Seq[Plugin[VexRiscv]]): VexRiscvConfig = {
val config = VexRiscvConfig()
config.plugins ++= plugins
config.withMemoryStage = withMemoryStage
config.withWriteBackStage = withWriteBackStage
config
}
def apply(plugins : Seq[Plugin[VexRiscv]] = ArrayBuffer()) : VexRiscvConfig = apply(true,true,plugins)
}
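
// Illustrative sketch (not part of the original file): building a three-stage
// configuration (no writeback stage) with the apply above; `myPlugins` is a
// hypothetical plugin list.
object VexRiscvConfigExample {
  def threeStage(myPlugins: Seq[Plugin[VexRiscv]]): VexRiscvConfig =
    VexRiscvConfig(withMemoryStage = true, withWriteBackStage = false, plugins = myPlugins)
}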
trait VexRiscvRegressionArg{
def getVexRiscvRegressionArgs() : Seq[String]
}
case class VexRiscvConfig(){
var withMemoryStage = true
var withWriteBackStage = true
val plugins = ArrayBuffer[Plugin[VexRiscv]]()
def add(that : Plugin[VexRiscv]) : this.type = {plugins += that;this}
def find[T](clazz: Class[T]): Option[T] = {
plugins.find(_.getClass == clazz) match {
case Some(x) => Some(x.asInstanceOf[T])
case None => None
}
}
  def get[T](clazz: Class[T]): T = {
    plugins.find(_.getClass == clazz) match {
      case Some(x) => x.asInstanceOf[T]
      case None => throw new NoSuchElementException(s"Plugin ${clazz.getName} not found")
    }
  }
def withRvc = plugins.find(_.isInstanceOf[IBusFetcher]) match {
case Some(x) => x.asInstanceOf[IBusFetcher].withRvc
case None => false
}
def withRvf = find(classOf[FpuPlugin]) match {
case Some(x) => true
case None => false
}
def withRvd = find(classOf[FpuPlugin]) match {
case Some(x) => x.p.withDouble
case None => false
}
//Default Stageables
object IS_RVC extends Stageable(Bool)
object BYPASSABLE_EXECUTE_STAGE extends Stageable(Bool)
object BYPASSABLE_MEMORY_STAGE extends Stageable(Bool)
object RS1 extends Stageable(Bits(32 bits))
object RS2 extends Stageable(Bits(32 bits))
object RS1_USE extends Stageable(Bool)
object RS2_USE extends Stageable(Bool)
object RESULT extends Stageable(UInt(32 bits))
object PC extends Stageable(UInt(32 bits))
object PC_CALC_WITHOUT_JUMP extends Stageable(UInt(32 bits))
object INSTRUCTION extends Stageable(Bits(32 bits))
object INSTRUCTION_ANTICIPATED extends Stageable(Bits(32 bits))
object LEGAL_INSTRUCTION extends Stageable(Bool)
object REGFILE_WRITE_VALID extends Stageable(Bool)
object REGFILE_WRITE_DATA extends Stageable(Bits(32 bits))
object MPP extends PipelineThing[UInt]
object DEBUG_BYPASS_CACHE extends PipelineThing[Bool]
object SRC1 extends Stageable(Bits(32 bits))
object SRC2 extends Stageable(Bits(32 bits))
object SRC_ADD_SUB extends Stageable(Bits(32 bits))
object SRC_ADD extends Stageable(Bits(32 bits))
object SRC_SUB extends Stageable(Bits(32 bits))
object SRC_LESS extends Stageable(Bool)
object SRC_USE_SUB_LESS extends Stageable(Bool)
object SRC_LESS_UNSIGNED extends Stageable(Bool)
object SRC_ADD_ZERO extends Stageable(Bool)
object HAS_SIDE_EFFECT extends Stageable(Bool)
//Formal verification purposes
object FORMAL_HALT extends Stageable(Bool)
object FORMAL_PC_NEXT extends Stageable(UInt(32 bits))
object FORMAL_MEM_ADDR extends Stageable(UInt(32 bits))
object FORMAL_MEM_RMASK extends Stageable(Bits(4 bits))
object FORMAL_MEM_WMASK extends Stageable(Bits(4 bits))
object FORMAL_MEM_RDATA extends Stageable(Bits(32 bits))
object FORMAL_MEM_WDATA extends Stageable(Bits(32 bits))
object FORMAL_INSTRUCTION extends Stageable(Bits(32 bits))
object Src1CtrlEnum extends SpinalEnum(binarySequential){
val RS, IMU, PC_INCREMENT, URS1 = newElement() //IMU, IMZ IMJB
}
object Src2CtrlEnum extends SpinalEnum(binarySequential){
    val RS, IMI, IMS, PC = newElement() //TODO replacing ZERO could avoid 32 muxes if SRC_ADD can be disabled
}
object SRC1_CTRL extends Stageable(Src1CtrlEnum())
object SRC2_CTRL extends Stageable(Src2CtrlEnum())
def getRegressionArgs() : Seq[String] = {
val str = ArrayBuffer[String]()
plugins.foreach{
case e : VexRiscvRegressionArg => str ++= e.getVexRiscvRegressionArgs()
case _ =>
}
str
}
}
class VexRiscv(val config : VexRiscvConfig) extends Component with Pipeline{
type T = VexRiscv
import config._
//Define stages
def newStage(): Stage = { val s = new Stage; stages += s; s }
val decode = newStage()
val execute = newStage()
val memory = ifGen(config.withMemoryStage) (newStage())
val writeBack = ifGen(config.withWriteBackStage) (newStage())
def stagesFromExecute = stages.dropWhile(_ != execute)
plugins ++= config.plugins
//regression usage
  val lastStageInstruction = CombInit(stages.last.input(config.INSTRUCTION)).keep().addAttribute(Verilator.public)
val lastStagePc = CombInit(stages.last.input(config.PC)).keep().addAttribute(Verilator.public)
val lastStageIsValid = CombInit(stages.last.arbitration.isValid).keep().addAttribute(Verilator.public)
val lastStageIsFiring = CombInit(stages.last.arbitration.isFiring).keep().addAttribute(Verilator.public)
//Verilator perf
decode.arbitration.removeIt.noBackendCombMerge
if(withMemoryStage){
memory.arbitration.removeIt.noBackendCombMerge
}
execute.arbitration.flushNext.noBackendCombMerge
}
| SpinalHDL/VexRiscv | src/main/scala/vexriscv/VexRiscv.scala | Scala | mit | 5,185 |
package capitulo03
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
import scala.collection.mutable.ArrayBuffer
import scala.util.Sorting
@RunWith(classOf[JUnitRunner])
class AlgoritmosComuns extends FunSuite{
test("Soma de elementos da array"){
assert(19 == Array(1,7,2,9).sum)
assert(19 == ArrayBuffer(1,7,2,9).sum)
}
test("Máximo elemento"){
assert("little" == Array("Mary", "had", "a", "little", "lamb").max)
assert("little" == ArrayBuffer("Mary", "had", "a", "little", "lamb").max)
}
test("Ordenação"){
assert(Array(1,2,7,9) === Array(1,7,2,9).sorted)
assert(ArrayBuffer(1,2,7,9) === ArrayBuffer(1,7,2,9).sorted)
}
test("É possível fornecer uma função de comparação"){
assert(Array(9,7,2,1) === Array(1,7,2,9).sortWith(_ > _))
assert(ArrayBuffer(9,7,2,1) === ArrayBuffer(1,7,2,9).sortWith(_ > _))
}
test("Quick sort"){
val a = Array(1,7,2,9)
    Sorting.quickSort(a) //Does not work for ArrayBuffer
assert(Array(1,2,7,9) === a)
}
test("Exibir conteúdo do array"){
val a = Array(1,7,2,9)
assert("1 and 7 and 2 and 9" == a.mkString(" and "))
assert("<1,7,2,9>" == a.mkString("<", ",", ">"))
    //the toString method, inherited from Java, is confusing
assert("Array(1, 7, 2, 9)" != a.toString)
    //ArrayBuffer's toString is clearer
assert("ArrayBuffer(1, 7, 2, 9)" == ArrayBuffer(1,7,2,9).toString)
}
}
| celioeduardo/scala-impatient | src/test/scala/capitulo03/AlgoritmosComuns.scala | Scala | mit | 1,462 |
package com.ctask
import org.scalatest.{FlatSpec, Matchers}
/**
* Spec file for settings.
*/
class ServerPropertiesSpec extends FlatSpec with Matchers {
behavior of "ServerProperties"
val defaultValueStr = "def"
val defaultValueInt = 3
it should "return the default value if a property is not in the config file" in {
ServerProperties.getPropertyInt("non existent property", defaultValueInt) shouldBe defaultValueInt
ServerProperties.getPropertyStr("non existent property", defaultValueStr) shouldBe defaultValueStr
}
it should "return the default value if a property is empty in the config file" in {
ServerProperties.getPropertyInt("com.ctask.nop.prop", defaultValueInt) shouldBe defaultValueInt
ServerProperties.getPropertyStr("com.ctask.nop.prop", defaultValueStr) shouldBe defaultValueStr
}
it should "give priority to a property defined via java properties over the config file" in {
val propKey = "com.ctask.storage.root"
try {
val javaPropVal = "java prop val "
System.setProperty(propKey, javaPropVal)
ServerProperties.getPropertyStr(propKey, defaultValueStr) shouldBe javaPropVal
} finally {
System.clearProperty(propKey)
}
}
}
| modsrm/ctask | server/src/test/scala/com/ctask/ServerPropertiesSpec.scala | Scala | gpl-3.0 | 1,224 |
object patternUnsoundness extends App {
class C[+T]
case class D[S](_s: S) extends C[S] {
var s: S = _s
}
val x = new D[String]("abc")
val y: C[Object] = x
y match {
case d @ D(x) => d.s = new Integer(1)
}
  val z: String = x.s // ClassCastException: the match above stored an Integer in a D[String]
}
| DarkDimius/dotty | tests/neg/patternUnsoundness.scala | Scala | bsd-3-clause | 287 |
/*
* Copyright 2017 by Simba Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.spark.sql.simba.index
import org.apache.spark.sql.catalyst.InternalRow
import org.scalatest.FunSuite
/**
* Created by Zhihao Bai on 16-7-19.
*/
class TreapSuite extends FunSuite{
val data = new Array[(Int, InternalRow)](101)
val divisor = 11
for(i <- 0 to 100) {
data(i) = (i % divisor, null)
}
val treap = Treap.apply[Int](data)
test("Treap: rank"){
var rank = 0
var count = 0
for(i <- 0 to 10) {
count = 101 / divisor
if(i == 0 || i == 1) count = count + 1
rank += count
assert(treap.rank(i) == rank)
}
}
test("Treap: getCount"){
var count = 0
for(i <- 0 to 10) {
count = 101 / divisor
if(i == 0 || i == 1) count = count + 1
assert(treap.getCount(i) == count)
}
}
test("Treap: find"){
var count = 0
for(i <- 0 to 10) {
count = 101 / divisor
if(i == 0 || i == 1) count = count + 1
      val ints = treap.find(i)
assert(ints.length == count)
for(j <- ints) {
assert(j % divisor == i)
}
}
}
test("Treap: range, simple"){
    val ints = treap.range(3, 5)
for(j <- ints){
assert(j % divisor >= 3 && j % divisor <= 5)
}
}
test("Treap: range, complex"){
val ints = treap.range(3, 5, 5, 1.1, isNumeric = true).get
for(j <- ints){
assert(j % divisor >= 3 && j % divisor <= 5)
}
val ints_none = treap.range(0, 10, 1, 0.0, isNumeric = true)
assert(ints_none.isEmpty)
}
}
| InitialDLab/Simba | src/test/scala/org/apache/spark/sql/simba/index/TreapSuite.scala | Scala | apache-2.0 | 2,091 |
/*
* Copyright (c) 2010-2011 Belmont Technology Pty Ltd. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sodatest.api
/**
* SodaFixture is the primary interface between a Test and your code.
*
* Soda Tests use a SodaFixture to create [[org.sodatest.api.SodaEvent]] and [[org.sodatest.api.SodaReport]] instances that are then used
* to affect and query the system under test. Events and Reports differ in that Events are used only
* to affect the System under test, and not to query it, while Reports should be used only to
 * query the System, not to affect it. This is known as [[http://en.wikipedia.org/wiki/Command-query_separation Command-Query Separation]] and is an
* important principle in maintaining the simplicity and readability of Soda Tests.
*
* <b>Note:</b> A SodaFixture should return a new instance of a SodaEvent or SodaReport for
* every call to one of the create methods. Events and Reports are not intended to be re-used.
* As a consequence, the creation of these objects should be as computationally simple as possible.
* Any expensive initialisation whose result can be cached for efficiency should be done either in
* the initialisation on the SodaFixture instance or somewhere else external to the Events and Reports.
*
* @see [[org.sodatest.api.reflection.ReflectiveSodaFixture]]
* @see [[org.sodatest.api.java.reflection.ReflectiveSodaFixtureForJava]]
*/
trait SodaFixture {
/**
* Creates and returns a SodaEvent based on the specified name.
*
* Note that createEvent() should <b>always</b> create a new Event instance. SodaEvent instances
* should not be re-used.
*
* @return a SodaEvent, wrapped in a Some, or None if the Fixture doesn't know how to create an
* Event for the given name.
*/
def createEvent(name: String): Option[SodaEvent]
/**
* Creates and returns a SodaReport based on the specified name.
*
* Note that createReport() should <b>always</b> create a new Report instance. SodaReport instances
* should not be re-used.
*
* @return a SodaReport, wrapped in a Some, or None if the Fixture doesn't know how to create a
* Report for the given name.
*/
def createReport(name: String): Option[SodaReport]
}
/**
* Implicit functions that can aid in the authoring of [[org.sodatest.api.SodaFixture]]s
*/
object SodaFixture {
implicit def sodaEvent2Option(e: SodaEvent): Option[SodaEvent] = Some(e)
implicit def sodaReport2Option(r: SodaReport): Option[SodaReport] = Some(r)
implicit def function2EventOption(f: (Map[String, String]) => Unit): Option[SodaEvent] = new SodaEvent {
def apply(parameters: Map[String, String]) = f(parameters)
}
implicit def function2ReportOption(f: (Map[String, String]) => Seq[Seq[String]]): Option[SodaReport] = new SodaReport {
def apply(parameters: Map[String, String]) = f(parameters)
}
}
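
// Illustrative sketch (not part of the original file): a minimal fixture that
// relies on the implicit conversions above. The "Ping" event and "Status"
// report names, and their behaviour, are hypothetical.
object ExampleFixture extends SodaFixture {
  import SodaFixture._

  // Command: affects the system under test, returns nothing
  def createEvent(name: String): Option[SodaEvent] = name match {
    case "Ping" => (parameters: Map[String, String]) => ()
    case _ => None
  }

  // Query: reads the system under test, returns a table of strings
  def createReport(name: String): Option[SodaReport] = name match {
    case "Status" => (parameters: Map[String, String]) => List(List("OK"))
    case _ => None
  }
}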
| GrahamLea/SodaTest | sodatest-api/src/main/scala/org/sodatest/api/SodaFixture.scala | Scala | apache-2.0 | 3,405 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.export.formats
import com.typesafe.scalalogging.LazyLogging
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.locationtech.geomesa.fs.storage.api.FileSystemStorage.FileSystemWriter
import org.locationtech.geomesa.fs.storage.common.jobs.StorageConfiguration
import org.locationtech.geomesa.fs.storage.orc.OrcFileSystemWriter
import org.locationtech.geomesa.parquet.ParquetFileSystemStorage.ParquetFileSystemWriter
import org.locationtech.geomesa.utils.io.PathUtils
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
/**
* Export to a FileSystem data store format
*/
abstract class FileSystemExporter(path: String) extends FeatureExporter {
private lazy val handle = PathUtils.getHandle(path)
private var writer: FileSystemWriter = _
protected def createWriter(sft: SimpleFeatureType): FileSystemWriter
override def start(sft: SimpleFeatureType): Unit = {
if (writer != null) {
writer.close()
}
writer = createWriter(sft)
}
override def export(features: Iterator[SimpleFeature]): Option[Long] = {
require(writer != null, "Must call 'start' before 'export'")
var i = 0L
features.foreach { f => writer.write(f); i += 1 }
Some(i)
}
override def bytes: Long = handle.length
override def close(): Unit = {
if (writer != null) {
writer.close()
}
}
}
object FileSystemExporter extends LazyLogging {
class ParquetFileSystemExporter(path: String) extends FileSystemExporter(path) {
override protected def createWriter(sft: SimpleFeatureType): FileSystemWriter = {
val conf = new Configuration()
StorageConfiguration.setSft(conf, sft)
try { Class.forName("org.xerial.snappy.Snappy") } catch {
case _: ClassNotFoundException =>
logger.warn("SNAPPY compression is not available on the classpath - falling back to GZIP")
conf.set("parquet.compression", "GZIP")
}
// use PathUtils.getUrl to handle local files, otherwise default can be in hdfs
new ParquetFileSystemWriter(sft, new Path(PathUtils.getUrl(path).toURI), conf)
}
}
class OrcFileSystemExporter(path: String) extends FileSystemExporter(path) {
override protected def createWriter(sft: SimpleFeatureType): FileSystemWriter = {
// use PathUtils.getUrl to handle local files, otherwise default can be in hdfs
new OrcFileSystemWriter(sft, new Configuration(), new Path(PathUtils.getUrl(path).toURI))
}
}
}
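
// Illustrative sketch (not part of the original file): choosing between the two
// writers above by file extension; the ".orc" naming convention is an assumption.
object FileSystemExporterDemo {
  def exporterFor(path: String): FileSystemExporter =
    if (path.endsWith(".orc")) new FileSystemExporter.OrcFileSystemExporter(path)
    else new FileSystemExporter.ParquetFileSystemExporter(path)
}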
| aheyne/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/export/formats/FileSystemExporter.scala | Scala | apache-2.0 | 2,990 |
package poly.collection
/**
* @author Tongfei Chen
*/
object specgroup {
type unsp = scala.annotation.unspecialized
type sp = scala.specialized
final val fd = new Specializable.Group((Float, Double))
final val di = new Specializable.Group((Int, Double))
final val fdi = new Specializable.Group((Int, Float, Double))
final val dil = new Specializable.Group((Int, Double, Long))
final val fdil = new Specializable.Group((Int, Float, Double, Long))
final val fdib = new Specializable.Group((Int, Float, Double, Boolean))
final val fdilb = new Specializable.Group((Int, Float, Double, Boolean, Long))
final val dib = new Specializable.Group((Int, Double, Boolean))
final val ib = new Specializable.Group((Int, Boolean))
final val il = new Specializable.Group((Int, Long))
final val ilb = new Specializable.Group((Int, Long, Boolean))
/**
* Specializes what [[scala.Tuple1]] specialized.
*/
final val spTuple1 = new Specializable.Group((Int, Long, Double))
/**
* Specializes what [[scala.Tuple2]] specialized.
*/
final val spTuple2 = new Specializable.Group((Int, Long, Double, Char, Boolean))
/**
* Specializes what the argument of [[scala.Function1]] specialized.
*/
final val spFunc1 = new Specializable.Group((Int, Long, Float, Double))
/**
* Specializes what the result type of [[scala.Function1]]/[[scala.Function2]] specialized.
*/
final val spFuncR = new Specializable.Group((Unit, Boolean, Int, Float, Long, Double))
/**
* Specializes what the argument of [[scala.Function2]] specialized.
*/
final val spFunc2 = new Specializable.Group((Int, Long, Double))
}
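
// Illustrative sketch (not part of the original file): a definition-site use of
// the groups above via the `sp` alias. `Box` is a hypothetical container
// specialized for Int, Float and Double.
object specgroupExample {
  import specgroup._
  // Generates Int, Float and Double variants of Box alongside the generic one
  trait Box[@sp(fdi) T] { def value: T }
}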
| ctongfei/poly-collection | core/src/main/scala/poly/collection/specgroup.scala | Scala | mit | 1,648 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600e.v3
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.ct600e.v3.retriever.CT600EBoxRetriever
class E170Spec extends WordSpec with Matchers with MockitoSugar {
val boxRetriever = mock[CT600EBoxRetriever]
"E170" should {
"calculate" when {
"both E170A and E170B has a value" in {
when(boxRetriever.e170A()).thenReturn(E170A(Some(337)))
when(boxRetriever.e170B()).thenReturn(E170B(Some(1000)))
E170.calculate(boxRetriever) shouldBe E170(Some(1337))
}
"either E170A or E170B has a value" in {
when(boxRetriever.e170A()).thenReturn(E170A(Some(337)))
when(boxRetriever.e170B()).thenReturn(E170B(None))
E170.calculate(boxRetriever) shouldBe E170(Some(337))
}
}
"return none" when{
"both E170A and E170B is None" in {
when(boxRetriever.e170A()).thenReturn(E170A(None))
when(boxRetriever.e170B()).thenReturn(E170B(None))
E170.calculate(boxRetriever) shouldBe E170(None)
}
}
}
}
| liquidarmour/ct-calculations | src/test/scala/uk/gov/hmrc/ct/ct600e/v3/E170Spec.scala | Scala | apache-2.0 | 1,757 |
/*
* Copyright 2010-2014 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mongodb
package record
package field
import org.bson.types.ObjectId
import org.specs2.mutable.Specification
import net.liftweb.common._
import net.liftweb.json.ext.EnumSerializer
import net.liftweb.record.field.{EnumField, OptionalEnumField}
import net.liftweb.util.Helpers._
import com.mongodb._
package enumfieldspecs {
object WeekDay extends Enumeration {
type WeekDay = Value
val Mon, Tue, Wed, Thu, Fri, Sat, Sun = Value
}
case class JsonObj(dow: WeekDay.WeekDay) extends JsonObject[JsonObj] {
def meta = JsonObj
}
object JsonObj extends JsonObjectMeta[JsonObj]
class EnumRec extends MongoRecord[EnumRec] with ObjectIdPk[EnumRec] {
def meta = EnumRec
object dow extends EnumField(this, WeekDay)
object dowOptional extends OptionalEnumField(this, WeekDay)
object jsonobj extends JsonObjectField[EnumRec, JsonObj](this, JsonObj) {
def defaultValue = JsonObj(WeekDay.Mon)
}
override def equals(other: Any): Boolean = other match {
case that: EnumRec =>
this.id.get == that.id.get &&
this.dow.value == that.dow.value &&
this.dowOptional.valueBox == that.dowOptional.valueBox &&
this.jsonobj.value == that.jsonobj.value
case _ => false
}
}
object EnumRec extends EnumRec with MongoMetaRecord[EnumRec] {
override def collectionName = "enumrecs"
override def formats = super.formats + new EnumSerializer(WeekDay)
}
}
/**
* Systems under specification for EnumField.
*/
class EnumFieldSpec extends Specification with MongoTestKit {
"EnumField Specification".title
import enumfieldspecs._
"EnumField" should {
"work with default values" in {
checkMongoIsRunning
val er = EnumRec.createRecord.save()
val erFromDb = EnumRec.find(er.id.get)
erFromDb must beLike {
case Full(er2) =>
er2 mustEqual er
er2.dow.value mustEqual WeekDay.Mon
er2.dowOptional.valueBox mustEqual Empty
er2.jsonobj.value mustEqual JsonObj(WeekDay.Mon)
}
}
"work with set values" in {
checkMongoIsRunning
val er = EnumRec.createRecord
.dow(WeekDay.Tue)
.jsonobj(JsonObj(WeekDay.Sun))
.save()
val erFromDb = EnumRec.find(er.id.get)
erFromDb must beLike {
case Full(er2) =>
er2 mustEqual er
er2.dow.value mustEqual WeekDay.Tue
er2.jsonobj.value mustEqual JsonObj(WeekDay.Sun)
}
}
"work with Empty optional values" in {
checkMongoIsRunning
val er = EnumRec.createRecord
er.dowOptional.setBox(Empty)
er.save()
val erFromDb = EnumRec.find(er.id.get)
erFromDb must beLike {
case Full(er2) =>
er2 mustEqual er
er2.dowOptional.valueBox mustEqual Empty
}
}
"work with Full optional values" in {
checkMongoIsRunning
val er = EnumRec.createRecord
er.dowOptional.setBox(Full(WeekDay.Sat))
er.save()
val erFromDb = EnumRec.find(er.id.get)
erFromDb must beLike {
case Full(er2) =>
er2 mustEqual er
er2.dowOptional.valueBox mustEqual Full(WeekDay.Sat)
}
}
}
}
| lzpfmh/framework-2 | persistence/mongodb-record/src/test/scala/net/liftweb/mongodb/record/field/EnumFieldSpec.scala | Scala | apache-2.0 | 3,848 |
package models
import play.api.libs.json.{Json, OFormat}
import reactivemongo.bson.BSONObjectID
import reactivemongo.play.json._
/**
* Created by Ankesh on 3/28/2017.
*/
case class Blog(_id: Option[BSONObjectID], title: String, author: String, body: String)
object Blog {
implicit val formatter: OFormat[Blog] = Json.format[Blog]
}
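
// Illustrative sketch (not part of the original file): with the OFormat above in
// scope, a Blog round-trips through Play JSON; the field values are made up.
object BlogJsonDemo {
  def roundTrip(): Blog = {
    val blog = Blog(None, "First post", "Ankesh", "Hello, world")
    Json.toJson(blog).as[Blog] // serialise via Blog.formatter, then read back
  }
}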
| Dashbrd/scala-blog | app/models/Blog.scala | Scala | apache-2.0 | 340 |
package com.gigaspaces.csvwriter
import org.springframework.transaction.annotation.{Propagation, Transactional}
import com.gigaspaces.document.SpaceDocument
import org.openspaces.core.GigaSpace
import com.gigaspaces.csvwriter.AppSettings._
/**
* Created by IntelliJ IDEA.
* User: jason
* Date: 4/11/14
* Time: 8:00 PM
*/
class SpaceDocumentWriter(gigaSpace: GigaSpace){
@Transactional(propagation = Propagation.REQUIRED)
def write(spaceDocs: Seq[SpaceDocument]): Unit = {
val docsSize = spaceDocs.size
require(docsSize <= batchSize, s"Batch size too large for space docs: $docsSize")
spaceDocs.foreach{doc => gigaSpace.write(doc)}
}
}
| jasonnerothin/gs-csvwriter | src/main/scala/com/gigaspaces/csvwriter/SpaceDocumentWriter.scala | Scala | apache-2.0 | 668 |
package com.stefansavev.randomprojections.implementation
import com.stefansavev.randomprojections.datarepr.sparse.SparseVector
object HadamardUtils {
  // Assumes k is a power of 2.
  // TODO: make it work when k is not a power of 2.
  val eps = 0.0001

  // Unnormalized fast Walsh-Hadamard transform: recursively transforms each
  // half of [from, to), then combines the halves with butterfly steps
  // (a, b) -> (a + b, a - b).
  def recurse(from: Int, to: Int, input: Array[Double], output: Array[Double]): Unit = {
if (to - from == 1){
output(from) = input(from)
}
else if (to - from == 2){
val a = input(from)
val b = input(from + 1)
output(from) = a + b
output(from + 1) = a - b
}
else if (to - from == 4){
var a = input(from)
var b = input(from + 1)
output(from) = a + b
output(from + 1) = a - b
a = input(from + 2)
b = input(from + 3)
output(from + 2) = a + b
output(from + 3) = a - b
a = output(from)
b = output(from + 2)
output(from) = a + b
output(from + 2) = a - b
a = output(from + 1)
b = output(from + 3)
output(from + 1) = a + b
output(from + 3) = a - b
}
else{
val mid = from + (to - from)/2
recurse(from, mid, input, output)
recurse(mid, to, input, output)
var j1 = from
var j2 = mid
while(j1 < mid){
val a = output(j1)
val b = output(j2)
output(j1) = a + b
output(j2) = a - b
j1 += 1
j2 += 1
}
}
}
object MaxAbsValue{
    // In some cases the data we store is normalized, but at query time it is not.
    val V: Double = 0.0001 // need to add test cases: this method does not work for large V //0.01
}
def argAbsValueMax(dim: Int, values: Array[Double]): Int = {
    // Sometimes the values arrive here normalized and sometimes not; this needs fixing.
    var i = 0
    var maxAbsValue = MaxAbsValue.V // 0.001 //try changing this later
var sign = 1
var maxIdx = 2*dim //last cell is reserved for empty
while(i < dim){ //forcing split into two
val v = values(i)
if (v > maxAbsValue){
maxAbsValue = v
sign = 1
maxIdx = i
}
else if (-v > maxAbsValue){
maxAbsValue = -v
sign = -1
maxIdx = i
}
i += 1
}
//println("max abs. value " + (maxAbsValue, maxIdx) )
if (maxIdx != 2*dim){
if (sign > 0) maxIdx else maxIdx + dim
}
else{
maxIdx
}
}
def constrainedArgAbsValueMax(dim: Int, values: Array[Double], availableData: Array[RandomTree]): Int = {
    // Sometimes the values arrive here normalized and sometimes not; this needs fixing.
    var i = 0
    var maxAbsValue = MaxAbsValue.V // 0.001 //try changing this later
var sign = 1
var maxIdx = 2*dim //last cell is reserved for empty
while(i < dim){ //forcing split into two
val v = values(i)
if (v > maxAbsValue && availableData(i) != null ){
maxAbsValue = v
sign = 1
maxIdx = i
}
else if (-v > maxAbsValue && availableData(i + dim) != null){
maxAbsValue = -v
sign = -1
maxIdx = i
}
i += 1
}
//println("max abs. value " + (maxAbsValue, maxIdx) )
if (maxIdx != 2*dim){
if (sign > 0) maxIdx else maxIdx + dim
}
else{
maxIdx
}
}
def getAbsValue(dim: Int, values: Array[Double], prevBucketIndex: Int): Double = {
if (prevBucketIndex >= 2*dim){
0.0
}
else {
val prevAbsMax = if (prevBucketIndex < dim) values(prevBucketIndex) else -values(prevBucketIndex - dim)
prevAbsMax
}
}
def nextArgAbsMax(dim: Int, values: Array[Double], prevBucketIndex: Int): Int = {
val prevAbsMax = getAbsValue(dim, values, prevBucketIndex) //if (prevBucketIndex < values.length) values(prevBucketIndex) else -values(prevBucketIndex - values.length)
var i = 0
var maxAbsValue = MaxAbsValue.V //0.001
var sign = 1
var maxIdx = 2*dim
while(i < dim){ //forcing split into two
val v = values(i)
if (v > maxAbsValue && v < prevAbsMax){
maxAbsValue = v
sign = 1
maxIdx = i
}
else if (-v > maxAbsValue && -v < prevAbsMax){
maxAbsValue = -v
sign = -1
maxIdx = i
}
i += 1
}
if (maxIdx != -1){
if (sign > 0) maxIdx else maxIdx + dim
}
else{
maxIdx
}
}
def largestPowerOf2(k: Int): Int = {
var j = 0
var i = 1
while(i <= k){
j = i
i = i + i
}
j
}
def multiplyInto(input: Array[Double], output: Array[Double]): Unit = {
val k = largestPowerOf2(input.length)
var i = 0
while(i < output.length){
output(i) = 0.0
i += 1
}
recurse(0, k, input, output)
}
def multiplyInto(dim: Int, input: Array[Double], output: Array[Double]): Unit = {
val k = dim
var i = 0
while(i < dim){
output(i) = 0.0
i += 1
}
recurse(0, k, input, output)
}
def normalizeOutput(dim: Int, output: Array[Double]): Unit = {
val norm = Math.sqrt(dim)
var i = 0
while(i < dim){
output(i) /= norm
i += 1
}
}
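
  // Worked sketch (illustrative, not in the original source): for dim = 4 and
  // input (1, 0, 0, 0), multiplyInto yields (1, 1, 1, 1); normalizeOutput then
  // divides by sqrt(4), giving (0.5, 0.5, 0.5, 0.5).
  def demoTransform(): Array[Double] = {
    val input = Array(1.0, 0.0, 0.0, 0.0)
    val output = new Array[Double](4)
    multiplyInto(4, input, output)
    normalizeOutput(4, output)
    output
  }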
def roundUp(dim: Int): Int = {
val powOf2 = largestPowerOf2(dim)
val k = if (powOf2 == dim) dim else 2*powOf2
k
}
def roundDown(dim: Int): Int = {
largestPowerOf2(dim)
}
def computeHadamardFeatures(signs: SparseVector, query: Array[Double], input: Array[Double], output: Array[Double]): Unit = {
val dim = signs.dim
//TODO: move to function (significant overlap with code in Signatures)
var j = 0
while(j < input.length){
input(j)= 0.0
j += 1
}
j = 0
while(j < signs.ids.length){
val index = signs.ids(j)
val b = signs.values(j)
val a = query(index)
input(j) = a * b
j += 1
}
HadamardUtils.multiplyInto(input.length, input, output)
}
}
| codeaudit/random-projections-at-berlinbuzzwords | src/main/scala/com/stefansavev/randomprojections/implementation/HadamardUtils.scala | Scala | apache-2.0 | 5,810 |
/*
* Copyright 2016 Nikolay Donets
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.nikdon.telepooz.model
/**
* You can provide an animation for your game so that it looks stylish in chats (check out Lumberjack for an example).
* This object represents an animation file to be displayed in the message containing a game.
*
* @param file_id Unique file identifier
* @param thumb Animation thumbnail as defined by sender
* @param file_name Original animation filename as defined by sender
* @param mime_type MIME type of the file as defined by sender
* @param file_size File size
*/
case class Animation(
file_id: String,
thumb: Option[PhotoSize] = None,
file_name: Option[String] = None,
mime_type: Option[String] = None,
file_size: Option[Int] = None
)
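
// Illustrative sketch (not part of the original file): only file_id is required;
// the remaining metadata defaults to None. The identifier and file name below
// are made up.
object AnimationExample {
  val lumberjackIntro: Animation =
    Animation(file_id = "hypothetical-file-id", file_name = Some("intro.mp4"), mime_type = Some("video/mp4"))
}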
| nikdon/telepooz | src/main/scala/com/github/nikdon/telepooz/model/Animation.scala | Scala | apache-2.0 | 1,332 |
package generator.graphql
import cats.data.Validated.{Invalid, Valid}
import io.apibuilder.generator.v0.models.{File, InvocationForm}
import io.apibuilder.graphql.GraphQLCodeGenerator
import io.apibuilder.validation.{ApiBuilderService, MultiService}
import lib.generator.CodeGenerator
object GraphQLApolloGenerator extends CodeGenerator {
override def invoke(form: InvocationForm): Either[Seq[String], Seq[File]] = {
GraphQLCodeGenerator.Default.generate(toMultiService(form)) match {
case Valid(r) => Right(r.files)
case Invalid(errors) => Left(errors.toNonEmptyList.toList)
}
}
def toMultiService(form: InvocationForm): MultiService = {
MultiService(
(Seq(form.service) ++ form.importedServices.getOrElse(Nil)).map(ApiBuilderService.apply).toList
)
}
}
| mbryzek/apidoc-generator | graphql-generator/src/main/scala/generator/graphql/GraphQLApolloGenerator.scala | Scala | mit | 802 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package iht.viewmodels.estateReports
import iht.connector.IhtConnector
import iht.models.application.IhtApplication
import iht.utils.CustomLanguageUtils.Dates
import iht.utils.{ApplicationStatus => AppStatus}
import play.api.Logging
import play.api.i18n.Messages
import play.api.mvc.Call
case class YourEstateReportsRowViewModel(deceasedName: String,
ihtRefNo: String,
dateOfDeath: String,
currentStatus: String,
linkLabel: String,
link: Call,
linkScreenreader: String)
object YourEstateReportsRowViewModel extends Logging {
def apply(nino: String, ihtApp: IhtApplication, ihtConnector: IhtConnector, currentStatus: String)
(implicit messages: Messages): YourEstateReportsRowViewModel = {
val ihtRef = ihtApp.ihtRefNo
new YourEstateReportsRowViewModel(deceasedName = s"${ihtApp.firstName} ${ihtApp.lastName}",
ihtRefNo = ihtApp.ihtRefNo,
dateOfDeath = Dates.formatDate(ihtApp.dateOfDeath)(messages).toString,
currentStatus = getApplicationStatusMessage(currentStatus)(messages),
linkLabel = getLinkLabel(currentStatus)(messages),
link = getLink(currentStatus, ihtRef),
linkScreenreader = getLinkScreenreader(currentStatus, s"${ihtApp.firstName} ${ihtApp.lastName}")(messages)
)
}
private def getApplicationStatusMessage(currentStatus: String)(implicit messages: Messages) = {
currentStatus match {
case AppStatus.NotStarted => messages("iht.notStarted")
case AppStatus.InProgress => messages("iht.inProgress")
case AppStatus.KickOut => messages("iht.inProgress")
case AppStatus.InReview => messages("iht.inReview")
case AppStatus.UnderEnquiry => messages("iht.inReview")
case AppStatus.Closed => messages("iht.closed")
case AppStatus.ClearanceGranted => messages("iht.closed")
case AppStatus.IneligibleApplication => messages("iht.ineligibleApplication")
}
}
private def getLinkLabel(currentStatus: String)(implicit messages: Messages) = {
currentStatus match {
case AppStatus.NotStarted => messages("iht.start")
case AppStatus.InProgress => messages("iht.continue")
case AppStatus.KickOut => messages("iht.continue")
case _ => messages("page.iht.home.button.viewApplication.label")
}
}
private def getLinkScreenreader(currentStatus: String, deceasedName: String)(implicit messages: Messages) = {
currentStatus match {
case AppStatus.NotStarted => messages("page.iht.home.button.startApplication.screenReader", deceasedName)
case AppStatus.InProgress => messages("page.iht.home.button.continueApplication.screenReader", deceasedName)
case AppStatus.KickOut => messages("page.iht.home.button.continueApplication.screenReader", deceasedName)
case _ => messages("page.iht.home.button.viewApplication.screenReader", deceasedName)
}
}
private def getLink(currentStatus: String, ihtRef: String) = {
currentStatus match {
case AppStatus.NotStarted | AppStatus.InProgress | AppStatus.KickOut =>
iht.controllers.application.routes.EstateOverviewController.onPageLoadWithIhtRef(ihtRef)
case AppStatus.InReview | AppStatus.UnderEnquiry =>
iht.controllers.application.status.routes.ApplicationInReviewController.onPageLoad(ihtRef)
case AppStatus.Closed =>
iht.controllers.application.status.routes.ApplicationClosedController.onPageLoad(ihtRef)
case AppStatus.ClearanceGranted =>
iht.controllers.application.status.routes.ApplicationClosedController.onPageLoad(ihtRef)
case AppStatus.IneligibleApplication =>
logger.warn("Ineligible Application status found")
throw new RuntimeException("Ineligible Application status found")
case _ =>
logger.error("Unknown Application status found")
throw new RuntimeException("Unknown Application status found")
}
}
}
| hmrc/iht-frontend | app/iht/viewmodels/estateReports/YourEstateReportsRowViewModel.scala | Scala | apache-2.0 | 4,673 |
package co.ledger.wallet.web.ripple.content
import co.ledger.wallet.web.ripple.core.database.{DatabaseDeclaration, Model, ModelCreator, QueryHelper}
/**
*
* SampleModel
* ledger-wallet-ripple-chrome
*
* Created by Pierre Pollastri on 07/06/2016.
*
* The MIT License (MIT)
*
* Copyright (c) 2016 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
/***
* A sample model to demonstrate API capabilities
*/
class SampleModel extends Model("sample") {
val aInt = int("aInt").unique().index()
val aString = string("aString")
}
object SampleModel extends QueryHelper[SampleModel] with ModelCreator[SampleModel] {
override def database: DatabaseDeclaration = SamplesDatabaseDeclaration
override def creator: ModelCreator[SampleModel] = this
override def newInstance(): SampleModel = new SampleModel
}
| LedgerHQ/ledger-wallet-ripple | src/main/scala/co/ledger/wallet/web/ripple/content/SampleModel.scala | Scala | mit | 1,888 |
/*
* LKExporterTest.scala
*/
package at.logic.gapt.formats.calculi.xml
import at.logic.gapt.formats.xml.{ HOLTermXMLExporter, LKExporter }
import at.logic.gapt.proofs.HOLSequent
import org.specs2.mutable._
import scala.xml.Utility.trim
import at.logic.gapt.proofs.lk.base._
import at.logic.gapt.expr._
import at.logic.gapt.expr.StringSymbol
import at.logic.gapt.expr.To
class LkExporterTest extends Specification {
val exporter = new LKExporter {}
// helper to create 0-ary predicate constants
def pc( sym: String ) = HOLAtom( Const( StringSymbol( sym ), To ), List() )
"LKExporter" should {
"export correctly a sequent A, B :- C, D" in {
trim( exporter.exportSequent( HOLSequent( List( "A", "B" ) map ( pc ), List( "C", "D" ) map ( pc ) ) ) ) must beEqualTo( trim(
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
<constantatomformula symbol="B"/>
</formulalist>
<formulalist>
<constantatomformula symbol="C"/>
<constantatomformula symbol="D"/>
</formulalist>
</sequent>
) )
}
"export correctly a sequent list {A1, B1 :- C1, D1, A2, B2 :- C2, D2}" in {
trim( exporter.exportSequentList( "testlist", List(
HOLSequent( List( "A1", "B1" ) map ( pc ), List( "C1", "D1" ) map ( pc ) ),
HOLSequent( List( "A2", "B2" ) map ( pc ), List( "C2", "D2" ) map ( pc ) )
) ) ) must beEqualTo( trim(
<sequentlist symbol="testlist">
<sequent>
<formulalist>
<constantatomformula symbol="A1"/>
<constantatomformula symbol="B1"/>
</formulalist>
<formulalist>
<constantatomformula symbol="C1"/>
<constantatomformula symbol="D1"/>
</formulalist>
</sequent>
<sequent>
<formulalist>
<constantatomformula symbol="A2"/>
<constantatomformula symbol="B2"/>
</formulalist>
<formulalist>
<constantatomformula symbol="C2"/>
<constantatomformula symbol="D2"/>
</formulalist>
</sequent>
</sequentlist>
) )
}
}
}
/*
"parse correctly an axiom P :- P" in {
(new NodeReader(<proof symbol="p" calculus="LK">
<rule type="axiom">
<sequent>
<formulalist>
<constantatomformula symbol="P"/>
</formulalist>
<formulalist>
<constantatomformula symbol="P"/>
</formulalist>
</sequent>
</rule>
</proof>) with XMLProofParser).getProof() must
beLike{ case Axiom( conc )
if conc.getSequent.multisetEquals( Sequent( pc("P")::Nil,
pc("P")::Nil ))
=> true }
}
"parse a permutation parameter (1 2)" in {
XMLUtils.permStringToCycles("(1 2)", 2) must
beDeeplyEqual( (2::1::Nil).map(i => i - 1).toArray )
}
"parse a permutation parameter (1 2 3)(5 6)" in {
XMLUtils.permStringToCycles("(1 2 3)(5 6)", 6) must
beDeeplyEqual( (3::1::2::4::6::5::Nil).map( i => i - 1 ).toArray )
}
"parse a permutation parameter (3 4 5) of size 5" in {
XMLUtils.permStringToCycles("(3 4 5)", 5) must
beDeeplyEqual( (1::2::5::3::4::Nil).map( i => i - 1 ).toArray )
}
"parse a permutation rule" in {
(new NodeReader(<rule type="permr" param="(1 3)(2)">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
<constantatomformula symbol="B"/>
</formulalist>
<formulalist>
<constantatomformula symbol="E"/>
<constantatomformula symbol="D"/>
<constantatomformula symbol="C"/>
</formulalist>
</sequent>
<rule type="axiom">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
<constantatomformula symbol="B"/>
</formulalist>
<formulalist>
<constantatomformula symbol="C"/>
<constantatomformula symbol="D"/>
<constantatomformula symbol="E"/>
</formulalist>
</sequent>
</rule>
</rule>) with XMLProofParser).getProof() must
beLike{ case Axiom( conc ) => true }
}
"parse a simple contraction rule" in {
(new NodeReader(<rule type="contrl" param="2">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
</formulalist>
<formulalist/>
</sequent>
<rule type="axiom">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
<constantatomformula symbol="A"/>
</formulalist>
<formulalist/>
</sequent>
</rule>
</rule>) with XMLProofParser).getProof().root.getSequent must beMultisetEqual(
Sequent(pc("A")::Nil, Nil))
}
"parse an involved contraction rule" in {
(new NodeReader(<rule type="contrl" param="2,1,2,1,1">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
<constantatomformula symbol="B"/>
<constantatomformula symbol="C"/>
<constantatomformula symbol="C"/>
<constantatomformula symbol="D"/>
</formulalist>
<formulalist/>
</sequent>
<rule type="axiom">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
<constantatomformula symbol="A"/>
<constantatomformula symbol="B"/>
<constantatomformula symbol="C"/>
<constantatomformula symbol="C"/>
<constantatomformula symbol="C"/>
<constantatomformula symbol="D"/>
</formulalist>
<formulalist/>
</sequent>
</rule>
</rule>) with XMLProofParser).getProof().root.getSequent must beMultisetEqual(
Sequent(pc("A")::pc("B")::pc("C")::pc("C")::pc("D")::Nil, Nil))
}
"parse correctly a proof of A, A :- A and A" in {
(new NodeReader(<proof symbol="p" calculus="LK">
<rule type="andr">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
<constantatomformula symbol="A"/>
</formulalist>
<formulalist>
<conjunctiveformula type="and">
<constantatomformula symbol="A"/>
<constantatomformula symbol="A"/>
</conjunctiveformula>
</formulalist>
</sequent>
<rule type="axiom">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
</formulalist>
<formulalist>
<constantatomformula symbol="A"/>
</formulalist>
</sequent>
</rule>
<rule type="axiom">
<sequent>
<formulalist>
<constantatomformula symbol="A"/>
</formulalist>
<formulalist>
<constantatomformula symbol="A"/>
</formulalist>
</sequent>
</rule>
</rule>
</proof>) with XMLProofParser).getProof().root.getSequent must beMultisetEqual(
Sequent(pc("A")::pc("A")::Nil, And(pc("A"), pc("A"))::Nil))
}
"parse correctly a proof with one orr1 rule and one permr rule" in {
(new NodeReader(<proof symbol="p" calculus="LK">
<rule type="orr1">
<sequent>
<formulalist/>
<formulalist>
<constantatomformula symbol="B"/>
<conjunctiveformula type="or">
<constantatomformula symbol="A"/>
<constantatomformula symbol="C"/>
</conjunctiveformula>
</formulalist>
</sequent>
<rule type="permr" param="(1 2)">
<sequent>
<formulalist/>
<formulalist>
<constantatomformula symbol="B"/>
<constantatomformula symbol="A"/>
</formulalist>
</sequent>
<rule type="axiom">
<sequent>
<formulalist/>
<formulalist>
<constantatomformula symbol="A"/>
<constantatomformula symbol="B"/>
</formulalist>
</sequent>
</rule>
</rule>
</rule>
</proof>) with XMLProofParser).getProof().root.getSequent must beMultisetEqual(
Sequent(Nil, pc("B")::Or(pc("A"), pc("C"))::Nil))
}
"parse correctly a proof with some permutations, an andr, and an orr1 rule from a file" in {
val proofs = (new XMLReader(new InputStreamReader(getClass.getClassLoader.getResourceAsStream("xml" + separator + "test3.xml"))) with XMLProofDatabaseParser).getProofs()
proofs.size must beEqualTo(1)
proofs.first.root.getSequent must beMultisetEqual(
Sequent(Nil, pc("A")::pc("C")::pc("F")::
And(pc("B"), pc("E"))::
Or(pc("D"), pc("G"))::Nil))
}
"parse correctly a proof with two orr1 rules and two permr rules from a file" in {
val proofs = (new XMLReader(new InputStreamReader(getClass.getClassLoader.getResourceAsStream("xml" + separator + "test2.xml"))) with XMLProofDatabaseParser).getProofs()
proofs.size must beEqualTo(1)
proofs.first.root.getSequent must beMultisetEqual(
Sequent(Nil, Or(pc("A"),
pc("C"))::
Or(pc("B"),
pc("D"))::Nil))
}
"parse correctly an involved proof from a file" in {
val proofs = (new XMLReader(new InputStreamReader(getClass.getClassLoader.getResourceAsStream("xml" + separator + "test1.xml"))) with XMLProofDatabaseParser).getProofs()
val X = Var( new VariableStringSymbol( "X" ), i -> o )
val t = Const( new ConstantStringSymbol( "t" ), i)
val s = Const( new ConstantStringSymbol( "s" ), i)
val r = Const( new ConstantStringSymbol( "r" ), i)
val f = Const( new ConstantStringSymbol( "f" ), i -> i)
val x = Var( new VariableStringSymbol( "x" ), i )
val Rs = new ConstantStringSymbol( "R" )
val f1 = All( X, And( AppFormula( X, t ), Neg( AppFormula( X, s ) ) ) )
val f2 = And( Imp( Atom( Rs, r::t::Nil ), Atom( Rs, r::App( f, t )::Nil ) ),
Ex( x, And( Atom( Rs, x::s::Nil ), Neg( Atom( Rs, x::App( f, s )::Nil ) ) ) ) )
proofs.size must beEqualTo(1)
proofs.first.root.getSequent must beMultisetEqual( Sequent( f1::Nil, f2::Nil ) )
}
"parse correctly the second-order primeproof" in {
val proofs = (new XMLReader(new InputStreamReader(new GZIPInputStream(new FileInputStream("target" + separator + "xml" + separator + "prime2.xml.gz")))) with XMLProofDatabaseParser).getProofs()
proofs.size must beEqualTo(1)
}
}*/
| loewenheim/gapt | src/test/scala/at/logic/gapt/formats/calculi/xml/LKExporterTest.scala | Scala | gpl-3.0 | 13,561 |
/*
* Copyright (c) 2013 Aviat Networks.
* This file is part of DocReg+Web. Please refer to the NOTICE.txt file for license details.
*/
package vvv.docreg.model
import net.liftweb._
import util._
import common._
import Helpers._
import http._
import provider.HTTPCookie
import vvv.docreg.util.{Environment, StringUtil}
import vvv.docreg.util.StringUtil.ValidEmail
import java.util.{TimeZone, Date}
import java.sql.Timestamp
import vvv.docreg.db.{DbObject, DbSchema}
import org.squeryl.PrimitiveTypeMode._
import vvv.docreg.model.User.loggedInUser
import scala.Predef._
import xml.{Text, NodeSeq}
// http://www.assembla.com/wiki/show/liftweb/How_to_use_Container_Managed_Security
// http://wiki.eclipse.org/Jetty/Tutorial/JAAS#LdapLoginModule
// http://www.mail-archive.com/[email protected]/msg05268.html
class User extends DbObject[User] {
def dbTable = DbSchema.users
var username: String = ""
var dn: String = ""
var name: String = ""
var description: String = ""
var department: String = ""
var location: String = ""
var email: String = ""
var active: Boolean = true
var superuser: Boolean = false
var host: String = ""
var lastSession: Timestamp = new Timestamp(0)
var sessionCount: Long = 0
var localServer: String = ""
var timeZone: String = ""
def watching_?(d: Document) = {
Subscription.isNotified(d.id, id)
}
def bookmarked_?(d: Document) = {
Subscription.isBookmarked(d.id, id)
}
def displayName = name
def shortUsername(): String =
{
username match {
case ValidEmail(name, domain) => name
case other => other.takeWhile(_ != '@')
}
}
def knownOption(): Option[User] = {
Option(this).filter(_.name != "[Unknown]")
}
def profileLink(): NodeSeq = profileLink(displayName)
def profileLink(text: String): NodeSeq = profileLink(Text(text))
def profile(): String = "/user/" + shortUsername()
def preferences(): String = {
"/user/" + shortUsername() + "/preferences"
}
def profileLink(content: NodeSeq): NodeSeq = <a href={ profile() }>{ content }</a>
def profileLabel(focusUserId: Long): NodeSeq = {
if (focusUserId == id) {
<span class="user-selected">{ profileLink() }</span>
}
else {
<span class="user">{ profileLink() }</span>
}
}
def revisions(): List[Revision] = {
inTransaction( from(Revision.dbTable)(r => where(r.authorId === id) select(r) orderBy(r.date desc)).toList )
}
def activity(): Long = {
inTransaction( from(Revision.dbTable)(r => where(r.authorId === id) compute(countDistinct(r.id))) )
}
def impact(): Long = {
inTransaction( from(Revision.dbTable)(r => where(r.authorId === id) compute(countDistinct(r.documentId))) )
}
def history(): List[Document] = {
inTransaction {
join(Document.dbTable, Revision.dbTable)( (d, r) =>
where(r.authorId === id)
select(d)
orderBy(r.date desc)
on(d.id === r.documentId)
).toList.distinct
}
}
def editing(): List[Document] = {
inTransaction {
join(Pending.dbTable, Document.dbTable)( (p, d) =>
where(p.action === PendingAction.editing and p.userId === id)
select(d)
orderBy(p.date desc)
on(p.documentId === d.id)
).toList.distinct
}
}
def getTimeZone(): TimeZone = {
if (timeZone == null){
TimeZone.getDefault
} else {
TimeZone.getTimeZone(timeZone)
}
}
def canLogin_?(): Boolean = {
// TODO need to check for DocReg Access group
active
}
def accessLevel(): AccessLevel.Value = {
if (!active) {
AccessLevel.none
}
else if (superuser) {
AccessLevel.superuser
}
else {
AccessLevel.normal
}
}
}
sealed abstract class SignInFailure
case object IncorrectUsernameOrPassword extends SignInFailure
case class NotAuthorized(user: User) extends SignInFailure
object User extends User with Loggable {
val docRegUserCookie = "DocRegWebUser"
val domain = "@GNET.global.vpn"
object loggedInUser extends SessionVar[Box[User]](checkForUserCookie)
object requestUri extends SessionVar[Option[String]](None)
def signIn(username: String, password: String): Either[User, SignInFailure] = {
val directory = Environment.env.directory
// This gives us an up to date user, i.e. LDAP attributes are reloaded
UserLookup.lookupUser(username, directory) match {
case Full(user) if (directory.login(user.dn, password)) => {
if (user.canLogin_?) Left(user)
else Right(NotAuthorized(user))
}
case _ => {
Right(IncorrectUsernameOrPassword)
}
}
}
def reloadLoggedInUser() {
val u = loggedInUser.toOption.flatMap(_.reload())
loggedInUser(u)
}
def loggedIn_? = !loggedInUser.is.isEmpty
def login(user: User) = {
markSession(user)
loggedInUser(Full(user))
}
def logout() = {
loggedInUser(Empty)
}
def forUsername(username: String): Option[User] = {
inTransaction( dbTable.where(u => u.username like username).headOption )
}
private def markSession(in: User)
{
for (u <- in.reload()) {
u.lastSession = new Timestamp(System.currentTimeMillis())
u.sessionCount = u.sessionCount + 1L
u.host = User.parseHost
User.dbTable.update(u)
      logger.info("User '" + u.displayName + "' started session " + u.host)
}
}
def saveUserCookie() {
loggedInUser.is match {
case Full(u) => S.addCookie(HTTPCookie(docRegUserCookie, u.username).setMaxAge(3600 * 24 * 7).setPath("/"))
case _ => S.addCookie(HTTPCookie(docRegUserCookie, "###").setPath("/"))
}
}
def checkForUserCookie: Box[User] = {
S.cookieValue(docRegUserCookie) match {
case Full(id) =>
val existing: Box[User] = User.forUsername(id)
existing.foreach { u => markSession(u) }
existing
case _ =>
Empty
}
}
def parseHost: String =
{
    // Nginx exposes the client IP via the X-Real-IP and X-Forwarded-For headers
S.getRequestHeader("X-Real-IP").getOrElse(S.getRequestHeader("X-Forwarded-For").getOrElse("?"))
}
def sort(a: User, b: User): Boolean =
{
val x = a.displayName.split(" ").head
val y = b.displayName.split(" ").head
x.compareToIgnoreCase(y) < 0
}
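  // e.g. with displayNames "alice smith" and "Bob jones" this returns true,
  // because only the first words are compared, case-insensitively ("alice" < "bob").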
def authorized(): List[User] = {
from(dbTable)( u =>
where(u.active === true)
select(u)
orderBy(u.name asc)
).toList
}
}
object AccessLevel extends Enumeration {
type AccessLevel = Value
val none = Value("None")
val normal = Value("Normal")
val superuser = Value("Administrator")
}
object StreamMode extends Enumeration {
val all = Value("All")
val selected = Value("Selected")
val watching = Value("Watching")
val me = Value("@Me")
}
object UserSession {
val modeCookie = "DocRegWebMode"
object mode extends SessionVar[StreamMode.Value](loadModeCookie)
object authorizedProjects extends SessionVar[Set[Long]](loadAuthorized)
object selectedProjects extends SessionVar[Set[Long]](loadSelected)
private def user: Box[User] = {
User.loggedInUser.is
}
def loadAuthorized(): Set[Long] = {
user.map(u =>
ProjectAuthorization.authorizedProjectsFor(u).map(_.id).toSet
).getOrElse(Set.empty[Long])
}
def loadSelected(): Set[Long] = {
user.map(u =>
UserProject.userSelected(u).map(_.id).toSet
).getOrElse(Set.empty[Long])
}
def changeSelected(projectId: Long, selected: Boolean) {
selectedProjects( if (selected) selectedProjects.is + projectId else selectedProjects.is - projectId )
}
def isAuthorized(d: Document): Boolean = {
    !d.secure_?() || authorizedProjects.contains(d.projectId)
}
def partitionAuthorized[T](in: List[T], documentF: T => Document): (List[T], List[T]) = {
in.partition{i =>
isAuthorized(documentF(i))
}
}
private def loadModeCookie(): StreamMode.Value = {
S.cookieValue(modeCookie).toOption.flatMap(x => Option(StreamMode.withName(x))).getOrElse(StreamMode.all)
}
private def saveModeCookie(x: StreamMode.Value) {
S.addCookie(HTTPCookie(modeCookie, x.toString).setMaxAge(3600 * 24 * 365).setPath("/"))
}
def changeMode(x: StreamMode.Value) {
mode(x)
saveModeCookie(x)
}
def inStreamFilter(): (Document, Revision, Project) => Boolean = {
inStreamFilter(UserSession.mode.is)
}
def inStreamFilter(mode: StreamMode.Value): (Document, Revision, Project) => Boolean = {
val authorized = authorizedProjects.is
def filterAuthorized(document: Document, project: Project) = !document.secure_?() || authorized.contains(project.id)
val filterMode: (Document, Revision, Project) => Boolean = mode match {
case StreamMode.all => {
(_,_,_) => true
}
case StreamMode.selected => { // now favourites
val selected = selectedProjects.is
val bookmarks = user.map(Subscription.bookmarksFor(_)).getOrElse(Nil).map(_.id)
(d,_,p) => selected.contains(p.id) || bookmarks.contains(d.id)
}
case StreamMode.watching => {
val subs = user.map(Subscription.watchingFor(_)).getOrElse(Nil).map(_.id)
(d,_,_) => subs.contains(d.id)
}
case StreamMode.me => {
val uid = user.map(_.id) getOrElse -1
(_,r,_) => r.authorId == uid
}
case _ => {
(_,_,_) => false
}
}
(d,r,p) => { filterMode(d,r,p) && filterAuthorized(d,p) }
}
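  // e.g. inStreamFilter(StreamMode.watching) keeps (document, revision, project)
  // triples whose document the user watches, and always drops secure documents
  // belonging to projects the user is not authorized for.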
}
| scott-abernethy/docreg-web | src/main/scala/vvv/docreg/model/User.scala | Scala | mit | 9,426 |
package lila.gameSearch
import com.sksamuel.elastic4s.ElasticDsl._
import org.elasticsearch.search.sort.SortOrder
case class Sorting(f: String, order: String) {
def definition =
field sort (Sorting.fieldKeys contains f).fold(f, Sorting.default.f) order
(order.toLowerCase == "asc").fold(SortOrder.ASC, SortOrder.DESC)
}
object Sorting {
val fields = List(
Fields.date -> "Date",
Fields.turns -> "Moves",
Fields.averageRating -> "Average Rating")
def fieldKeys = fields map (_._1)
val orders = List(SortOrder.DESC, SortOrder.ASC) map { s => s.toString -> s.toString }
val default = Sorting(Fields.date, "desc")
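  // Example: Sorting(Fields.turns, "asc").definition sorts ascending on move count;
  // an unknown field falls back to the default field (date), and any order other
  // than "asc" sorts descending.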
}
| pavelo65/lila | modules/gameSearch/src/main/Sorting.scala | Scala | mit | 652 |
package ca.hyperreal.sscheme
import org.scalatest._
import prop.PropertyChecks
class Examples extends FreeSpec with PropertyChecks with Matchers
{
"sort" in
{
val env = environment( """
(define sort #f)
(let ()
(define dosort
(lambda (pred? ls n)
(if (= n 1)
(list (car ls))
(let ((i (quotient n 2)))
(merge pred?
(dosort pred? ls i)
(dosort pred? (list-tail ls i) (- n i)))))))
(define merge
(lambda (pred? l1 l2)
(cond
((null? l1) l2)
((null? l2) l1)
((pred? (car l2) (car l1))
(cons (car l2) (merge pred? l1 (cdr l2))))
(else (cons (car l1) (merge pred? (cdr l1) l2))))))
(set! sort
(lambda (pred? l)
(if (null? l) l (dosort pred? l (length l))))))
""" )
interpret( """ (sort < l) """, env add ('l -> SList(5, 7, 3, 9, 2, 1, 6)) ) shouldBe SList(1, 2, 3, 5, 6, 7, 9)
interpret( """ (sort > l) """, env add ('l -> SList(5, 7, 3, 9, 2, 1, 6)) ) shouldBe SList(9, 7, 6, 5, 3, 2, 1)
}
}
| edadma/sscheme | src/test/scala/Examples.scala | Scala | mit | 1,005 |
package opennlp.scalabha.tag.hmm
import org.apache.commons.logging.LogFactory
import opennlp.scalabha.tag.support._
import opennlp.scalabha.tag._
import opennlp.scalabha.util.CollectionUtils._
import opennlp.scalabha.util.LogNum._
import opennlp.scalabha.util.Pattern
import opennlp.scalabha.util.Pattern.{ -> }
import opennlp.scalabha.util.LogNum
import scala.collection.GenIterable
import opennlp.scalabha.tag.hmm.support._
import scala.annotation.tailrec
/**
* Factory for training a Hidden Markov Model tagger from a combination of
* labeled data and unlabeled data using the Expectation-Maximization (EM)
* algorithm.
*
* @tparam Sym visible symbols in the sequences
* @tparam Tag tags applied to symbols
*
* @param initialUnsupervisedEmissionDist
* @param estimatedTransitionCountsTransformer factory for generating builders that count tag occurrences and compute distributions during EM
* @param estimatedEmissionCountsTransformer factory for generating builders that count symbol occurrences and compute distributions during EM
* @param startEndSymbol a unique start/end symbol used internally to mark the beginning and end of a sentence
* @param startEndTag a unique start/end tag used internally to mark the beginning and end of a sentence
* @param maxIterations maximum number of iterations to be run during EM
* @param minAvgLogProbChangeForEM stop iterating EM if change in average log probability is less than this threshold
*/
class UnsupervisedHmmTaggerTrainer[Sym, Tag](
initialUnsupervisedEmissionDist: Tag => Sym => LogNum,
override protected val estimatedTransitionCountsTransformer: CondCountsTransformer[Tag, Tag],
override protected val estimatedEmissionCountsTransformer: CondCountsTransformer[Tag, Sym],
override protected val startEndSymbol: Sym,
override protected val startEndTag: Tag,
override protected val maxIterations: Int = 50,
override protected val minAvgLogProbChangeForEM: Double = 0.00001)
extends AbstractEmHmmTaggerTrainer[Sym, Tag]
with UnsupervisedTaggerTrainer[Sym, Tag] {
/**
* Train a Hidden Markov Model tagger only on unlabeled data using the
* Expectation-Maximization (EM) algorithm.
*
* @param tagDict a mapping from symbols to their possible tags
* @param rawTrainSequences unlabeled sequences to be used as unsupervised training data
* @return a trained tagger
*/
override def trainUnsupervised(tagDict: Map[Sym, Set[Tag]], rawTrainSequences: Iterable[IndexedSeq[Sym]]): Tagger[Sym, Tag] = {
LOG.info("Beginning unsupervised training")
LOG.info("Tag dict: %d symbols, %.3f avg tags/symbol".format(tagDict.size, tagDict.values.map(_.size).avg))
// Correct tag dictionary for start/final symbols
val tagDictWithEnds = tagDict + (startEndSymbol -> Set(startEndTag))
// Create the initial distributions
val allTags = tagDictWithEnds.values.flatten.toSet
val initialTransitions = CondFreqDist(DefaultedCondFreqCounts(allTags.mapTo(_ => allTags.mapTo(_ => 1.0).toMap).toMap))
val initialEmissions = initialUnsupervisedEmissionDist
val initialHmm = new HmmTagger(initialTransitions, initialEmissions, tagDictWithEnds, startEndSymbol, startEndTag)
hmmExaminationHook(initialHmm)
// Re-estimate probability distributions using EM
// Do not assume any known counts -- use only EM-estimated counts
trainWithEm(rawTrainSequences, initialHmm)
}
}
/**
* Factory for training a Hidden Markov Model tagger from a combination of
* labeled data and unlabeled data using the Expectation-Maximization (EM)
* algorithm.
*
* @tparam Sym visible symbols in the sequences
* @tparam Tag tags applied to symbols
*
* @param initialTransitionCountsTransformer factory for generating builders that count tag occurrences and compute distributions for input to EM
* @param initialEmissionCountsTransformer factory for generating builders that count symbol occurrences and compute distributions for input to EM
* @param startEndSymbol a unique start/end symbol used internally to mark the beginning and end of a sentence
* @param startEndTag a unique start/end tag used internally to mark the beginning and end of a sentence
* @param maxIterations maximum number of iterations to be run during EM
* @param minAvgLogProbChangeForEM stop iterating EM if change in average log probability is less than this threshold
*/
class SemisupervisedHmmTaggerTrainer[Sym, Tag](
initialTransitionCountsTransformer: CondCountsTransformer[Tag, Tag],
initialEmissionCountsTransformer: CondCountsTransformer[Tag, Sym],
override protected val estimatedTransitionCountsTransformer: CondCountsTransformer[Tag, Tag],
override protected val estimatedEmissionCountsTransformer: CondCountsTransformer[Tag, Sym],
override protected val startEndSymbol: Sym,
override protected val startEndTag: Tag,
override protected val maxIterations: Int = 50,
override protected val minAvgLogProbChangeForEM: Double = 0.00001)
extends SupervisedHmmTaggerTrainer[Sym, Tag](initialTransitionCountsTransformer, initialEmissionCountsTransformer, startEndSymbol, startEndTag)
with AbstractEmHmmTaggerTrainer[Sym, Tag]
with SemisupervisedTaggerTrainer[Sym, Tag] {
/**
* Train a Hidden Markov Model tagger from a combination of labeled data and
* unlabeled data using the Expectation-Maximization (EM) algorithm. Use
* the provided tag dictionary instead of creating one from the labeled data.
*
* @param tagDict a mapping from symbols to their possible tags
* @param rawTrainSequences unlabeled sequences to be used as unsupervised training data
* @param taggedTrainSequences labeled sequences to be used as supervised training data
* @return a trained tagger
*/
override def trainSemisupervised(
tagDict: Map[Sym, Set[Tag]],
rawTrainSequences: Iterable[IndexedSeq[Sym]],
taggedTrainSequences: Iterable[IndexedSeq[(Sym, Tag)]]): Tagger[Sym, Tag] = {
// Correct tag dictionary for start/final symbols
val tagDictWithEnds = tagDict + (startEndSymbol -> Set(startEndTag))
// Get initial counts and probability distributions from the labeled data alone
val (initialTransitionCounts, initialEmissionCounts) = getCountsFromTagged(taggedTrainSequences)
// Create the initial HMM
val initialTransitions = CondFreqDist(initialTransitionCountsTransformer(initialTransitionCounts))
val initialEmissions = CondFreqDist(initialEmissionCountsTransformer(initialEmissionCounts))
val initialHmm = new HmmTagger(initialTransitions, initialEmissions, tagDictWithEnds, startEndSymbol, startEndTag)
hmmExaminationHook(initialHmm)
// Re-estimate probability distributions using EM
val hmm =
trainWithEm(
rawTrainSequences,
initialHmm,
CondFreqCounts(initialTransitionCounts).toDouble, CondFreqCounts(initialEmissionCounts).toDouble)
hmm
}
}
/**
* Factory for training a Hidden Markov Model tagger from a combination of
* labeled data and unlabeled data using the Expectation-Maximization (EM)
* algorithm.
*
* @tparam Sym visible symbols in the sequences
* @tparam Tag tags applied to symbols
*
* @param maxIterations maximum number of iterations to be run during EM
* @param minAvgLogProbChangeForEM stop iterating EM if change in average log probability is less than this threshold
*/
trait AbstractEmHmmTaggerTrainer[Sym, Tag] {
protected val estimatedTransitionCountsTransformer: CondCountsTransformer[Tag, Tag]
protected val estimatedEmissionCountsTransformer: CondCountsTransformer[Tag, Sym]
protected val startEndSymbol: Sym
protected val startEndTag: Tag
protected val maxIterations: Int = 50
protected val minAvgLogProbChangeForEM: Double = 0.00001
protected val LOG = LogFactory.getLog(classOf[AbstractEmHmmTaggerTrainer[Sym, Tag]])
def trainWithEm(
rawTrainSequences: Iterable[IndexedSeq[Sym]],
initialHmm: HmmTagger[Sym, Tag]): HmmTagger[Sym, Tag] = {
trainWithEm(
rawTrainSequences,
initialHmm,
CondFreqCounts[Tag, Tag, Double](),
CondFreqCounts[Tag, Sym, Double]())
}
def trainWithEm(
rawTrainSequences: Iterable[IndexedSeq[Sym]],
initialHmm: HmmTagger[Sym, Tag],
initialTransitionCounts: CondFreqCounts[Tag, Tag, Double],
initialEmissionCounts: CondFreqCounts[Tag, Sym, Double]): HmmTagger[Sym, Tag] = {
if (LOG.isDebugEnabled) {
val unknownWord = (rawTrainSequences.flatten.toSet -- initialHmm.tagDict.keySet).headOption
LOG.debug(" initialEmissions")
for (w <- List(unknownWord, Some("company"), Some("the")).flatten.map(_.asInstanceOf[Sym])) {
val probs = initialHmm.tagDict.values.flatten.toSet.mapTo(initialHmm.emissions(_)(w).logValue)
for ((t, p) <- probs.toList.sortBy(-_._2))
LOG.debug(" p(%s|%s) = %.2f".format(if (w == unknownWord) "unk" else w, t, p))
}
}
reestimateLogNumDistributions(
rawTrainSequences: Iterable[IndexedSeq[Sym]],
initialHmm: HmmTagger[Sym, Tag],
1, Double.NegativeInfinity,
initialTransitionCounts: CondFreqCounts[Tag, Tag, Double],
initialEmissionCounts: CondFreqCounts[Tag, Sym, Double])
}
/**
* Re-estimate probability distributions using EM. Estimate counts for
* each sequence in rawTrainSequences using the forward/backward procedure.
* Calculate probability distributions from these counts. Repeat until
* convergence.
*/
@tailrec
final protected def reestimateLogNumDistributions(
rawTrainSequences: Iterable[IndexedSeq[Sym]],
initialHmm: HmmTagger[Sym, Tag],
iteration: Int,
prevAvgLogProb: Double,
initialTransitionCounts: CondFreqCounts[Tag, Tag, Double],
initialEmissionCounts: CondFreqCounts[Tag, Sym, Double]): HmmTagger[Sym, Tag] = {
// E Step: Use the forward/backward procedure to determine the
// probability of various possible state sequences for
// generating the training data
val (expectedTransitionCounts, expectedEmmissionCounts, avgLogProb) =
estimateCounts(rawTrainSequences, initialHmm, initialTransitionCounts, initialEmissionCounts)
// M Step: Use these probability estimates to re-estimate the
// probability distributions
val transitions = CondFreqDist(estimatedTransitionCountsTransformer(expectedTransitionCounts.toMap))
val emissions = CondFreqDist(new StartEndFixingEmissionCountsTransformer(startEndSymbol, startEndTag, estimatedEmissionCountsTransformer)(expectedEmmissionCounts.toMap))
val hmm = HmmTagger(transitions, emissions, initialHmm.tagDict, initialHmm.startEndSymbol, initialHmm.startEndTag)
    LOG.info("\t" + iteration + ": " + avgLogProb)
hmmExaminationHook(hmm)
// Check each ending condition
if (iteration >= maxIterations) {
LOG.info("DONE: Max number of iterations reached")
hmm
}
else if ((avgLogProb - prevAvgLogProb).abs < minAvgLogProbChangeForEM) { //check if converged
LOG.info("DONE: Change in average log probability is less than " + minAvgLogProbChangeForEM)
hmm
}
else if (avgLogProb < prevAvgLogProb) {
throw new RuntimeException("DIVERGED: log probability decreased on iteration %d".format(iteration))
}
else if (avgLogProb == Double.NegativeInfinity) {
throw new RuntimeException("averageLogProb == -Infinity on iteration %d".format(iteration))
}
else {
// No ending condition met, re-estimate
reestimateLogNumDistributions(
rawTrainSequences: Iterable[IndexedSeq[Sym]],
hmm: HmmTagger[Sym, Tag],
iteration + 1, avgLogProb,
initialTransitionCounts: CondFreqCounts[Tag, Tag, Double],
initialEmissionCounts: CondFreqCounts[Tag, Sym, Double])
}
}
/**
* Estimate transition and emission counts for each sequence in
* rawTrainSequences using the forward/backward procedure.
*
* TODO: This method should be rewritten as a MapReduce job.
*/
protected def estimateCounts(
rawTrainSequences: Iterable[IndexedSeq[Sym]],
hmm: HmmTagger[Sym, Tag],
initialTransitionCounts: CondFreqCounts[Tag, Tag, Double],
initialEmissionCounts: CondFreqCounts[Tag, Sym, Double]) = {
val (expectedTransitionCounts, expectedEmissionCounts, totalSeqProb, numSequences) =
rawTrainSequences.par
.map {
sequence =>
val (estTrCounts, estEmCounts, seqProb) = estimateCountsForSequence(sequence, hmm)
(estTrCounts, estEmCounts, seqProb.logValue, 1) // number of sentences == 1
}
.fold((CondFreqCounts[Tag, Tag, Double](), CondFreqCounts[Tag, Sym, Double](), 0., 0)) {
case ((aTC, aEC, aP, aN), (bTC, bEC, bP, bN)) =>
(aTC ++ bTC, aEC ++ bEC, aP + bP, aN + bN) // sum up all the components
}
(expectedTransitionCounts ++ initialTransitionCounts, expectedEmissionCounts ++ initialEmissionCounts, totalSeqProb / numSequences)
}
/**
* Estimate transition and emission counts for the given sequence using
* the forward/backward procedure.
*/
protected def estimateCountsForSequence(
sequence: IndexedSeq[Sym],
hmm: HmmTagger[Sym, Tag]) = {
val (forwards, forwardProb) = forwardProbabilities(sequence, hmm)
val (backwrds, backwrdProb) = backwrdProbabilities(sequence, hmm)
assert(forwardProb approx backwrdProb, "forward=%s, backward=%s".format(forwardProb.logValue, backwrdProb.logValue))
val seqProb = forwardProb // P(sequence | transition,emissions)
// Get expected transition counts based on forward-backward probabilities
// Let expectedTransitionCounts(t)(i)(j) be the probability of being in
// state i at time t and state j at time t+1
val expectedTransitionCounts = estimateTransitionCounts(sequence, hmm, forwards, backwrds, seqProb)
// Get expected emission counts based on forward-backward probabilities
    // Let expectedEmissionCounts(t)(i) be the probability of being in
    // state i at time t, given the observations and the model
val expectedEmissionCounts = estimateEmissionCounts(sequence, hmm, forwards, backwrds, seqProb)
(expectedTransitionCounts, expectedEmissionCounts, seqProb)
}
/**
* Calculate forward probabilities for the sequence based on the existing
* transition and emission probabilities.
*
* Let forward(t)(j) be the probability of being in state j after seeing the
* first t observations (by summing over all initial paths leading to j).
*
* forward(t)(j) = P(o1,o2,...,ot, q_t=j | lambda)
*/
protected def forwardProbabilities(
sequence: IndexedSeq[Sym],
hmm: HmmTagger[Sym, Tag]): (IndexedSeq[Tag => LogNum], LogNum) = {
// Initialization
// forward(1)(j) = a(start)(j) * b(j)(o1) j in [1,N]
// Recursion
// forward(t)(j) = (1 to N).sum(i => forward(t-1)(i) * a(i)(j)) * bj(ot) j in [1,N], t in [1,T]
// Termination
// P(O | lambda) = forward(final)(sf) = (1 to N).sum(i => forward(T)(i) * aif)
val startEndTag = hmm.startEndTag
val startForward = Map(startEndTag -> LogNum.one)
val (lastForward @ Pattern.Map(`startEndTag` -> forwardProb), forwards) =
(sequence :+ hmm.startEndSymbol).foldLeft((startForward, List[Map[Tag, LogNum]]())) {
case ((prevForward, otherForwards), tok) =>
val currForward =
hmm.tagDict(tok).mapTo { currTag => // each legal tag for the current token
val tProb =
prevForward.sumMap {
case (prevTag, prevFwdScore) => prevFwdScore * hmm.transitions(prevTag)(currTag)
}
val eProb = hmm.emissions(currTag)(tok)
tProb * eProb
}.toMap
(currForward, prevForward :: otherForwards)
}
((lastForward :: forwards).reverse.toIndexedSeq, forwardProb)
}
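  // Worked sketch (illustrative only): for tokens o1, o2 and tags {A, B},
  //   forward(1)(A) = a(start)(A) * b(A)(o1)
  //   forward(2)(A) = (forward(1)(A) * a(A)(A) + forward(1)(B) * a(B)(A)) * b(A)(o2)
  // and forwardProb is the start/end-tag entry of the final column, computed
  // after the end symbol has been appended to the sequence.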
/**
* Calculate backward probabilities for the sequence based on the existing
* transition and emission probabilities.
*
   * Let backwrd(t)(j) be the probability of observing the remaining observations
   * from time t+1 to T, given that one is in state j at time t
   *
   * backwrd(t)(j) = P(o_t+1,o_t+2,...,o_T | q_t = j, lambda)
*/
protected def backwrdProbabilities(
sequence: IndexedSeq[Sym],
hmm: HmmTagger[Sym, Tag]): (IndexedSeq[Tag => LogNum], LogNum) = {
// Initialization
// backwrd(T)(i) = a(i)(F) i in [1,N]
// Recursion
// backwrd(t)(i) = (1 to N).sum(j => a(i)(j) * b(j)(o_(t+1)) * backwrd(t+1)(j)) i in [1,N], t in [1,T]
// Termination
// P(O | lambda) = backwrd(1)(s0) = (1 to N).sum(i => a(0)(j) * b(j)(o1) * backwrd(1)(j))
val startEndTag = hmm.startEndTag
val finalBackwrd = Map(startEndTag -> LogNum.one)
val (firstBackwrd @ Pattern.Map(`startEndTag` -> backwrdProb), backwrds, lastTok) =
(hmm.startEndSymbol +: sequence).foldRight((finalBackwrd, List[Map[Tag, LogNum]](), hmm.startEndSymbol)) {
case (tok, (nextBackwrd, otherBackwrds, nextTok)) =>
val currBackwrd =
hmm.tagDict(tok).mapTo { currTag =>
nextBackwrd.sumMap {
case (nextTag, nextBkwdScore) =>
hmm.transitions(currTag)(nextTag) * hmm.emissions(nextTag)(nextTok) * nextBkwdScore
}
}.toMap
(currBackwrd, nextBackwrd :: otherBackwrds, tok)
}
((firstBackwrd :: backwrds).toIndexedSeq, backwrdProb)
}
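  // Illustrative invariant: for every time step t,
  //   sum_j(forward(t)(j) * backwrd(t)(j)) == P(O | lambda),
  // which is why estimateCountsForSequence asserts forwardProb ~= backwrdProb.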
/**
* Estimate transition counts for the sequence based on forward and
* backward probabilities.
*
   * estTrans(i,j) = sum_t(fwd(t)(i) * a(i)(j) * b(j)(o_(t+1)) * bkw(t+1)(j) / seqProb)
*/
protected def estimateTransitionCounts(
sequence: IndexedSeq[Sym],
hmm: HmmTagger[Sym, Tag],
forwards: IndexedSeq[Tag => LogNum],
backwrds: IndexedSeq[Tag => LogNum],
seqProb: LogNum) = {
val validTagsByToken = sequence.map(hmm.tagDict)
val nextTokens = sequence :+ hmm.startEndSymbol
val currTagSets = Set(hmm.startEndTag) +: validTagsByToken
val nextTagSets = validTagsByToken :+ Set(hmm.startEndTag)
val currForwards = forwards.dropRight(1)
val nextBackwrds = backwrds.drop(1)
val expectedTransitionCounts =
(nextTokens zipEqual currTagSets zipEqual nextTagSets zipEqual currForwards zipEqual nextBackwrds).map {
case ((((nextTok, currTags), nextTags), currForward), nextBackwrd) =>
currTags.mapTo { currTag =>
nextTags.mapTo { nextTag =>
(currForward(currTag) * hmm.transitions(currTag)(nextTag) * hmm.emissions(nextTag)(nextTok) * nextBackwrd(nextTag) / seqProb).toDouble
}.toMap
}.toMap
}
expectedTransitionCounts.map(CondFreqCounts(_)).reduce(_ ++ _)
}
/**
* Estimate emission counts for the sequence based on forward and
* backward probabilities.
*
* estEmiss(t)(j) = fwd(t)(j) * bkw(t)(j) / seqProb
*/
protected def estimateEmissionCounts(
sequence: IndexedSeq[Sym],
hmm: HmmTagger[Sym, Tag],
forwards: IndexedSeq[Tag => LogNum],
backwrds: IndexedSeq[Tag => LogNum],
seqProb: LogNum) = {
// TODO: Probably not necessary to count start/end states since it's
// always the case that P(endSym|endTag)=1
val fullSeq = hmm.startEndSymbol +: sequence :+ hmm.startEndSymbol
val expectedEmissionCounts =
(fullSeq zipEqual forwards zipEqual backwrds).map {
case ((tok, forward), backwrd) =>
hmm.tagDict(tok).mapTo(tag =>
Map(tok -> (forward(tag) * backwrd(tag) / seqProb).toDouble)).toMap
}
expectedEmissionCounts.map(CondFreqCounts(_)).reduce(_ ++ _)
}
protected def hmmExaminationHook(hmm: HmmTagger[Sym, Tag]) {
}
}
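// Hypothetical wiring sketch; the argument values below are assumptions, only the
// signatures come from the classes above:
// val trainer = new UnsupervisedHmmTaggerTrainer[String, String](
//   initialUnsupervisedEmissionDist = uniformEmissions,
//   estimatedTransitionCountsTransformer = transitionSmoothing,
//   estimatedEmissionCountsTransformer = emissionSmoothing,
//   startEndSymbol = "<END>",
//   startEndTag = "<END>")
// val tagger = trainer.trainUnsupervised(tagDict, rawSentences)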
| dhgarrette/type-supervised-tagging-2012emnlp | src/main/scala/opennlp/scalabha/tag/hmm/UnsupervisedHmmTaggerTrainer.scala | Scala | apache-2.0 | 19,871 |
package com.ubirch.auth.model
/**
* author: cvandrei
* since: 2017-04-25
*/
case class UserUpdate(displayName: String)
| ubirch/ubirch-auth-service | model/src/main/scala/com/ubirch/auth/model/UserUpdate.scala | Scala | apache-2.0 | 126 |
/**
* Copyright (c) 2007-2011 Eric Torreborre <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software. Neither the name of specs nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written permission.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package org.specs.mock
import org.specs._
import org.specs.literate._
import org.specs.execute._
class mockitoSpecification extends HtmlSpecificationWithJUnit("Mockito Specification") with MockitoSpecification {
<wiki>
Mockito is a Java library for mocking.
The following samples are taken from the main documentation which can be found "here":http://mockito.googlecode.com/svn/branches/1.7/javadoc/org/mockito/Mockito.html
h3. Let's verify some behaviour
First of all, we need to import some classes and traits for our examples: {"""
import org.specs.Specification
import org.specs.mock.Mockito
import org.mockito.Mock
import java.util.List
import java.util.LinkedList""" prelude it }
A mock is created with the @mock@ method: {"""
object s extends Specification with Mockito {
// mock creation
val m = mock[List[String]]
// using mock object
m.add("one")
m.clear()""" snip it }
// <ex>It is possible to check that some methods have been called on the mock with the @called@ matcher</ex>:
{""" // verification
there was one(m).add("one")
there was one(m).clear()
} """ add it }{ executeIsNot("error") }
h4. Failures
If one method has not been called on a mock, <ex>the expression @there was one(m).method@ must throw a @FailureException@</ex>: {"""
object s2 extends Specification with Mockito {
val m = mock[List[String]]
there was one(m).clear()
}
s2.failures
""" snip it }
{ >("Wanted but not invoked") }
h4. Argument matchers
{ linkTo(argumentMatchers) } allow flexible verification or stubbing.
h3. How about some stubbing?
<ex>You can mock concrete classes, not only interfaces</ex> {"""
object s3 extends Specification with Mockito {
val m = mock[LinkedList[String]]
// stubbing
m.get(0) returns "first"
m.clear() throws new RuntimeException
}
""" prelude it }{ executeIsNot("error") }
<ex>Calling a stubbed method with @returns@ returns the expected value</ex>. For example, the following prints "first":
{ "s3.m.get(0)" snip it }
{ >("first") }
<ex>Calling a stubbed method with @throws@ throws the expected exception</ex>. For example, the following throws a RuntimeException:
{ "s3.m.clear()" snip it }
{ >("RuntimeException") }
<ex>Calling a non-stubbed method should return a default value</ex>. For example, the following returns @null@ because @get(999)@ was not stubbed:
{ "s3.m.get(999)" snip it }
{ >("null") }
h3. Verifying the number of invocations
The number of invocations (atLeast, atMost) can also be checked: { linkTo(numberOfInvocations) }
h3. Verifying that invocations occur in order
  When calls have to happen in a given order, this can also be checked: { linkTo(inOrder) }
h3. Callbacks
  In some rare cases, you may want the stubbed return values to be a function of the input method parameters: { linkTo(callbacks) }
h3. Annotations
<ex>It is possible to use annotations to declare mocks</ex> {"""
object s5 extends Specification with Mockito {
// do we gain anything using Scala, compared to val mockedList = mock[List[String]]?
@Mock val m: List[String] = null
"this needs to be inside an example because otherwise a NPE is thrown" in {
m.clear()
there was one(m).clear()
}
}
""" snip it }
{ "s5.isOk" add it }
{ >("true") }
{ "s5.issues" add_> }
h3. Stubbing consecutive calls (iterator-style stubbing)
Sometimes we need to stub with different return value/exception for the same method call. Typical use case could be mocking iterators. Original version of Mockito did not have this feature to promote simple mocking. For example, instead of iterators one could use Iterable or simply collections. Those offer natural ways of stubbing (e.g. using real collections).
In rare scenarios stubbing consecutive calls could be useful, though: {"""
object s6 extends Specification with Mockito {
val m = mock[List[String]]
m.get(0) returns "hello" thenReturns "world"
} """ snip it }
<ex>The first call returns the first value</ex>:
{ "s6.m.get(0)" add it }
{ >("hello") }
<ex>The second call returns the second value</ex>:
{ "s6.m.get(0)" add it }
{ >("world") }
When several values need to be stubbed this version of returns would also work: {"""
object s7 extends Specification with Mockito {
val m = mock[List[String]]
m.get(0) returns ("hello", "world")
}
""" snip it }
<ex>The first value is "hello"</ex>:
{ "s7.m.get(0)" add it }
{ >("hello") }
<ex>The second value is "world"</ex>:
{ "s7.m.get(0)" add it }
{ >("world") }
h3. Spies
  You can create { linkTo(spies) } of real objects. When you use a spy, the real methods are called (unless a method was stubbed).
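  A minimal spy sketch in the same DSL as the snippets above (assuming the @spy@ factory provided by the Mockito trait): {"""
    object s8 extends Specification with Mockito {
      val m = spy(new LinkedList[String])
      m.add("one")
      there was one(m).add("one")
    } """ snip it }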
h3. Return values
  Specific { linkTo(returnValues) } can be returned for unstubbed methods.
</wiki> isSus
include(argumentMatchers)
include(callbacks)
include(inOrder)
include(numberOfInvocations)
include(spies)
include(returnValues)
}
trait MockitoSpecification extends Mockito with Expectations with LiterateSnippets with Wiki { this: Specification =>
}
| Muki-SkyWalker/specs | src/test/scala/org/specs/mock/mockitoSpecification.scala | Scala | mit | 6,583 |
//-----------------------------------------------------------------------
// FILE : Processor.scala
// SUBJECT : Abstract superclass representing tree processors.
// AUTHOR : (C) Copyright 2011 by Peter C. Chapin <[email protected]>
//
//-----------------------------------------------------------------------
package edu.uvm.sprocket
abstract class Processor(root: ASTNode) {
def process() = root
}
| pchapin/sprocket | src/edu/uvm/sprocket/Processor.scala | Scala | bsd-3-clause | 409 |
package com.github.sedovalx.cassandra.service.generation.builders
import javax.lang.model.element.Modifier
import com.datastax.driver.core.{Statement, ResultSetFuture, ResultSet}
import com.datastax.driver.mapping.{Result, MappingManager}
import com.github.sedovalx.cassandra.services.base.AbstractAccessorJava8Adapter
import com.google.common.util.concurrent.ListenableFuture
import com.squareup.javapoet._
import scala.collection.JavaConverters._
/**
* Author alsedov on 12.01.2016
*/
class AccessorAdapterSpecBuilder(entityType: ClassName, accessorPackageName: String, accessorSpec: TypeSpec, types: BuildTypes) {
def buildSpec(): TypeSpec = {
val accessorClassName = ClassName.get(accessorPackageName, accessorSpec.name)
TypeSpec.classBuilder(accessorSpec.name + "Adapter")
.addModifiers(Modifier.PUBLIC)
.superclass(ParameterizedTypeName.get(
ClassName.get(classOf[AbstractAccessorJava8Adapter[_]]),
entityType
))
.addField(accessorClassName, "accessor", Modifier.PRIVATE)
.addMethod(buildConstructor(accessorClassName))
.addMethods(buildMethodSpecs().asJava)
.build()
}
private def buildMethodSpecs(): Seq[MethodSpec] = {
accessorSpec.methodSpecs.asScala.map(it => buildAdapterMethodSpec(it))
}
private def buildConstructor(accessorClassName: ClassName): MethodSpec = {
MethodSpec.constructorBuilder()
.addModifiers(Modifier.PUBLIC)
.addParameter(classOf[MappingManager], "mappingManager")
.addStatement("this.accessor = mappingManager.createAccessor($T.class)", accessorClassName)
.build()
}
private case class ReturnTypeAndStatement(returnType: Option[TypeName], statement: String)
private def getReturnTypeAndStatement(typeName: ClassName, isDeleteMethod: Boolean, sourceMethodCall: String): ReturnTypeAndStatement = {
val defaultResult = ReturnTypeAndStatement(Some(typeName), s"return $sourceMethodCall")
if (typeName.equals(ClassName.get(classOf[ResultSetFuture]))) {
ReturnTypeAndStatement(
Some(types.voidFuture),
s"return toVoidFutureResultSet($sourceMethodCall)"
)
} else if (isDeleteMethod) {
ReturnTypeAndStatement(
None,
sourceMethodCall
)
} else if (!(typeName.equals(ClassName.get(classOf[ResultSet])) || typeName.equals(ClassName.get(classOf[Statement])))) {
// tries to pick only domain types
ReturnTypeAndStatement(
Some(types.optionalEntity),
s"return toOptional($sourceMethodCall)"
)
} else defaultResult
}
private def getReturnTypeAndStatement(typeName: ParameterizedTypeName, isDeleteMethod: Boolean, sourceMethodCall: String): ReturnTypeAndStatement = {
val defaultResult = ReturnTypeAndStatement(Some(typeName), s"return $sourceMethodCall")
typeName.rawType match {
case rt if isGenericResult(rt) && isDeleteMethod =>
// Result[T] -> void
ReturnTypeAndStatement(
None,
sourceMethodCall
)
case rt if isGenericResult(rt) => defaultResult
case fx if isGenericFuture(fx) =>
// a return type of DataStax accessor's method can't has more than one generic parameter
val typeParameter = typeName.typeArguments.asScala.head
typeParameter match {
case _: ClassName =>
ReturnTypeAndStatement(
Some(types.optionalEntityFuture),
s"return toCompletableFutureEntity($sourceMethodCall)"
)
case fxa: ParameterizedTypeName if isGenericResult(fxa.rawType) && isDeleteMethod =>
// ListenableFuture[Result[T]] -> CompletableFuture[Void]
ReturnTypeAndStatement(
Some(types.voidFuture),
s"return toVoidFuture($sourceMethodCall)"
)
case fxa: ParameterizedTypeName if isGenericResult(fxa.rawType) =>
// ListenableFuture[Result[T]] -> CompletableFuture[Iterable[T]]
ReturnTypeAndStatement(
Some(types.entityResultFuture),
s"return toCompletableFutureResult($sourceMethodCall)"
)
case _ => defaultResult
}
case _ => defaultResult
}
}
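    // Summary of the adaptation rules implemented above (T is the mapped entity
    // type; the CompletableFuture wrappers are an assumption about BuildTypes):
    //   ResultSetFuture             -> CompletableFuture[Void]
    //   T (domain type)             -> Optional[T]
    //   ListenableFuture[T]         -> CompletableFuture[Optional[T]]
    //   ListenableFuture[Result[T]] -> CompletableFuture[Iterable[T]],
    //     or CompletableFuture[Void] for delete* methods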
private def buildAdapterMethodSpec(source: MethodSpec): MethodSpec = {
val sourceMethodCall = s"this.accessor.${getMethodCallString(source)}"
val isDeleteMethod = source.name.startsWith("delete")
val ReturnTypeAndStatement(returnType, statement) = source.returnType match {
case cn: ClassName => getReturnTypeAndStatement(cn, isDeleteMethod, sourceMethodCall)
case ptn: ParameterizedTypeName => getReturnTypeAndStatement(ptn, isDeleteMethod, sourceMethodCall)
}
var targetSpec = MethodSpec.methodBuilder(source.name)
.addModifiers(Modifier.PUBLIC)
.addParameters(source.parameters)
.addStatement(statement)
if (returnType.isDefined){
targetSpec = targetSpec.returns(returnType.get)
}
targetSpec.build()
}
private def isGenericResult(tpe: ClassName): Boolean = ClassName.get(classOf[Result[_]]).equals(tpe)
private def isGenericFuture(tpe: ClassName): Boolean = ClassName.get(classOf[ListenableFuture[_]]).equals(tpe)
private def getMethodCallString(methodSpec: MethodSpec): String = {
val params = methodSpec.parameters.asScala.map(p => p.name).mkString(", ")
s"${methodSpec.name}($params)"
}
}
| sedovalx/cassandra-service-generator | cassandra-service-generator/src/main/scala/com/github/sedovalx/cassandra/service/generation/builders/AccessorAdapterSpecBuilder.scala | Scala | apache-2.0 | 6,055 |
package com.github.novamage.svalidator.binding.binders.typed
import com.github.novamage.svalidator.binding.binders.{JsonTypedBinder, TypedBinder}
import com.github.novamage.svalidator.binding.{BindingConfig, BindingFailure, BindingPass, BindingResult}
import io.circe.ACursor
/** Performs binding of a double field
*
* @param config The configuration to use for error messages
*/
class DoubleBinder(config: BindingConfig)
extends TypedBinder[Double] with JsonTypedBinder[Double] {
def bind(fieldName: String, valueMap: Map[String, Seq[String]], bindingMetadata: Map[String, Any]): BindingResult[Double] = {
try {
BindingPass(valueMap(fieldName).headOption.map(_.trim).filterNot(_.isEmpty).map(_.toDouble).get)
} catch {
case ex: NumberFormatException => BindingFailure(fieldName, config.languageConfig.invalidDoubleMessage(fieldName, valueMap.get(fieldName).flatMap(_.headOption).getOrElse("")), Some(ex))
case ex: NoSuchElementException => BindingFailure(fieldName, config.languageConfig.noValueProvidedMessage(fieldName), Some(ex))
}
}
override def bindJson(currentCursor: ACursor, fieldName: Option[String], bindingMetadata: Map[String, Any]): BindingResult[Double] = {
currentCursor.as[Option[Double]] match {
case Left(parsingFailure) =>
BindingFailure(fieldName, config.languageConfig.invalidDoubleMessage(fieldName.getOrElse(""), currentCursor.focus.map(_.toString()).getOrElse("")), Some(parsingFailure))
case Right(value) =>
try {
BindingPass(value.get)
} catch {
case ex: NoSuchElementException => BindingFailure(fieldName, config.languageConfig.noValueProvidedMessage(fieldName.getOrElse("")), Some(ex))
}
}
}
}
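// Minimal usage sketch; the BindingConfig value is assumed to exist:
// val binder = new DoubleBinder(config)
// binder.bind("price", Map("price" -> Seq("19.99")), Map.empty) // BindingPass(19.99)
// binder.bind("price", Map("price" -> Seq("abc")), Map.empty)   // BindingFailure(invalid double)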
| NovaMage/SValidator | src/main/scala/com/github/novamage/svalidator/binding/binders/typed/DoubleBinder.scala | Scala | mit | 1,745 |
package ohnosequences.loquat.test
import ohnosequences.loquat._
import ohnosequences.awstools._, regions._, ec2._, autoscaling._, s3._
import ohnosequences.statika._
import test.dataProcessing._
import scala.concurrent._, duration._
case object config {
val defaultAMI = AmazonLinuxAMI(Ireland, HVM, InstanceStore)
case object testConfig extends AnyLoquatConfig { config =>
val loquatName = "experimental"
// TODO: create a role for testing loquat
val iamRoleName = "loquat.testing"
val logsS3Prefix = s3"loquat.testing" /
val metadata: AnyArtifactMetadata = ohnosequences.generated.metadata.loquat
val managerConfig = ManagerConfig(
defaultAMI,
m3.medium,
PurchaseModel.spot(0.1)
)
val workersConfig = WorkersConfig(
defaultAMI,
m3.medium,
PurchaseModel.spot(0.1),
AutoScalingGroupSize(0, 5, 20)
)
override val checkInputObjects = false
override val sqsInitialTimeout: FiniteDuration = 20.seconds
}
val N = 10
val dataMappings: List[DataMapping[processingBundle.type]] = (1 to N).toList.map{ _ => test.dataMappings.dataMapping }
case object testLoquat extends Loquat(testConfig, processingBundle)(dataMappings)
val testUser = LoquatUser(
email = "[email protected]",
localCredentials = new com.amazonaws.auth.profile.ProfileCredentialsProvider("default"),
keypairName = "aalekhin"
)
}
| ohnosequences/loquat | src/test/scala/ohnosequences/loquat/test/config.scala | Scala | agpl-3.0 | 1,424 |
package org.jetbrains.plugins.scala
package codeInspection.collections
import com.intellij.testFramework.EditorTestUtil
import org.jetbrains.plugins.scala.codeInspection.InspectionBundle
/**
* Nikolay.Tropin
* 5/30/13
*/
class MapGetOrElseTest extends OperationsOnCollectionInspectionTest {
import EditorTestUtil.{SELECTION_END_TAG => END, SELECTION_START_TAG => START}
override protected val classOfInspection: Class[_ <: OperationOnCollectionInspection] =
classOf[MapGetOrElseInspection]
override protected val hint: String =
InspectionBundle.message("map.getOrElse.hint")
def test_1() {
val selected = s"None.${START}map(x => 1).getOrElse(0)$END"
checkTextHasError(selected)
val text = "None.map(x => 1).getOrElse(0)"
val result = "None.fold(0)(x => 1)"
testQuickFix(text, result, hint)
}
def test_2() {
val selected = s"""class Test {
| Some(0) ${START}map (_ => true) getOrElse false$END
|}""".stripMargin
checkTextHasError(selected)
val text = """class Test {
| Some(0) map (_ => true) getOrElse false
|}""".stripMargin
val result = """class Test {
| Some(0).fold(false)(_ => true)
|}""".stripMargin
testQuickFix(text, result, hint)
}
def test_3() {
val selected = s"""val function: (Any) => Boolean = _ => true
|(None ${START}map function).getOrElse(false)$END""".stripMargin
checkTextHasError(selected)
val text = """val function: (Any) => Int = _ => 0
|(None map function).getOrElse(1)""".stripMargin
val result = """val function: (Any) => Int = _ => 0
|None.fold(1)(function)""".stripMargin
testQuickFix(text, result, hint)
}
def test_4() {
val text = "None.map(x => Seq(0)).getOrElse(List(0))"
checkTextHasNoErrors(text)
}
def test_5() {
val selected = s"None ${START}map {_ => 1} getOrElse {1}$END"
checkTextHasError(selected)
val text = "None map {_ => 1} getOrElse {1}"
val result = "None.fold(1) { _ => 1 }"
testQuickFix(text, result, hint)
}
def test_6() {
val selected = s"""Some(1) ${START}map (s => s + 1) getOrElse {
| val x = 1
| x
|}$END""".stripMargin
checkTextHasError(selected)
val text = """Some(1) map (s => s + 1) getOrElse {
| val x = 1
| x
|}""".stripMargin
val result = """Some(1).fold {
| val x = 1
| x
|}(s => s + 1)""".stripMargin
testQuickFix(text, result, hint)
}
def test_7(): Unit = {
val text = "None.map(x => 0).getOrElse(1.1)"
checkTextHasNoErrors(text)
}
def test_SCL7009() {
val text = "None.map(_ => Seq(1)).getOrElse(Seq.empty)"
checkTextHasNoErrors(text)
}
}
| ilinum/intellij-scala | test/org/jetbrains/plugins/scala/codeInspection/collections/MapGetOrElseTest.scala | Scala | apache-2.0 | 2,945 |
/**
* Copyright 2015, deepsense.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.deepsense.deeplang.doperations
import org.apache.spark.ml
import org.apache.spark.ml.evaluation
import org.apache.spark.ml.tuning.{CrossValidator, ParamGridBuilder}
import spray.json._
import io.deepsense.commons.utils.DoubleUtils
import io.deepsense.deeplang.doperables.dataframe.DataFrame
import io.deepsense.deeplang.doperables.report.Report
import io.deepsense.deeplang.doperables.spark.wrappers.estimators.LinearRegression
import io.deepsense.deeplang.doperables.spark.wrappers.evaluators.RegressionEvaluator
import io.deepsense.deeplang.doperations.exceptions.ColumnDoesNotExistException
import io.deepsense.deeplang.{DKnowledge, DeeplangIntegTestSupport}
import io.deepsense.sparkutils.Linalg
import io.deepsense.sparkutils.Linalg.Vectors
class GridSearchIntegSpec extends DeeplangIntegTestSupport with DefaultJsonProtocol {
private val regularizationParams = Array(0.01, 0.5, 5.0)
"GridSearch" should {
"find best params" in {
val gridSearch = new GridSearch()
val estimator = new LinearRegression()
val dataFrame = buildDataFrame()
val evaluator = new RegressionEvaluator()
gridSearch.setEstimatorParams(estimatorParams)
gridSearch.setNumberOfFolds(2)
val results = gridSearch.executeUntyped(Vector(estimator, dataFrame, evaluator))(executionContext)
val report = results.head.asInstanceOf[Report]
val tables = report.content.tables
val expectedMetrics: Array[Double] = pureSparkImplementation()
val expectedBestMetric = expectedMetrics.toList.min
val bestMetricsTable = tables.head
bestMetricsTable.values.size shouldBe 1
bestMetricsTable.values shouldBe
List(List(Some("10.0"), Some("5.0"), doubleToCell(expectedBestMetric)))
val expectedMetricsTable = List(
List(Some("10.0"), Some("5.0"), doubleToCell(expectedMetrics(2))),
List(Some("10.0"), Some("0.5"), doubleToCell(expectedMetrics(1))),
List(Some("10.0"), Some("0.01"), doubleToCell(expectedMetrics(0))))
val metricsTable = tables(1)
metricsTable.values.size shouldBe 3
metricsTable.values shouldBe expectedMetricsTable
}
"throw an exception in inference" when {
"estimator params are invalid" in {
val gridSearch = new GridSearch()
val estimator = new LinearRegression()
val dataFrame = buildDataFrame()
val evaluator = new RegressionEvaluator()
val params = JsObject(estimatorParams.fields.updated(
"features column", JsObject(
"type" -> JsString("column"),
"value" -> JsString("invalid")
)))
gridSearch.setEstimatorParams(params)
gridSearch.setNumberOfFolds(2)
a[ColumnDoesNotExistException] should be thrownBy {
gridSearch.inferKnowledgeUntyped(
Vector(DKnowledge(estimator), DKnowledge(dataFrame), DKnowledge(evaluator)))(
executionContext.inferContext)
}
}
"evaluator params are invalid" in {
val gridSearch = new GridSearch()
val estimator = new LinearRegression()
val dataFrame = buildDataFrame()
val evaluator = new RegressionEvaluator()
val params = JsObject(evaluator.paramValuesToJson.asJsObject.fields.updated(
"label column", JsObject(
"type" -> JsString("column"),
"value" -> JsString("invalid")
)))
gridSearch.setEvaluatorParams(params)
gridSearch.setNumberOfFolds(2)
a[ColumnDoesNotExistException] should be thrownBy {
gridSearch.inferKnowledgeUntyped(
Vector(DKnowledge(estimator), DKnowledge(dataFrame), DKnowledge(evaluator)))(
executionContext.inferContext)
}
}
}
}
private val estimatorParams = JsObject(
"regularization param" -> seqParam(Seq(0.01, 0.5, 5.0)),
"features column" -> JsObject(
"type" -> JsString("column"),
"value" -> JsString("features")
),
"max iterations" -> JsNumber(10.0)
)
private def seqParam(values: Seq[Double]): JsObject = {
JsObject(
"values" -> JsArray(
JsObject(
"type" -> JsString("seq"),
"value" -> JsObject(
"sequence" -> values.toJson)
)
)
)
}
private def buildDataFrame(): DataFrame = {
val districtFactors = Seq(0.6, 0.8, 1.0)
val priceForMeterSq = 7000
val apartments = Range(40, 300, 5).map { case flatSize =>
val districtFactor = districtFactors(flatSize % districtFactors.length)
Apartment(
Vectors.dense(flatSize, districtFactor),
(flatSize * districtFactor * priceForMeterSq).toLong)
}
DataFrame.fromSparkDataFrame(sparkSQLSession.createDataFrame(apartments))
}
private def pureSparkImplementation(): Array[Double] = {
val lr = new ml.regression.LinearRegression()
val paramGrid = new ParamGridBuilder()
.addGrid(lr.regParam, regularizationParams)
.build()
val cv = new CrossValidator()
.setEstimator(lr)
.setEvaluator(new evaluation.RegressionEvaluator())
.setEstimatorParamMaps(paramGrid)
.setNumFolds(2)
val cvModel = cv.fit(buildDataFrame().sparkDataFrame)
cvModel.avgMetrics
}
private def doubleToCell(d: Double) = Some(DoubleUtils.double2String(d))
}
private case class Apartment(features: Linalg.Vector, label: Double)
| deepsense-io/seahorse-workflow-executor | deeplang/src/it/scala/io/deepsense/deeplang/doperations/GridSearchIntegSpec.scala | Scala | apache-2.0 | 5,989 |
package com.clackjones.connectivitymap.spark
import com.clackjones.connectivitymap.UnitSpec
import scala.util.{Failure, Success, Try}
class SparkCmapHelperFunctionsSpec extends UnitSpec {
"filenameToRefsetName" should "return a Success with the ReferenceSet name generated from the filename" in {
val path = "/path/to/filename/"
val filename =
path + "BRD-A00546892__BIPERIDEN__trt_cp__10.0um__A375__6H__CPC004_A375_6H_X1_B3_DUO52HI53LO:J19.ref.tab.gz"
val result : Try[String] = SparkCmapHelperFunctions.filenameToRefsetName(filename)
result shouldBe a [Success[_]]
result.get shouldEqual "BRD-A00546892__A375"
}
it should "return a Failure object with the original filename string in it" in {
val path = "/path/to/filename/"
val filename =
path + "Invalid_filename"
val result : Try[String] = SparkCmapHelperFunctions.filenameToRefsetName(filename)
result shouldBe a [Failure[_]]
}
}
| hiraethus/scala-connectivity-map | src/test/scala/com/clackjones/connectivitymap/spark/SparkCmapHelperFunctionsSpec.scala | Scala | gpl-3.0 | 949 |
/**
* Copyright 2015 Mohiva Organisation (license at mohiva dot com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.mohiva.play.silhouette.test
import java.util.UUID
import com.mohiva.play.silhouette.api._
import com.mohiva.play.silhouette.api.crypto.{ Base64AuthenticatorEncoder, Signer }
import com.mohiva.play.silhouette.api.repositories.AuthenticatorRepository
import com.mohiva.play.silhouette.api.services.{ AuthenticatorService, IdentityService }
import com.mohiva.play.silhouette.api.util.Clock
import com.mohiva.play.silhouette.impl.authenticators._
import com.mohiva.play.silhouette.impl.util.{ DefaultFingerprintGenerator, SecureRandomIDGenerator }
import play.api.mvc.{ DefaultCookieHeaderEncoding, DefaultSessionCookieBaker, RequestHeader }
import scala.collection.mutable
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{ ExecutionContext, Future }
import scala.reflect.runtime.universe._
import scala.util.Success
/**
* A fake identity.
*
* @param loginInfo The linked login info for an identity.
*/
case class FakeIdentity(loginInfo: LoginInfo) extends Identity
/**
* A fake identity service implementation which can handle a predefined list of identities.
*
* @param identities A list of (login info -> identity) pairs this service is responsible for.
* @tparam I The type of the identity to handle.
*/
class FakeIdentityService[I <: Identity](identities: (LoginInfo, I)*) extends IdentityService[I] {
/**
* Retrieves an identity that matches the specified login info.
*
* @param loginInfo The login info to retrieve an identity.
* @return The retrieved identity or None if no identity could be retrieved for the given login info.
*/
def retrieve(loginInfo: LoginInfo): Future[Option[I]] = {
Future.successful(identities.find(_._1 == loginInfo).map(_._2))
}
}
/**
* A fake authenticator repository which persists authenticators in memory.
*
* @tparam T The type of the authenticator to handle.
*/
class FakeAuthenticatorRepository[T <: StorableAuthenticator] extends AuthenticatorRepository[T] {
/**
* The data store for the OAuth1 info.
*/
var data: mutable.HashMap[String, T] = mutable.HashMap()
/**
* Finds the authenticator for the given ID.
*
* @param id The authenticator ID.
* @return The found authenticator or None if no authenticator could be found for the given ID.
*/
def find(id: String): Future[Option[T]] = {
Future.successful(data.get(id))
}
/**
* Adds a new authenticator.
*
* @param authenticator The authenticator to add.
* @return The added authenticator.
*/
def add(authenticator: T): Future[T] = {
data += (authenticator.id -> authenticator)
Future.successful(authenticator)
}
/**
* Updates an already existing authenticator.
*
* @param authenticator The authenticator to update.
* @return The updated authenticator.
*/
def update(authenticator: T): Future[T] = {
data += (authenticator.id -> authenticator)
Future.successful(authenticator)
}
/**
* Removes the authenticator for the given ID.
*
* @param id The authenticator ID.
* @return An empty future.
*/
def remove(id: String): Future[Unit] = {
data -= id
Future.successful(())
}
}
/**
* A fake session authenticator service.
*/
case class FakeSessionAuthenticatorService() extends SessionAuthenticatorService(
SessionAuthenticatorSettings(),
new DefaultFingerprintGenerator(),
new Base64AuthenticatorEncoder,
new DefaultSessionCookieBaker(),
Clock())
/**
* A fake cookie authenticator service.
*/
case class FakeCookieAuthenticatorService() extends CookieAuthenticatorService(
CookieAuthenticatorSettings(),
None,
new Signer {
def sign(data: String) = data
def extract(message: String) = Success(message)
},
new DefaultCookieHeaderEncoding(),
new Base64AuthenticatorEncoder,
new DefaultFingerprintGenerator(),
new SecureRandomIDGenerator(),
Clock())
/**
* A fake bearer token authenticator service.
*/
case class FakeBearerTokenAuthenticatorService() extends BearerTokenAuthenticatorService(
BearerTokenAuthenticatorSettings(),
new FakeAuthenticatorRepository[BearerTokenAuthenticator],
new SecureRandomIDGenerator(),
Clock())
/**
* A fake JWT authenticator service.
*/
case class FakeJWTAuthenticatorService() extends JWTAuthenticatorService(
JWTAuthenticatorSettings(sharedSecret = UUID.randomUUID().toString),
None,
new Base64AuthenticatorEncoder,
new SecureRandomIDGenerator(),
Clock())
/**
* A fake Dummy authenticator service.
*/
case class FakeDummyAuthenticatorService() extends DummyAuthenticatorService
/**
* A fake authenticator service factory.
*/
object FakeAuthenticatorService {
/**
* Creates a new fake authenticator for the given authenticator type.
*
* @tparam T The type of the authenticator.
* @return A fully configured authenticator instance.
*/
def apply[T <: Authenticator: TypeTag](): AuthenticatorService[T] = {
(typeOf[T] match {
case t if t <:< typeOf[SessionAuthenticator] => FakeSessionAuthenticatorService()
case t if t <:< typeOf[CookieAuthenticator] => FakeCookieAuthenticatorService()
case t if t <:< typeOf[BearerTokenAuthenticator] => FakeBearerTokenAuthenticatorService()
case t if t <:< typeOf[JWTAuthenticator] => FakeJWTAuthenticatorService()
case t if t <:< typeOf[DummyAuthenticator] => FakeDummyAuthenticatorService()
}).asInstanceOf[AuthenticatorService[T]]
}
}
/**
* A fake authenticator.
*
* @param loginInfo The linked login info for an identity.
* @param id The ID of the authenticator.
* @param isValid True if the authenticator is valid, false otherwise.
*/
case class FakeAuthenticator(loginInfo: LoginInfo, id: String = UUID.randomUUID().toString, isValid: Boolean = true)
extends StorableAuthenticator
/**
* A fake authenticator factory.
*/
object FakeAuthenticator {
/**
* Creates a new fake authenticator for the given authenticator type.
*
* @param loginInfo The login info for which the authenticator should be created.
* @param env The Silhouette environment.
* @param requestHeader The request header.
* @tparam E The type of the environment,
* @return A authenticator instance.
*/
def apply[E <: Env](loginInfo: LoginInfo)(implicit env: Environment[E], requestHeader: RequestHeader): E#A = {
env.authenticatorService.create(loginInfo)
}
}
/**
* A fake environment implementation.
*
* @param identities A list of (login info -> identity) pairs to return inside a Silhouette action.
* @param requestProviders The list of request providers.
* @param eventBus The event bus implementation.
* @param executionContext The execution context to handle the asynchronous operations.
* @param tt The type tag of the authenticator type.
* @tparam E The type of the environment.
*/
case class FakeEnvironment[E <: Env](
identities: Seq[(LoginInfo, E#I)],
requestProviders: Seq[RequestProvider] = Seq(),
eventBus: EventBus = EventBus()
)(
implicit
val executionContext: ExecutionContext,
tt: TypeTag[E#A]
) extends Environment[E] {
/**
* The identity service implementation.
*/
val identityService: IdentityService[E#I] = new FakeIdentityService[E#I](identities: _*)
/**
* The authenticator service implementation.
*/
val authenticatorService: AuthenticatorService[E#A] = FakeAuthenticatorService[E#A]()
}
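// Illustrative test wiring; the Env type, login info and request below are
// assumptions, not part of this file:
// implicit val env = FakeEnvironment[MyEnv](Seq(loginInfo -> FakeIdentity(loginInfo)))
// implicit val request = play.api.test.FakeRequest()
// val authenticator = FakeAuthenticator[MyEnv](loginInfo)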
| akkie/play-silhouette | silhouette-testkit/app/com/mohiva/play/silhouette/test/Fakes.scala | Scala | apache-2.0 | 8,027 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import scala.collection.GenSeq
import scala.collection.GenMap
import scala.collection.GenSet
import scala.collection.GenIterable
import scala.collection.GenTraversable
import scala.collection.GenTraversableOnce
import org.scalactic.TripleEquals
import org.scalactic.TypeCheckedTripleEquals
import Matchers._
import exceptions.TestFailedException
class ShouldTripleEqualsSpec extends FunSpec with NonImplicitAssertions {
case class Super(size: Int)
class Sub(sz: Int) extends Super(sz)
val super1: Super = new Super(1)
val sub1: Sub = new Sub(1)
val super2: Super = new Super(2)
val sub2: Sub = new Sub(2)
val nullSuper: Super = null
// SKIP-SCALATESTJS,NATIVE-START
val javaList: java.util.List[Int] = new java.util.ArrayList
javaList.add(1)
javaList.add(2)
val javaArrayList: java.util.ArrayList[Int] = new java.util.ArrayList
javaArrayList.add(1)
javaArrayList.add(2)
val javaMap: java.util.Map[String, Int] = new java.util.HashMap
javaMap.put("one",1)
javaMap.put("two", 2)
val javaHashMap: java.util.HashMap[String, Int] = new java.util.HashMap
javaHashMap.put("one",1)
javaHashMap.put("two", 2)
val javaSet: java.util.Set[Int] = new java.util.HashSet
javaSet.add(1)
javaSet.add(2)
val javaHashSet: java.util.HashSet[Int] = new java.util.HashSet
javaHashSet.add(1)
javaHashSet.add(2)
// SKIP-SCALATESTJS,NATIVE-END
  describe("the custom equality should === operator") {
describe("with TripleEquals") {
it("should compare anything with anything") {
new TripleEquals {
1 should === (1)
intercept[TestFailedException] { 1 should !== (1) }
1 should === (1L)
intercept[TestFailedException] { 1 should !== (1L) }
1L should === (1)
intercept[TestFailedException] { 1L should !== (1) }
"1" should !== (1)
intercept[TestFailedException] { "1" should === (1) }
1 should !== ("1")
intercept[TestFailedException] { 1 should === ("1") }
super1 should !== (super2)
super1 should !== (sub2)
sub2 should !== (super1)
super1 should === (super1)
super1 should === (sub1)
sub1 should === (super1)
val caught1 = intercept[TestFailedException] { super1 should === (null) }
caught1.getMessage should be ("Super(1) did not equal null")
super1 should !== (null)
nullSuper should === (null)
val caught2 = intercept[TestFailedException] { nullSuper should !== (null) }
caught2.getMessage should be ("null equaled null")
val caught3 = intercept[TestFailedException] { nullSuper should === (super1) }
caught3.getMessage should be ("null did not equal Super(1)")
nullSuper should !== (super1)
Map("I" -> 1, "II" -> 2) should === (Map("I" -> 1, "II" -> 2))
Map("I" -> 1, "II" -> 2) should !== (Map("1" -> 1, "2" -> 2))
intercept[TestFailedException] { Map("I" -> 1, "II" -> 2) should === (7) }
Map("I" -> 1, "II" -> 2) should !== (7)
Set(1, 2, 3) should === (Set(1, 2, 3))
Set(1, 2, 3) should !== (Set(2, 3, 4))
intercept[TestFailedException] { Set(1, 2, 3) should === (7) }
Set(1, 2, 3) should !== (7)
List(1, 2, 3) should === (List(1, 2, 3))
List(1, 2, 3) should !== (List(2, 3, 4))
Array(1, 2, 3) should === (Array(1, 2, 3))
Array(1, 2, 3) should !== (Array(2, 3, 4))
Seq(1, 2, 3) should === (Array(1, 2, 3))
Seq(1, 2, 3) should !== (Array(2, 3, 4))
Array(1, 2, 3) should === (Seq(1, 2, 3))
Array(1, 2, 3) should !== (Seq(2, 3, 4))
// SKIP-SCALATESTJS,NATIVE-START
javaList should === (javaArrayList)
intercept[TestFailedException] { javaList should !== (javaArrayList) }
javaArrayList should === (javaList)
intercept[TestFailedException] { javaArrayList should !== (javaList) }
intercept[TestFailedException] { javaList should === (7) }
javaList should !== (7)
javaMap should === (javaHashMap)
intercept[TestFailedException] { javaMap should !== (javaHashMap) }
javaHashMap should === (javaMap)
intercept[TestFailedException] { javaHashMap should !== (javaMap) }
intercept[TestFailedException] { javaMap should === (7) }
javaMap should !== (7)
javaSet should === (javaHashSet)
intercept[TestFailedException] { javaSet should !== (javaHashSet) }
javaHashSet should === (javaSet)
intercept[TestFailedException] { javaHashSet should !== (javaSet) }
intercept[TestFailedException] { javaSet should === (7) }
javaSet should !== (7)
// SKIP-SCALATESTJS,NATIVE-END
() should === (())
() should !== (7)
}
}
it("should be overridable with TypeCheckedTripleEquals locally when TripleEquals imported") {
object O extends TripleEquals
new TypeCheckedTripleEquals {
class Fruit { override def equals(o: Any) = o.isInstanceOf[Fruit] }
trait Crunchy
class Apple extends Fruit with Crunchy
val fr: Fruit = new Apple
val cr: Crunchy = new Apple
val ap: Apple = new Apple
1 should === (1)
intercept[TestFailedException] { 1 should !== (1) }
ap should === (fr)
fr should === (ap)
ap should === (cr)
cr should === (ap)
super1 should !== (super2)
super1 should !== (sub2)
sub2 should !== (super1)
super1 should === (super1)
super1 should === (sub1)
sub1 should === (super1)
// The rest should not compile
// 1 should === (1L)
// 1L should === (1)
// 1 should !== (1L)
// 1L should !== (1)
// "1" should === (1)
// 1 should === ("1")
// "1" should !== (1)
// 1 should !== ("1")
// fr should === (cr)
// cr should === (fr)
}
}
it("should be overridable with TypeCheckedTripleEquals locally when TripleEquals mixed in") {
object O extends TripleEquals {
new TypeCheckedTripleEquals {
class Fruit { override def equals(o: Any) = o.isInstanceOf[Fruit] }
trait Crunchy
class Apple extends Fruit with Crunchy
val fr: Fruit = new Apple
val cr: Crunchy = new Apple
val ap: Apple = new Apple
1 should === (1)
intercept[TestFailedException] { 1 should !== (1) }
ap should === (fr)
fr should === (ap)
ap should === (cr)
cr should === (ap)
super1 should !== (super2)
super1 should !== (sub2)
sub2 should !== (super1)
super1 should === (super1)
super1 should === (sub1)
sub1 should === (super1)
// The rest should not compile
// 1 should === (1L)
// 1L should === (1)
// 1 should !== (1L)
// 1L should !== (1)
// "1" should === (1)
// 1 should === ("1")
// "1" should !== (1)
// 1 should !== ("1")
// fr should === (cr)
// cr should === (fr)
}
}
}
}
describe("with TypeCheckedTripleEquals") {
it("should compare supertypes with subtypes on either side") {
new TypeCheckedTripleEquals {
class Fruit { override def equals(o: Any) = o.isInstanceOf[Fruit] }
trait Crunchy
class Apple extends Fruit with Crunchy
val fr: Fruit = new Apple
val cr: Crunchy = new Apple
val ap: Apple = new Apple
1 should === (1)
intercept[TestFailedException] { 1 should !== (1) }
ap should === (fr)
fr should === (ap)
ap should === (cr)
cr should === (ap)
super1 should !== (super2)
super1 should !== (sub2)
sub2 should !== (super1)
super1 should === (super1)
super1 should === (sub1)
sub1 should === (super1)
val caught1 = intercept[TestFailedException] { super1 should === (null) }
caught1.getMessage should be ("Super(1) did not equal null")
super1 should !== (null)
nullSuper should === (null)
val caught2 = intercept[TestFailedException] { nullSuper should !== (null) }
caught2.getMessage should be ("null equaled null")
val caught3 = intercept[TestFailedException] { nullSuper should === (super1) }
caught3.getMessage should be ("null did not equal Super(1)")
nullSuper should !== (super1)
Map("I" -> 1, "II" -> 2) should === (Map("I" -> 1, "II" -> 2))
Map("I" -> 1, "II" -> 2) should !== (Map("1" -> 1, "2" -> 2))
Set(1, 2, 3) should === (Set(1, 2, 3))
Set(1, 2, 3) should !== (Set(2, 3, 4))
List(1, 2, 3) should === (List(1, 2, 3))
List(1, 2, 3) should !== (List(2, 3, 4))
Array(1, 2, 3) should === (Array(1, 2, 3))
Array(1, 2, 3) should !== (Array(2, 3, 4))
// SKIP-SCALATESTJS,NATIVE-START
javaList should === (javaArrayList)
intercept[TestFailedException] { javaList should !== (javaArrayList) }
javaArrayList should === (javaList)
intercept[TestFailedException] { javaArrayList should !== (javaList) }
javaMap should === (javaHashMap)
intercept[TestFailedException] { javaMap should !== (javaHashMap) }
javaHashMap should === (javaMap)
intercept[TestFailedException] { javaHashMap should !== (javaMap) }
javaSet should === (javaHashSet)
intercept[TestFailedException] { javaSet should !== (javaHashSet) }
javaHashSet should === (javaSet)
intercept[TestFailedException] { javaHashSet should !== (javaSet) }
// SKIP-SCALATESTJS,NATIVE-END
() should === (())
// The rest should not compile
// () should !== (7)
// 1 should === (1L)
// 1L should === (1)
// 1 should !== (1L)
// 1L should !== (1)
// "1" should === (1)
// 1 should === ("1")
// "1" should !== (1)
// 1 should !== ("1")
// fr should === (cr)
// cr should === (fr)
// Array(1, 2, 3) should === (Seq(1, 2, 3))
// Array(1, 2, 3) should !== (Seq(2, 3, 4))
// Seq(1, 2, 3) should === (Array(1, 2, 3))
// Seq(1, 2, 3) should !== (Array(2, 3, 4))
// intercept[TestFailedException] { Map("I" -> 1, "II" -> 2) should === (7) }
// Map("I" -> 1, "II" -> 2) should !== (7)
// intercept[TestFailedException] { Set(1, 2, 3) should === (7) }
// Set(1, 2, 3) should !== (7)
// intercept[TestFailedException] { javaList should === (7) }
// javaList should !== (7)
// intercept[TestFailedException] { javaMap should === (7) }
// javaMap should !== (7)
// intercept[TestFailedException] { javaSet should === (7) }
// javaSet should !== (7)
}
}
it("should be overridable with TripleEquals locally when TypeCheckedTripleEquals imported") {
object O extends TypeCheckedTripleEquals
new TripleEquals {
1 should === (1)
intercept[TestFailedException] { 1 should !== (1) }
1 should === (1L)
intercept[TestFailedException] { 1 should !== (1L) }
1L should === (1)
intercept[TestFailedException] { 1L should !== (1) }
"1" should !== (1)
intercept[TestFailedException] { "1" should === (1) }
1 should !== ("1")
intercept[TestFailedException] { 1 should === ("1") }
super1 should !== (super2)
super1 should !== (sub2)
// sub2 should !== (super1) // compiles on 2.10 but not 2.9
super1 should === (super1)
super1 should === (sub1)
// sub1 should === (super1) // compiles on 2.10 but not 2.9
}
}
it("should be overridable with TripleEquals locally when TypeCheckedTripleEquals mixed in") {
object O extends TypeCheckedTripleEquals {
new TripleEquals {
1 should === (1)
intercept[TestFailedException] { 1 should !== (1) }
1 should === (1L)
intercept[TestFailedException] { 1 should !== (1L) }
1L should === (1)
intercept[TestFailedException] { 1L should !== (1) }
"1" should !== (1)
intercept[TestFailedException] { "1" should === (1) }
1 should !== ("1")
intercept[TestFailedException] { 1 should === ("1") }
super1 should !== (super2)
super1 should !== (sub2)
// sub2 should !== (super1) // compiles on 2.10 but not 2.9
super1 should === (super1)
super1 should === (sub1)
// sub1 should === (super1) // compiles on 2.10 but not 2.9
}
}
}
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/ShouldTripleEqualsSpec.scala | Scala | apache-2.0 | 14,043 |
package io.sphere.util
import java.util.Locale
/** Extractor for Locales, e.g. for use in pattern-matching request paths. */
object LangTag {
final val UNDEFINED: String = "und"
class LocaleOpt(val locale: Locale) extends AnyVal {
// if toLanguageTag returns "und", it means the language tag is undefined
def isEmpty: Boolean = UNDEFINED == locale.toLanguageTag
def get: Locale = locale
}
def unapply(s: String): LocaleOpt = new LocaleOpt(Locale.forLanguageTag(s))
def invalidLangTagMessage(invalidLangTag: String) = s"Invalid language tag: '$invalidLangTag'"
}
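/* Usage sketch (illustrative, not in the original file): LocaleOpt's
 * isEmpty/get make this a name-based extractor, so request path segments
 * can be matched directly:
 *
 *   "de-DE" match {
 *     case LangTag(locale) => locale.getLanguage            // "de"
 *     case other           => invalidLangTagMessage(other)  // fallback
 *   }
 */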
| sphereio/sphere-scala-libs | util/src/main/scala/LangTag.scala | Scala | apache-2.0 | 590 |
package com.insweat.hssd.lib.tree
import com.insweat.hssd.lib.tree.structured.StructuredTree
import com.insweat.hssd.lib.tree.structured.StructuredTreeNode
import com.insweat.hssd.lib.essence.SchemaLike
import com.insweat.hssd.lib.essence.EntryData
import scala.collection.immutable.HashMap
class EntryTree(val schema: SchemaLike) extends StructuredTree {
private var _nodesByID = HashMap[Long, EntryNode]()
def nodesByID = {
if(_nodesByID.isEmpty) {
foreach{ node =>
val ed = EntryData.of(node.asInstanceOf[EntryNode])
_nodesByID += ed.entryID -> node.asInstanceOf[EntryNode]
}
}
_nodesByID
}
def flushNodesByID() {
_nodesByID = HashMap()
}
def findByName(name: String): Option[EntryNode] =
find(_.name == name).map(_.asInstanceOf[EntryNode])
override def insert(
parent: Option[TreeNodeLike],
name: String,
leaf: Boolean): StructuredTreeNode = {
flushNodesByID()
super.insert(parent, name, leaf)
}
override def remove(node: TreeNodeLike): Boolean = {
flushNodesByID()
super.remove(node)
}
}
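/* Hypothetical usage sketch: structural mutations go through insert/remove,
 * which flush the cached ID index; `nodesByID` lazily rebuilds it on the
 * next access. `schema` below stands for an assumed SchemaLike instance.
 *
 *   val tree = new EntryTree(schema)
 *   tree.insert(None, "root", leaf = false)
 *   val byId = tree.nodesByID   // index rebuilt here
 */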
| insweat/hssd | com.insweat.hssd.lib/src/com/insweat/hssd/lib/tree/EntryTree.scala | Scala | lgpl-3.0 | 1,231 |
/*============================================================================
This Chisel source file is part of a pre-release version of the HardFloat IEEE
Floating-Point Arithmetic Package, by John R. Hauser (with some contributions
from Yunsup Lee and Andrew Waterman, mainly concerning testing).
Copyright 2017 SiFive, Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions, and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions, and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of SiFive nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY SIFIVE AND CONTRIBUTORS "AS IS", AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, ARE
DISCLAIMED. IN NO EVENT SHALL SIFIVE OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=============================================================================*/
package types
import Chisel._
import consts._
/*----------------------------------------------------------------------------
| Computes a division or square root for floating-point in recoded form.
| Multiple clock cycles are needed for each division or square-root operation,
| except possibly in special cases.
*----------------------------------------------------------------------------*/
class
DivSqrtRecFNToRaw_small(expWidth: Int, sigWidth: Int, options: Int)
extends Module
{
val io = new Bundle {
/*--------------------------------------------------------------------
*--------------------------------------------------------------------*/
val inReady = Bool(OUTPUT)
val inValid = Bool(INPUT)
val sqrtOp = Bool(INPUT)
val a = Bits(INPUT, expWidth + sigWidth + 1)
val b = Bits(INPUT, expWidth + sigWidth + 1)
val roundingMode = UInt(INPUT, 3)
/*--------------------------------------------------------------------
*--------------------------------------------------------------------*/
val rawOutValid_div = Bool(OUTPUT)
val rawOutValid_sqrt = Bool(OUTPUT)
val roundingModeOut = UInt(OUTPUT, 3)
val invalidExc = Bool(OUTPUT)
val infiniteExc = Bool(OUTPUT)
val rawOut = new RawFloat(expWidth, sigWidth + 2).asOutput
}
/*------------------------------------------------------------------------
*------------------------------------------------------------------------*/
val cycleNum = Reg(init = UInt(0, log2Up(sigWidth + 3)))
val sqrtOp_Z = Reg(Bool())
val majorExc_Z = Reg(Bool())
//*** REDUCE 3 BITS TO 2-BIT CODE:
val isNaN_Z = Reg(Bool())
val isInf_Z = Reg(Bool())
val isZero_Z = Reg(Bool())
val sign_Z = Reg(Bool())
val sExp_Z = Reg(SInt(width = expWidth + 2))
val fractB_Z = Reg(UInt(width = sigWidth - 1))
val roundingMode_Z = Reg(UInt(width = 3))
/*------------------------------------------------------------------------
| (The most-significant and least-significant bits of 'rem_Z' are needed
| only for square roots.)
*------------------------------------------------------------------------*/
val rem_Z = Reg(UInt(width = sigWidth + 2))
val notZeroRem_Z = Reg(Bool())
val sigX_Z = Reg(UInt(width = sigWidth + 2))
/*------------------------------------------------------------------------
*------------------------------------------------------------------------*/
val rawA_S = rawFloatFromRecFN(expWidth, sigWidth, io.a)
val rawB_S = rawFloatFromRecFN(expWidth, sigWidth, io.b)
//*** IMPROVE THESE:
val notSigNaNIn_invalidExc_S_div =
(rawA_S.isZero && rawB_S.isZero) || (rawA_S.isInf && rawB_S.isInf)
val notSigNaNIn_invalidExc_S_sqrt =
! rawA_S.isNaN && ! rawA_S.isZero && rawA_S.sign
val majorExc_S =
Mux(io.sqrtOp,
isSigNaNRawFloat(rawA_S) || notSigNaNIn_invalidExc_S_sqrt,
isSigNaNRawFloat(rawA_S) || isSigNaNRawFloat(rawB_S) ||
notSigNaNIn_invalidExc_S_div ||
(! rawA_S.isNaN && ! rawA_S.isInf && rawB_S.isZero)
)
val isNaN_S =
Mux(io.sqrtOp,
rawA_S.isNaN || notSigNaNIn_invalidExc_S_sqrt,
rawA_S.isNaN || rawB_S.isNaN || notSigNaNIn_invalidExc_S_div
)
val isInf_S = Mux(io.sqrtOp, rawA_S.isInf, rawA_S.isInf || rawB_S.isZero)
val isZero_S = Mux(io.sqrtOp, rawA_S.isZero, rawA_S.isZero || rawB_S.isInf)
val sign_S = rawA_S.sign ^ (! io.sqrtOp && rawB_S.sign)
val specialCaseA_S = rawA_S.isNaN || rawA_S.isInf || rawA_S.isZero
val specialCaseB_S = rawB_S.isNaN || rawB_S.isInf || rawB_S.isZero
val normalCase_S_div = ! specialCaseA_S && ! specialCaseB_S
val normalCase_S_sqrt = ! specialCaseA_S && ! rawA_S.sign
val normalCase_S = Mux(io.sqrtOp, normalCase_S_sqrt, normalCase_S_div)
val sExpQuot_S_div =
rawA_S.sExp +&
Cat(rawB_S.sExp(expWidth), ~rawB_S.sExp(expWidth - 1, 0)).asSInt
//*** IS THIS OPTIMAL?:
val sSatExpQuot_S_div =
Cat(Mux((SInt(BigInt(7)<<(expWidth - 2)) <= sExpQuot_S_div),
UInt(6),
sExpQuot_S_div(expWidth + 1, expWidth - 2)
),
sExpQuot_S_div(expWidth - 3, 0)
).asSInt
val evenSqrt_S = io.sqrtOp && ! rawA_S.sExp(0)
val oddSqrt_S = io.sqrtOp && rawA_S.sExp(0)
/*------------------------------------------------------------------------
*------------------------------------------------------------------------*/
val idle = (cycleNum === UInt(0))
val inReady = (cycleNum <= UInt(1))
val entering = inReady && io.inValid
val entering_normalCase = entering && normalCase_S
val skipCycle2 = (cycleNum === UInt(3)) && sigX_Z(sigWidth + 1)
when (! idle || io.inValid) {
cycleNum :=
Mux(entering & ! normalCase_S, UInt(1), UInt(0)) |
Mux(entering_normalCase,
Mux(io.sqrtOp,
Mux(rawA_S.sExp(0), UInt(sigWidth), UInt(sigWidth + 1)),
UInt(sigWidth + 2)
),
UInt(0)
) |
Mux(! idle && ! skipCycle2, cycleNum - UInt(1), UInt(0)) |
Mux(! idle && skipCycle2, UInt(1), UInt(0))
}
io.inReady := inReady
/*------------------------------------------------------------------------
*------------------------------------------------------------------------*/
when (entering) {
sqrtOp_Z := io.sqrtOp
majorExc_Z := majorExc_S
isNaN_Z := isNaN_S
isInf_Z := isInf_S
isZero_Z := isZero_S
sign_Z := sign_S
}
when (entering_normalCase) {
sExp_Z :=
Mux(io.sqrtOp,
(rawA_S.sExp>>1) +& SInt(BigInt(1)<<(expWidth - 1)),
sSatExpQuot_S_div
)
roundingMode_Z := io.roundingMode
}
when (entering_normalCase && ! io.sqrtOp) {
fractB_Z := rawB_S.sig(sigWidth - 2, 0)
}
/*------------------------------------------------------------------------
*------------------------------------------------------------------------*/
val rem =
Mux(inReady && ! oddSqrt_S, rawA_S.sig<<1, UInt(0)) |
Mux(inReady && oddSqrt_S,
Cat(rawA_S.sig(sigWidth - 1, sigWidth - 2) - UInt(1),
rawA_S.sig(sigWidth - 3, 0)<<3
),
UInt(0)
) |
Mux(! inReady, rem_Z<<1, UInt(0))
val bitMask = (UInt(1)<<cycleNum)>>2
val trialTerm =
Mux(inReady && ! io.sqrtOp, rawB_S.sig<<1, UInt(0)) |
Mux(inReady && evenSqrt_S, UInt(BigInt(1)<<sigWidth), UInt(0)) |
Mux(inReady && oddSqrt_S, UInt(BigInt(5)<<(sigWidth - 1)), UInt(0)) |
Mux(! inReady && ! sqrtOp_Z, Cat(UInt(1, 1), fractB_Z)<<1, UInt(0)) |
Mux(! inReady && sqrtOp_Z, sigX_Z<<1 | bitMask, UInt(0))
val trialRem = rem.zext - trialTerm.zext
val newBit = (SInt(0) <= trialRem)
when (entering_normalCase || (cycleNum > UInt(2))) {
rem_Z := Mux(newBit, trialRem.asUInt, rem)
}
when (entering_normalCase || (! inReady && newBit)) {
notZeroRem_Z := (trialRem =/= SInt(0))
sigX_Z :=
Mux(inReady && ! io.sqrtOp, newBit<<(sigWidth + 1), UInt(0)) |
Mux(inReady && io.sqrtOp, UInt(BigInt(1)<<sigWidth), UInt(0)) |
Mux(inReady && oddSqrt_S, newBit<<(sigWidth - 1), UInt(0)) |
Mux(! inReady, sigX_Z | bitMask, UInt(0))
}
/*------------------------------------------------------------------------
*------------------------------------------------------------------------*/
val rawOutValid = (cycleNum === UInt(1))
io.rawOutValid_div := rawOutValid && ! sqrtOp_Z
io.rawOutValid_sqrt := rawOutValid && sqrtOp_Z
io.roundingModeOut := roundingMode_Z
io.invalidExc := majorExc_Z && isNaN_Z
io.infiniteExc := majorExc_Z && ! isNaN_Z
io.rawOut.isNaN := isNaN_Z
io.rawOut.isInf := isInf_Z
io.rawOut.isZero := isZero_Z
io.rawOut.sign := sign_Z
io.rawOut.sExp := sExp_Z
io.rawOut.sig := sigX_Z<<1 | notZeroRem_Z
}
/*----------------------------------------------------------------------------
*----------------------------------------------------------------------------*/
class
DivSqrtRecFN_small(expWidth: Int, sigWidth: Int, options: Int)
extends Module
{
val io = new Bundle {
/*--------------------------------------------------------------------
*--------------------------------------------------------------------*/
val inReady = Bool(OUTPUT)
val inValid = Bool(INPUT)
val sqrtOp = Bool(INPUT)
val a = Bits(INPUT, expWidth + sigWidth + 1)
val b = Bits(INPUT, expWidth + sigWidth + 1)
val roundingMode = UInt(INPUT, 3)
val detectTininess = UInt(INPUT, 1)
/*--------------------------------------------------------------------
*--------------------------------------------------------------------*/
val outValid_div = Bool(OUTPUT)
val outValid_sqrt = Bool(OUTPUT)
val out = Bits(OUTPUT, expWidth + sigWidth + 1)
val exceptionFlags = Bits(OUTPUT, 5)
}
//------------------------------------------------------------------------
//------------------------------------------------------------------------
val divSqrtRecFNToRaw =
Module(new DivSqrtRecFNToRaw_small(expWidth, sigWidth, options))
io.inReady := divSqrtRecFNToRaw.io.inReady
divSqrtRecFNToRaw.io.inValid := io.inValid
divSqrtRecFNToRaw.io.sqrtOp := io.sqrtOp
divSqrtRecFNToRaw.io.a := io.a
divSqrtRecFNToRaw.io.b := io.b
divSqrtRecFNToRaw.io.roundingMode := io.roundingMode
//------------------------------------------------------------------------
//------------------------------------------------------------------------
io.outValid_div := divSqrtRecFNToRaw.io.rawOutValid_div
io.outValid_sqrt := divSqrtRecFNToRaw.io.rawOutValid_sqrt
val roundRawFNToRecFN =
Module(new RoundRawFNToRecFN(expWidth, sigWidth, 0))
roundRawFNToRecFN.io.invalidExc := divSqrtRecFNToRaw.io.invalidExc
roundRawFNToRecFN.io.infiniteExc := divSqrtRecFNToRaw.io.infiniteExc
roundRawFNToRecFN.io.in := divSqrtRecFNToRaw.io.rawOut
roundRawFNToRecFN.io.roundingMode := divSqrtRecFNToRaw.io.roundingModeOut
roundRawFNToRecFN.io.detectTininess := io.detectTininess
io.out := roundRawFNToRecFN.io.out
io.exceptionFlags := roundRawFNToRecFN.io.exceptionFlags
}
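/* Illustrative instantiation sketch (not part of the original file): a
 * single-precision unit uses expWidth = 8 and sigWidth = 24. The 'a' and
 * 'b' inputs are in recoded form (expWidth + sigWidth + 1 bits wide), so
 * plain IEEE-754 bit patterns would first pass through recFNFromFN.
 *
 *   val divSqrt = Module(new DivSqrtRecFN_small(8, 24, 0))
 *   divSqrt.io.inValid := inValid
 *   divSqrt.io.sqrtOp  := Bool(false)            // divide, not sqrt
 *   divSqrt.io.a       := recFNFromFN(8, 24, a)
 *   divSqrt.io.b       := recFNFromFN(8, 24, b)
 */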
| stanford-ppl/spatial-lang | spatial/core/resources/chiselgen/template-level/templates/hardfloat/DivSqrtRecFN_small.scala | Scala | mit | 12,861 |
package com.github.j5ik2o.forseti.adaptor.handler.model
import com.github.j5ik2o.forseti.domain.exception.InvalidRequestException
import com.github.j5ik2o.forseti.domain.{GrantType, Message, Scope}
import scalaz.{Maybe, \/}
trait AuthorizationGrant extends Message {
  def grantType: InvalidRequestException \/ GrantType.Value
  def nonce: InvalidRequestException \/ Maybe[String]
}
trait AuthorizationGrantWithScope extends AuthorizationGrant {
  def scope: InvalidRequestException \/ Scope
}
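/* Hypothetical sketch of a concrete grant (field and enum names assumed):
 *
 *   case class ClientCredentialsGrant(params: Map[String, String])
 *       extends AuthorizationGrant {
 *     def grantType = \/-(GrantType.ClientCredentials)
 *     def nonce     = \/-(Maybe.empty[String])
 *   }
 */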
| j5ik2o/forseti | server/server-use-case-port/src/main/scala/com/github/j5ik2o/forseti/adaptor/handler/model/AuthorizationGrant.scala | Scala | mit | 503 |
package ninja.fangs.github
import org.eclipse.egit.github.core.client.GitHubClient
import org.eclipse.egit.github.core.service.RepositoryService
import scalafx.event.ActionEvent
import scalafx.geometry.{Insets, Pos}
import scalafx.scene.Scene
import scalafx.scene.control.{Button, PasswordField, TextField}
import scalafx.scene.layout.{Priority, HBox}
import scalafx.Includes._
class LoginScene extends Scene {
stylesheets = Seq("caspian.css")
root = new HBox {
val username = new TextField {
promptText = "email"
alignmentInParent = Pos.BASELINE_CENTER
}
val password = new PasswordField {
promptText = "password"
alignmentInParent = Pos.BASELINE_CENTER
}
val submit = new Button {
text = "Submit"
alignmentInParent = Pos.BASELINE_CENTER
defaultButton = true
onAction = (e: ActionEvent) => {
val client = new GitHubClient()
client.setCredentials(username.text.value, password.text.value)
val repos = new RepositoryService(client)
val repoScene = new HomeScene(client, repos)
IssuesConsole.stage.scene = repoScene
}
}
content = Seq(username, password, submit)
spacing = 10
padding = Insets(20)
vgrow = Priority.ALWAYS
hgrow = Priority.ALWAYS
}
}
| kfang/scalafx-github | src/main/scala/ninja/fangs/github/LoginScene.scala | Scala | gpl-3.0 | 1,295 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.cloud
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.hive.CarbonHiveIndexMetadataUtil
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll
class CacheRefreshTestCase extends QueryTest with BeforeAndAfterAll {
override protected def beforeAll(): Unit = {
sql("drop database if exists cachedb cascade")
sql("create database cachedb")
sql("use cachedb")
}
override protected def afterAll(): Unit = {
sql("use default")
sql("drop database if exists cachedb cascade")
}
test("test cache refresh") {
sql("create table tbl_cache1(col1 string, col2 int, col3 int) using carbondata")
sql("insert into tbl_cache1 select 'a', 123, 345")
CarbonHiveIndexMetadataUtil.invalidateAndDropTable(
"cachedb", "tbl_cache1", sqlContext.sparkSession)
// discard cached table info in cachedDataSourceTables
val tableIdentifier = TableIdentifier("tbl_cache1", Option("cachedb"))
sqlContext.sparkSession.sessionState.catalog.refreshTable(tableIdentifier)
sql("create table tbl_cache1(col1 string, col2 int, col3 int) using carbondata")
sql("delete from tbl_cache1")
sql("insert into tbl_cache1 select 'b', 123, 345")
checkAnswer(sql("select * from tbl_cache1"),
Seq(Row("b", 123, 345)))
}
}
| zzcclp/carbondata | integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/cloud/CacheRefreshTestCase.scala | Scala | apache-2.0 | 2,202 |
package org.jetbrains.plugins.scala.externalLibraries.kindProjector.inspections
import com.intellij.codeInspection.{LocalQuickFix, ProblemDescriptor, ProblemsHolder}
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import org.jetbrains.plugins.scala.codeInspection.{AbstractFixOnPsiElement, AbstractInspection, ScalaInspectionBundle}
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.externalLibraries.kindProjector.inspections.KindProjectorUseCorrectLambdaKeywordInspection._
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScParameterizedTypeElement, ScSimpleTypeElement}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createTypeElementFromText
import scala.annotation.nowarn
/**
* Author: Svyatoslav Ilinskiy
* Date: 6/25/15
*/
@nowarn("msg=" + AbstractInspection.DeprecationText)
class KindProjectorUseCorrectLambdaKeywordInspection extends AbstractInspection(inspectionName) {
override def actionFor(implicit holder: ProblemsHolder, isOnTheFly: Boolean): PartialFunction[PsiElement, Any] = {
case param: ScParameterizedTypeElement if param.kindProjectorPluginEnabled =>
val useGreekLambda = ScalaCodeStyleSettings.getInstance(param.getProject).REPLACE_LAMBDA_WITH_GREEK_LETTER
param.children.foreach {
case simple: ScSimpleTypeElement =>
simple.getText match {
case "Lambda" if useGreekLambda =>
val changeKeywordFix = new KindProjectorUseCorrectLambdaKeywordQuickFix(simple, "λ")
holder.registerProblem(simple, ScalaInspectionBundle.message("kind.projector.replace.lambda.with.lamda.char"), changeKeywordFix)
val changeSettingsFix = new ChangeLambdaCodeStyleSetting(!useGreekLambda)
holder.registerProblem(simple, codeStyleSettingUseWordLambda, changeSettingsFix)
case "λ" if !useGreekLambda =>
val changeKeywordFix = new KindProjectorUseCorrectLambdaKeywordQuickFix(simple, "Lambda")
holder.registerProblem(simple, ScalaInspectionBundle.message("kind.projector.replace.lambda.char.with.lambda"), changeKeywordFix)
val changeSettingsFix = new ChangeLambdaCodeStyleSetting(!useGreekLambda)
holder.registerProblem(simple, codeStyleSettingUseGreekLambda, changeSettingsFix)
case _ =>
}
case _ =>
}
}
}
class KindProjectorUseCorrectLambdaKeywordQuickFix(e: PsiElement, replacement: String) extends AbstractFixOnPsiElement(inspectionName, e) {
override protected def doApplyFix(elem: PsiElement)
(implicit project: Project): Unit = {
elem.replace(createTypeElementFromText(replacement))
}
}
class ChangeLambdaCodeStyleSetting(useGreekLambda: Boolean) extends LocalQuickFix {
override def getFamilyName: String = getName
override def getName: String =
if (useGreekLambda) codeStyleSettingUseGreekLambda
else codeStyleSettingUseWordLambda
override def applyFix(project: Project, d: ProblemDescriptor): Unit = {
ScalaCodeStyleSettings.getInstance(project).REPLACE_LAMBDA_WITH_GREEK_LETTER = useGreekLambda
}
}
object KindProjectorUseCorrectLambdaKeywordInspection {
val inspectionName: String = ScalaInspectionBundle.message("kind.projector.use.correct.lambda.keyword")
val inspectionId: String = "KindProjectorUseCorrectLambdaKeyword"
val codeStyleSettingUseGreekLambda: String = ScalaInspectionBundle.message("kind.projector.code.style.setting.use.lamda.char")
val codeStyleSettingUseWordLambda: String = ScalaInspectionBundle.message("kind.projector.code.style.setting.use.lamda.word")
}
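/* For reference, the two surface syntaxes this inspection toggles between
 * (kind-projector type lambdas; illustrative only):
 *
 *   type RightBiased[R]  = Lambda[L => Either[L, R]]   // word keyword
 *   type RightBiased2[R] = λ[L => Either[L, R]]        // greek letter
 */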
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/externalLibraries/kindProjector/inspections/KindProjectorUseCorrectLambdaKeywordInspection.scala | Scala | apache-2.0 | 3,781 |
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.io.config
import java.io.File
import java.net.URL
import cats.effect.Sync
import cats.implicits._
import laika.config.Config.IncludeMap
import laika.config.{ConfigParser, ConfigResourceError}
import laika.io.runtime.Batch
import laika.parse.hocon.{IncludeAny, IncludeClassPath, IncludeFile, IncludeResource, IncludeUrl, ValidStringValue}
/** Internal utility that provides configuration files requested by include statements in other
* configuration instances.
*
* @author Jens Halm
*/
object IncludeHandler {
case class RequestedInclude(resource: IncludeResource, parent: Option[IncludeResource])
case class LoadedInclude(requestedResource: IncludeResource, resolvedResource: IncludeResource, result: Either[ConfigResourceError, String])
/** Loads the requested resources and maps them to the request instance for later lookup.
*
* If a resource is not present (e.g. file does not exist in the file system or HTTP call
* produced a 404) then the requested resource will not be present as a key in the result map.
*
* If a resource is present, but fails to load or parse correctly, the error will
* be mapped to the requested resource as a `Left`. Successfully loaded and parsed
* resources appear in the result map as a `Right`.
*/
def load[F[_]: Sync : Batch] (includes: Seq[RequestedInclude]): F[IncludeMap] =
if (includes.isEmpty) Sync[F].pure(Map.empty) else {
def prepareFile(include: IncludeFile, requested: IncludeResource, parent: Option[IncludeResource]): F[(IncludeResource, IncludeResource)] = Sync[F].pure {
if (new File(include.resourceId.value).isAbsolute) (include, requested)
else parent.flatMap {
case IncludeFile(id, _) => Option(new File(id.value).getParentFile)
case _ => None
} match {
case Some(parentFile) => (IncludeFile(ValidStringValue(new File(parentFile, include.resourceId.value).getPath), include.isRequired), requested)
case None => (include, requested)
}
}
def prepareClasspath(include: IncludeClassPath, requested: IncludeResource, parent: Option[IncludeResource]): F[(IncludeResource, IncludeResource)] = Sync[F].pure {
if (include.resourceId.value.startsWith("/")) (include.copy(resourceId = ValidStringValue(include.resourceId.value.drop(1))), include)
else parent match {
case Some(p: IncludeClassPath) if p.resourceId.value.contains("/") =>
val parentPath = p.resourceId.value.substring(0, p.resourceId.value.lastIndexOf("/"))
val childPath = s"$parentPath/${include.resourceId.value}"
(IncludeClassPath(ValidStringValue(childPath), include.isRequired), requested)
case _ => (include, requested)
}
}
def prepareUrl(include: IncludeUrl, requested: IncludeResource, parent: Option[IncludeResource]): F[(IncludeResource, IncludeResource)] = Sync[F].delay {
parent match {
case Some(p: IncludeUrl) =>
val parentUrl = new URL(p.resourceId.value)
val childUrl = new URL(parentUrl, include.resourceId.value)
(IncludeUrl(ValidStringValue(childUrl.toString), include.isRequired), requested)
case _ => (include, requested)
}
}
def prepareAny(include: IncludeAny, parent: Option[IncludeResource]): F[(IncludeResource, IncludeResource)] =
Sync[F].delay(new URL(include.resourceId.value))
.flatMap(_ => prepareUrl(IncludeUrl(include.resourceId, include.isRequired), include, parent))
.handleErrorWith { _ =>
parent match {
case Some(_: IncludeClassPath) => prepareClasspath(IncludeClassPath(include.resourceId, include.isRequired), include, parent)
case Some(_: IncludeUrl) => prepareUrl(IncludeUrl(include.resourceId, include.isRequired), include, parent)
case _ => prepareFile(IncludeFile(include.resourceId, include.isRequired), include, parent)
}
}
val preparedIncludes = includes.map {
case RequestedInclude(i: IncludeFile, parent) => prepareFile(i, i, parent)
case RequestedInclude(i: IncludeClassPath, parent) => prepareClasspath(i, i, parent)
case RequestedInclude(i: IncludeUrl, parent) => prepareUrl(i, i, parent)
case RequestedInclude(i: IncludeAny, parent) => prepareAny(i, parent)
}.toVector.sequence
def result(requestedResource: IncludeResource, resolvedResource: IncludeResource, result: F[Option[Either[ConfigResourceError, String]]]): F[Option[LoadedInclude]] =
result.map(_.map(res => LoadedInclude(requestedResource, resolvedResource, res)))
preparedIncludes.flatMap { includes =>
Batch[F].execute(
includes.map {
case (i@IncludeFile(resourceId, _), orig) => result(orig, i, ResourceLoader.loadFile(resourceId.value))
case (i@IncludeClassPath(resourceId, _), orig) => result(orig, i, ResourceLoader.loadClasspathResource(resourceId.value))
case (i@IncludeUrl(resourceId, _), orig) => Sync[F].delay(new URL(resourceId.value))
.flatMap(url => result(orig, i, ResourceLoader.loadUrl(url)))
case _ => Sync[F].pure(Option.empty[LoadedInclude])
}
)
}.flatMap { loadedResources =>
val configParsers = loadedResources.unite.map(loaded => (loaded.requestedResource, loaded.resolvedResource, loaded.result.map(ConfigParser.parse)))
val includeMap = configParsers.map {
case (requested, _, result) =>
(requested, result.flatMap(_.unresolved))
}.toMap
val recursiveIncludes = configParsers.flatMap {
case (_, resolved, Right(parser)) => parser.includes.filterNot(includeMap.contains).map(RequestedInclude(_, Some(resolved)))
case _ => Nil
}
load(recursiveIncludes).map(_ ++ includeMap)
}
}
}
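/* Usage sketch (hypothetical; assumes Sync and Batch instances for IO are
 * in scope and that `parser` is an unresolved ConfigParser): gather the
 * include statements of a parsed document, load them, and keep the
 * resulting IncludeMap for the resolver.
 *
 *   val requested = parser.includes.map(RequestedInclude(_, None))
 *   val loaded: IO[IncludeMap] = IncludeHandler.load[IO](requested)
 */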
| planet42/Laika | io/src/main/scala/laika/io/config/IncludeHandler.scala | Scala | apache-2.0 | 6,767 |
package org.crockeo.genericplatformer.assets
import com.badlogic.gdx.audio.Music
import com.badlogic.gdx.Gdx
object MusicManager extends AssetManager[Music] {
// The backend for loading an asset
protected def loadBackend(path: String): Music =
Gdx.audio.newMusic(Gdx.files.local(path))
// Loading all assets
override def init = {}
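  // Usage sketch (assumption: the AssetManager base class exposes a public,
  // caching load(path) built on top of loadBackend):
  //   val theme = MusicManager.load("assets/music/theme.ogg")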
}
| crockeo/generic-platformer | src/org/crockeo/genericplatformer/assets/MusicManager.scala | Scala | gpl-3.0 | 349 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.producer
import java.net.SocketTimeoutException
import java.util.Properties
import kafka.api.{ProducerRequest, ProducerResponseStatus}
import kafka.common.TopicAndPartition
import kafka.integration.KafkaServerTestHarness
import kafka.message._
import kafka.server.KafkaConfig
import kafka.utils._
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.utils.Time
import org.apache.kafka.common.record.{DefaultRecordBatch, DefaultRecord}
import org.junit.Test
import org.junit.Assert._
@deprecated("This test has been deprecated and it will be removed in a future release", "0.10.0.0")
class SyncProducerTest extends KafkaServerTestHarness {
private val messageBytes = new Array[Byte](2)
// turning off controlled shutdown since testProducerCanTimeout() explicitly shuts down request handler pool.
def generateConfigs = List(KafkaConfig.fromProps(TestUtils.createBrokerConfigs(1, zkConnect, false).head))
private def produceRequest(topic: String,
partition: Int,
message: ByteBufferMessageSet,
acks: Int,
timeout: Int = SyncProducerConfig.DefaultAckTimeoutMs,
correlationId: Int = 0,
clientId: String = SyncProducerConfig.DefaultClientId): ProducerRequest = {
TestUtils.produceRequest(topic, partition, message, acks, timeout, correlationId, clientId)
}
@Test
def testReachableServer() {
val server = servers.head
val props = TestUtils.getSyncProducerConfig(boundPort(server))
val producer = new SyncProducer(new SyncProducerConfig(props))
val firstStart = Time.SYSTEM.milliseconds
var response = producer.send(produceRequest("test", 0,
new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(messageBytes)), acks = 1))
assertNotNull(response)
assertTrue((Time.SYSTEM.milliseconds - firstStart) < 12000)
val secondStart = Time.SYSTEM.milliseconds
response = producer.send(produceRequest("test", 0,
new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(messageBytes)), acks = 1))
assertNotNull(response)
assertTrue((Time.SYSTEM.milliseconds - secondStart) < 12000)
response = producer.send(produceRequest("test", 0,
new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(messageBytes)), acks = 1))
assertNotNull(response)
}
@Test
def testEmptyProduceRequest() {
val server = servers.head
val props = TestUtils.getSyncProducerConfig(boundPort(server))
val correlationId = 0
val clientId = SyncProducerConfig.DefaultClientId
val ackTimeoutMs = SyncProducerConfig.DefaultAckTimeoutMs
val ack: Short = 1
val emptyRequest = new kafka.api.ProducerRequest(correlationId, clientId, ack, ackTimeoutMs, collection.mutable.Map[TopicAndPartition, ByteBufferMessageSet]())
val producer = new SyncProducer(new SyncProducerConfig(props))
val response = producer.send(emptyRequest)
assertTrue(response != null)
assertTrue(!response.hasError && response.status.isEmpty)
}
@Test
def testMessageSizeTooLarge() {
val server = servers.head
val props = TestUtils.getSyncProducerConfig(boundPort(server))
val producer = new SyncProducer(new SyncProducerConfig(props))
createTopic("test", numPartitions = 1, replicationFactor = 1)
val message1 = new Message(new Array[Byte](configs.head.messageMaxBytes + 1))
val messageSet1 = new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = message1)
val response1 = producer.send(produceRequest("test", 0, messageSet1, acks = 1))
assertEquals(1, response1.status.count(_._2.error != Errors.NONE))
assertEquals(Errors.MESSAGE_TOO_LARGE, response1.status(TopicAndPartition("test", 0)).error)
assertEquals(-1L, response1.status(TopicAndPartition("test", 0)).offset)
val safeSize = configs.head.messageMaxBytes - DefaultRecordBatch.RECORD_BATCH_OVERHEAD - DefaultRecord.MAX_RECORD_OVERHEAD
val message2 = new Message(new Array[Byte](safeSize))
val messageSet2 = new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = message2)
val response2 = producer.send(produceRequest("test", 0, messageSet2, acks = 1))
assertEquals(1, response1.status.count(_._2.error != Errors.NONE))
assertEquals(Errors.NONE, response2.status(TopicAndPartition("test", 0)).error)
assertEquals(0, response2.status(TopicAndPartition("test", 0)).offset)
}
@Test
def testMessageSizeTooLargeWithAckZero() {
val server = servers.head
val props = TestUtils.getSyncProducerConfig(boundPort(server))
props.put("request.required.acks", "0")
val producer = new SyncProducer(new SyncProducerConfig(props))
adminZkClient.createTopic("test", 1, 1)
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, "test", 0)
    // This message will be dropped silently since the message size is too large.
producer.send(produceRequest("test", 0,
new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(new Array[Byte](configs.head.messageMaxBytes + 1))), acks = 0))
// Send another message whose size is large enough to exceed the buffer size so
// the socket buffer will be flushed immediately;
// this send should fail since the socket has been closed
try {
producer.send(produceRequest("test", 0,
new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(new Array[Byte](configs.head.messageMaxBytes + 1))), acks = 0))
} catch {
case _ : java.io.IOException => // success
}
}
@Test
def testProduceCorrectlyReceivesResponse() {
val server = servers.head
val props = TestUtils.getSyncProducerConfig(boundPort(server))
val producer = new SyncProducer(new SyncProducerConfig(props))
val messages = new ByteBufferMessageSet(NoCompressionCodec, new Message(messageBytes))
// #1 - test that we get an error when partition does not belong to broker in response
val request = TestUtils.produceRequestWithAcks(Array("topic1", "topic2", "topic3"), Array(0), messages, 1,
timeout = SyncProducerConfig.DefaultAckTimeoutMs, clientId = SyncProducerConfig.DefaultClientId)
val response = producer.send(request)
assertNotNull(response)
assertEquals(request.correlationId, response.correlationId)
assertEquals(3, response.status.size)
response.status.values.foreach {
case ProducerResponseStatus(error, nextOffset, timestamp) =>
assertEquals(Errors.UNKNOWN_TOPIC_OR_PARTITION, error)
assertEquals(-1L, nextOffset)
assertEquals(Message.NoTimestamp, timestamp)
}
// #2 - test that we get correct offsets when partition is owned by broker
adminZkClient.createTopic("topic1", 1, 1)
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, "topic1", 0)
adminZkClient.createTopic("topic3", 1, 1)
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, "topic3", 0)
val response2 = producer.send(request)
assertNotNull(response2)
assertEquals(request.correlationId, response2.correlationId)
assertEquals(3, response2.status.size)
// the first and last message should have been accepted by broker
assertEquals(Errors.NONE, response2.status(TopicAndPartition("topic1", 0)).error)
assertEquals(Errors.NONE, response2.status(TopicAndPartition("topic3", 0)).error)
assertEquals(0, response2.status(TopicAndPartition("topic1", 0)).offset)
assertEquals(0, response2.status(TopicAndPartition("topic3", 0)).offset)
// the middle message should have been rejected because broker doesn't lead partition
assertEquals(Errors.UNKNOWN_TOPIC_OR_PARTITION,
response2.status(TopicAndPartition("topic2", 0)).error)
assertEquals(-1, response2.status(TopicAndPartition("topic2", 0)).offset)
}
@Test
def testProducerCanTimeout() {
val timeoutMs = 500
val server = servers.head
val props = TestUtils.getSyncProducerConfig(boundPort(server))
val producer = new SyncProducer(new SyncProducerConfig(props))
val messages = new ByteBufferMessageSet(NoCompressionCodec, new Message(messageBytes))
val request = produceRequest("topic1", 0, messages, acks = 1)
// stop IO threads and request handling, but leave networking operational
// any requests should be accepted and queue up, but not handled
server.requestHandlerPool.shutdown()
val t1 = Time.SYSTEM.milliseconds
try {
producer.send(request)
fail("Should have received timeout exception since request handling is stopped.")
} catch {
case _: SocketTimeoutException => /* success */
}
val t2 = Time.SYSTEM.milliseconds
// make sure we don't wait fewer than timeoutMs for a response
assertTrue((t2-t1) >= timeoutMs)
}
@Test
def testProduceRequestWithNoResponse() {
val server = servers.head
val port = TestUtils.boundPort(server)
val props = TestUtils.getSyncProducerConfig(port)
val correlationId = 0
val clientId = SyncProducerConfig.DefaultClientId
val ackTimeoutMs = SyncProducerConfig.DefaultAckTimeoutMs
val ack: Short = 0
val emptyRequest = new kafka.api.ProducerRequest(correlationId, clientId, ack, ackTimeoutMs, collection.mutable.Map[TopicAndPartition, ByteBufferMessageSet]())
val producer = new SyncProducer(new SyncProducerConfig(props))
val response = producer.send(emptyRequest)
assertTrue(response == null)
}
@Test
def testNotEnoughReplicas() {
val topicName = "minisrtest"
val server = servers.head
val props = TestUtils.getSyncProducerConfig(boundPort(server))
props.put("request.required.acks", "-1")
val producer = new SyncProducer(new SyncProducerConfig(props))
val topicProps = new Properties()
topicProps.put("min.insync.replicas","2")
adminZkClient.createTopic(topicName, 1, 1,topicProps)
TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, topicName, 0)
val response = producer.send(produceRequest(topicName, 0,
new ByteBufferMessageSet(compressionCodec = NoCompressionCodec, messages = new Message(messageBytes)),-1))
assertEquals(Errors.NOT_ENOUGH_REPLICAS, response.status(TopicAndPartition(topicName, 0)).error)
}
}
| MyPureCloud/kafka | core/src/test/scala/unit/kafka/producer/SyncProducerTest.scala | Scala | apache-2.0 | 11,080 |
package org.scalaide.core.internal.lexical
import scalariform.lexer.ScalaLexer
import org.scalaide.ui.syntax.ScalaSyntaxClasses
import org.scalaide.ui.syntax.ScalariformToSyntaxClass
import scala.annotation.tailrec
import scalariform.ScalaVersion
import scalariform.lexer.{ Token => ScalariformToken }
import scalariform.lexer.Tokens._
import org.scalaide.core.lexical.ScalaCodeTokenizer
class ScalaCodeTokenizerScalariformBased(val scalaVersion: ScalaVersion) extends ScalaCodeTokenizer {
import ScalaCodeTokenizer.Token
def tokenize(contents: String, offset: Int = 0): IndexedSeq[Token] = {
val token = ScalaLexer.createRawLexer(contents, forgiveErrors = true).toIndexedSeq.init
/**
* Heuristic to distinguish the macro keyword from uses as an identifier. To be 100% accurate requires a full parse,
* which would be too slow, but this is hopefully adequate.
*/
def isMacro(token: ScalariformToken, pos: Int): Boolean =
token.tokenType.isId && token.text == "macro" &&
findMeaningfulToken(pos + 1, shift = 1).exists(token => token.tokenType.isId) &&
findMeaningfulToken(pos - 1, shift = -1).exists(_.tokenType == EQUALS)
/**
* Scan forwards or backwards for nearest token that is neither whitespace nor comment
*/
@tailrec
def findMeaningfulToken(pos: Int, shift: Int): Option[ScalariformToken] =
if (pos <= 0 || pos >= token.length)
None
else {
val tok = token(pos)
tok.tokenType match {
case WS | LINE_COMMENT | MULTILINE_COMMENT =>
findMeaningfulToken(pos + shift, shift)
case _ =>
Some(tok)
}
}
/* Denotes the class of a token. */
def tokenClass(token: ScalariformToken, pos: Int) =
if (isMacro(token, pos)) ScalaSyntaxClasses.KEYWORD
else ScalariformToSyntaxClass(token)
token.zipWithIndex map {
case (tok, i) =>
Token(tok.offset + offset, tok.length, tokenClass(tok, i))
}
}
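  /* Usage sketch (illustrative; the ScalaVersion literal is an assumption
   * about scalariform's API):
   *
   *   val tokenizer = new ScalaCodeTokenizerScalariformBased(ScalaVersion(2, 11))
   *   tokenizer.tokenize("val x = 1") foreach println
   */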
}
| scala-ide/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/core/internal/lexical/ScalaCodeTokenizerScalariformBased.scala | Scala | bsd-3-clause | 2,223 |
package zeroformatter.benchmark
import cats.Eval
import zeroformatter._
@ZeroFormattable
case class Bar(
@Index(0) s: Eval[String],
@Index(1) d: Eval[Double],
@Index(2) i: Eval[Int],
@Index(3) l: Eval[Long],
@Index(4) bs: Eval[Vector[Boolean]]
)
object Bar extends ZeroFormatterBarInstances {
}
| pocketberserker/scala-zero-formatter | benchmark/src/main/scala/zeroformatter/benchmark/Bar.scala | Scala | mit | 308 |
package controllers
import scalaz._
import Scalaz._
import scalaz.Validation._
import net.liftweb.json._
import io.megam.auth.funnel._
import io.megam.auth.funnel.FunnelErrors._
import play.api.mvc._
import controllers.stack.Results
object EventsVm extends Controller with controllers.stack.APIAuthElement {
def post = StackAction(parse.tolerantText) { implicit request =>
    (Validation.fromTryCatchThrowable[Result, Throwable] {
reqFunneled match {
case Success(succ) => {
val freq = succ.getOrElse(throw new Error("Invalid header."))
val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
val clientAPIBody = freq.clientAPIBody.getOrElse(throw new Error("Body not found (or) invalid."))
models.events.EventsVm.create(email, clientAPIBody) match {
case Success(succ) =>
Status(CREATED)(
FunnelResponse(CREATED, "EventsVm record created successfully.", "Megam::EventsVm").toJson(true))
case Failure(err) =>
val rn: FunnelResponse = new HttpReturningError(err)
Status(rn.code)(rn.toJson(true))
}
}
case Failure(err) => {
val rn: FunnelResponse = new HttpReturningError(err)
Status(rn.code)(rn.toJson(true))
}
}
}).fold(succ = { a: Result => a }, fail = { t: Throwable => Status(BAD_REQUEST)(t.getMessage) })
}
def show(limit: String) = StackAction(parse.tolerantText) { implicit request =>
(Validation.fromTryCatchThrowable[Result, Throwable] {
reqFunneled match {
case Success(succ) => {
val freq = succ.getOrElse(throw new Error("Invalid header."))
val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
val clientAPIBody = freq.clientAPIBody.getOrElse(throw new Error("Body not found (or) invalid."))
models.events.EventsVm.findById(email, clientAPIBody, limit) match {
case Success(succ) => Ok(Results.resultset(models.Constants.EVENTSVMCOLLECTIONCLAZ, compactRender(Extraction.decompose(succ))))
case Failure(err) =>
val rn: FunnelResponse = new HttpReturningError(err)
Status(rn.code)(rn.toJson(true))
}
}
case Failure(err) => {
val rn: FunnelResponse = new HttpReturningError(err)
Status(rn.code)(rn.toJson(true))
}
}
}).fold(succ = { a: Result => a }, fail = { t: Throwable => Status(BAD_REQUEST)(t.getMessage) })
}
def list(limit: String) = StackAction(parse.tolerantText) { implicit request =>
(Validation.fromTryCatchThrowable[Result, Throwable] {
reqFunneled match {
case Success(succ) => {
val freq = succ.getOrElse(throw new Error("Invalid header."))
val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
models.events.EventsVm.findByEmail(email, limit) match {
case Success(succ) => Ok(Results.resultset(models.Constants.EVENTSVMCOLLECTIONCLAZ, compactRender(Extraction.decompose(succ))))
case Failure(err) =>
val rn: FunnelResponse = new HttpReturningError(err)
Status(rn.code)(rn.toJson(true))
}
}
case Failure(err) => {
val rn: FunnelResponse = new HttpReturningError(err)
Status(rn.code)(rn.toJson(true))
}
}
}).fold(succ = { a: Result => a }, fail = { t: Throwable => Status(BAD_REQUEST)(t.getMessage) })
}
def index = StackAction(parse.tolerantText) { implicit request =>
(Validation.fromTryCatchThrowable[Result, Throwable] {
reqFunneled match {
case Success(succ) => {
val freq = succ.getOrElse(throw new Error("Invalid header."))
val email = freq.maybeEmail.getOrElse(throw new Error("Email not found (or) invalid."))
models.events.EventsVm.IndexEmail(email) match {
case Success(succ) => Ok(Results.resultset(models.Constants.EVENTSVMCOLLECTIONCLAZ, compactRender(Extraction.decompose(succ))))
case Failure(err) =>
val rn: FunnelResponse = new HttpReturningError(err)
Status(rn.code)(rn.toJson(true))
}
}
case Failure(err) => {
val rn: FunnelResponse = new HttpReturningError(err)
Status(rn.code)(rn.toJson(true))
}
}
}).fold(succ = { a: Result => a }, fail = { t: Throwable => Status(BAD_REQUEST)(t.getMessage) })
}
}
| indykish/vertice_gateway | app/controllers/events/EventsVm.scala | Scala | mit | 4,525 |
// Unicode newline in a single-line comment?
// Compiler will expect code on the line.
// Here the code is valid.
/* \n object foo */
// \n object foo
/* \12 object foo */
// \12 object foo
/* \u000a object foo */
// object foo \u000a
// \u000a object foo
/* \n foo */
// \n foo
/* \12 foo */
// \12 foo
/* \u000a foo */
// foo \u000a
// \u000a foo
| lrytz/scala | test/files/pos/u000a.scala | Scala | apache-2.0 | 350 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnetexamples.neuralstyle.end2end
import org.apache.mxnet.{Context, NDArrayCollector, Shape}
import org.kohsuke.args4j.{CmdLineParser, Option}
import org.slf4j.LoggerFactory
import scala.collection.JavaConverters._
object BoostInference {
private val logger = LoggerFactory.getLogger(classOf[BoostInference])
def runInference(modelPath: String, outputPath: String, guassianRadius : Int,
inputImage : String, ctx : Context): Unit = {
NDArrayCollector.auto().withScope {
val dShape = Shape(1, 3, 480, 640)
val clipNorm = 1.0f * dShape.product
// generator
val gens = Array(
GenV4.getModule("g0", dShape, ctx, isTrain = false),
GenV3.getModule("g1", dShape, ctx, isTrain = false),
GenV3.getModule("g2", dShape, ctx, isTrain = false),
GenV4.getModule("g3", dShape, ctx, isTrain = false)
)
gens.zipWithIndex.foreach { case (gen, i) =>
gen.loadParams(s"$modelPath/$i/v3_0002-0026000.params")
}
val contentNp =
DataProcessing.preprocessContentImage(s"$inputImage", dShape, ctx)
var data = Array(contentNp)
for (i <- 0 until gens.length) {
NDArrayCollector.auto().withScope {
          // feed the most recently generated image into the next generator
          gens(i).forward(data.takeRight(1))
val newImg = gens(i).getOutputs()(0)
data :+= newImg
          DataProcessing.saveImage(newImg, s"$outputPath/out_$i.jpg", gaussianRadius)
logger.info(s"Converted image: $outputPath/out_$i.jpg")
}
}
}
}
def main(args: Array[String]): Unit = {
val stce = new BoostInference
val parser: CmdLineParser = new CmdLineParser(stce)
try {
parser.parseArgument(args.toList.asJava)
assert(stce.modelPath != null
&& stce.inputImage != null
&& stce.outputPath != null)
val ctx = if (stce.gpu == -1) Context.cpu() else Context.gpu(stce.gpu)
      runInference(stce.modelPath, stce.outputPath, stce.gaussianRadius, stce.inputImage, ctx)
} catch {
case ex: Exception => {
logger.error(ex.getMessage, ex)
parser.printUsage(System.err)
sys.exit(1)
}
}
}
}
class BoostInference {
@Option(name = "--model-path", usage = "the saved model path")
private val modelPath: String = null
@Option(name = "--input-image", usage = "the style image")
private val inputImage: String = null
@Option(name = "--output-path", usage = "the output result path")
private val outputPath: String = null
@Option(name = "--gpu", usage = "which gpu card to use, default is -1, means using cpu")
private val gpu: Int = -1
@Option(name = "--guassian-radius", usage = "the gaussian blur filter radius")
private val guassianRadius: Int = 2
}
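
// A minimal sketch of a CLI invocation (hypothetical paths; the flags map to the
// @Option fields above):
//   BoostInference --model-path ./models --input-image ./content.jpg \
//     --output-path ./out --gpu -1 --gaussian-radius 2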
| dmlc/mxnet | scala-package/examples/src/main/scala/org/apache/mxnetexamples/neuralstyle/end2end/BoostInference.scala | Scala | apache-2.0 | 3,551 |
package net.liftmodules.ng
package test
import Angular.NgModel
package object model {
case class Test2Obj(str1:String, str2:String) extends NgModel
case class BroadcastObj(num:Int, char:String)
case class StringInt(str:String, num:Int)
}
| extirpate/lift-ng | test-project/src/main/scala/net/liftmodules/ng/test/model/package.scala | Scala | apache-2.0 | 246 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.EliminateSubQueries
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.sources._
import org.apache.spark.sql.{SaveMode, DataFrame, SQLContext}
import org.apache.spark.sql.catalyst.expressions.{Attribute, Row}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.RunnableCommand
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
/**
* Analyzes the given table in the current database to generate statistics, which will be
* used in query optimizations.
*
* Right now, it only supports Hive tables and it only updates the size of a Hive table
* in the Hive metastore.
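 *
 * A minimal sketch of how this command is reached programmatically (assuming a
 * `HiveContext` in scope; the table name is hypothetical):
 * {{{
 *   hiveContext.analyze("myTable")
 * }}}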
*/
private[hive]
case class AnalyzeTable(tableName: String) extends RunnableCommand {
override def run(sqlContext: SQLContext): Seq[Row] = {
sqlContext.asInstanceOf[HiveContext].analyze(tableName)
Seq.empty[Row]
}
}
/**
* Drops a table from the metastore and removes it if it is cached.
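 *
 * A minimal sketch of the SQL that reaches this command (table name hypothetical):
 * {{{
 *   hiveContext.sql("DROP TABLE IF EXISTS myTable")
 * }}}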
*/
private[hive]
case class DropTable(
tableName: String,
ifExists: Boolean) extends RunnableCommand {
override def run(sqlContext: SQLContext): Seq[Row] = {
val hiveContext = sqlContext.asInstanceOf[HiveContext]
val ifExistsClause = if (ifExists) "IF EXISTS " else ""
try {
hiveContext.cacheManager.tryUncacheQuery(hiveContext.table(tableName))
} catch {
      // This table's metadata is not in the Hive metastore (e.g. the table does not exist).
      case _: org.apache.hadoop.hive.ql.metadata.InvalidTableException =>
      case _: org.apache.spark.sql.catalyst.analysis.NoSuchTableException =>
      // Other Throwables can be caused by users providing wrong parameters in OPTIONS
      // (e.g. invalid paths). We catch them and log a warning message.
      // Users should be able to drop such tables regardless of whether there is an error.
      case e: Throwable => log.warn(e.getMessage, e)
}
hiveContext.invalidateTable(tableName)
hiveContext.runSqlHive(s"DROP TABLE $ifExistsClause$tableName")
hiveContext.catalog.unregisterTable(Seq(tableName))
Seq.empty[Row]
}
}
private[hive]
case class AddJar(path: String) extends RunnableCommand {
override val output: Seq[Attribute] = {
val schema = StructType(
StructField("result", IntegerType, false) :: Nil)
schema.toAttributes
}
override def run(sqlContext: SQLContext): Seq[Row] = {
val hiveContext = sqlContext.asInstanceOf[HiveContext]
val currentClassLoader = Utils.getContextOrSparkClassLoader
// Add jar to current context
    val jarURL = new java.io.File(path).toURI.toURL
val newClassLoader = new java.net.URLClassLoader(Array(jarURL), currentClassLoader)
Thread.currentThread.setContextClassLoader(newClassLoader)
// We need to explicitly set the class loader associated with the conf in executionHive's
// state because this class loader will be used as the context class loader of the current
// thread to execute any Hive command.
// We cannot use `org.apache.hadoop.hive.ql.metadata.Hive.get().getConf()` because Hive.get()
// returns the value of a thread local variable and its HiveConf may not be the HiveConf
// associated with `executionHive.state` (for example, HiveContext is created in one thread
// and then add jar is called from another thread).
hiveContext.executionHive.state.getConf.setClassLoader(newClassLoader)
// Add jar to isolated hive (metadataHive) class loader.
hiveContext.runSqlHive(s"ADD JAR $path")
// Add jar to executors
hiveContext.sparkContext.addJar(path)
Seq(Row(0))
}
}
private[hive]
case class AddFile(path: String) extends RunnableCommand {
override def run(sqlContext: SQLContext): Seq[Row] = {
val hiveContext = sqlContext.asInstanceOf[HiveContext]
hiveContext.runSqlHive(s"ADD FILE $path")
hiveContext.sparkContext.addFile(path)
Seq.empty[Row]
}
}
private[hive]
case class CreateMetastoreDataSource(
tableName: String,
userSpecifiedSchema: Option[StructType],
provider: String,
options: Map[String, String],
allowExisting: Boolean,
managedIfNoPath: Boolean) extends RunnableCommand {
override def run(sqlContext: SQLContext): Seq[Row] = {
val hiveContext = sqlContext.asInstanceOf[HiveContext]
if (hiveContext.catalog.tableExists(tableName :: Nil)) {
if (allowExisting) {
return Seq.empty[Row]
} else {
throw new AnalysisException(s"Table $tableName already exists.")
}
}
var isExternal = true
val optionsWithPath =
if (!options.contains("path") && managedIfNoPath) {
isExternal = false
options + ("path" -> hiveContext.catalog.hiveDefaultTableFilePath(tableName))
} else {
options
}
hiveContext.catalog.createDataSourceTable(
tableName,
userSpecifiedSchema,
Array.empty[String],
provider,
optionsWithPath,
isExternal)
Seq.empty[Row]
}
}
private[hive]
case class CreateMetastoreDataSourceAsSelect(
tableName: String,
provider: String,
partitionColumns: Array[String],
mode: SaveMode,
options: Map[String, String],
query: LogicalPlan) extends RunnableCommand {
override def run(sqlContext: SQLContext): Seq[Row] = {
val hiveContext = sqlContext.asInstanceOf[HiveContext]
var createMetastoreTable = false
var isExternal = true
val optionsWithPath =
if (!options.contains("path")) {
isExternal = false
options + ("path" -> hiveContext.catalog.hiveDefaultTableFilePath(tableName))
} else {
options
}
var existingSchema = None: Option[StructType]
if (sqlContext.catalog.tableExists(Seq(tableName))) {
// Check if we need to throw an exception or just return.
mode match {
case SaveMode.ErrorIfExists =>
throw new AnalysisException(s"Table $tableName already exists. " +
s"If you are using saveAsTable, you can set SaveMode to SaveMode.Append to " +
s"insert data into the table or set SaveMode to SaveMode.Overwrite to overwrite" +
s"the existing data. " +
s"Or, if you are using SQL CREATE TABLE, you need to drop $tableName first.")
case SaveMode.Ignore =>
// Since the table already exists and the save mode is Ignore, we will just return.
return Seq.empty[Row]
case SaveMode.Append =>
// Check if the specified data source match the data source of the existing table.
val resolved = ResolvedDataSource(
sqlContext, Some(query.schema.asNullable), partitionColumns, provider, optionsWithPath)
val createdRelation = LogicalRelation(resolved.relation)
EliminateSubQueries(sqlContext.table(tableName).logicalPlan) match {
case l @ LogicalRelation(_: InsertableRelation | _: HadoopFsRelation) =>
if (l.relation != createdRelation.relation) {
val errorDescription =
s"Cannot append to table $tableName because the resolved relation does not " +
s"match the existing relation of $tableName. " +
s"You can use insertInto($tableName, false) to append this DataFrame to the " +
s"table $tableName and using its data source and options."
val errorMessage =
s"""
|$errorDescription
|== Relations ==
|${sideBySide(
s"== Expected Relation ==" :: l.toString :: Nil,
s"== Actual Relation ==" :: createdRelation.toString :: Nil
).mkString("\\n")}
""".stripMargin
throw new AnalysisException(errorMessage)
}
existingSchema = Some(l.schema)
case o =>
throw new AnalysisException(s"Saving data in ${o.toString} is not supported.")
}
case SaveMode.Overwrite =>
hiveContext.sql(s"DROP TABLE IF EXISTS $tableName")
// Need to create the table again.
createMetastoreTable = true
}
} else {
// The table does not exist. We need to create it in metastore.
createMetastoreTable = true
}
val data = DataFrame(hiveContext, query)
val df = existingSchema match {
// If we are inserting into an existing table, just use the existing schema.
case Some(schema) => sqlContext.createDataFrame(data.queryExecution.toRdd, schema)
case None => data
}
// Create the relation based on the data of df.
val resolved =
ResolvedDataSource(sqlContext, provider, partitionColumns, mode, optionsWithPath, df)
if (createMetastoreTable) {
// We will use the schema of resolved.relation as the schema of the table (instead of
// the schema of df). It is important since the nullability may be changed by the relation
// provider (for example, see org.apache.spark.sql.parquet.DefaultSource).
hiveContext.catalog.createDataSourceTable(
tableName,
Some(resolved.relation.schema),
partitionColumns,
provider,
optionsWithPath,
isExternal)
}
// Refresh the cache of the table in the catalog.
hiveContext.refreshTable(tableName)
Seq.empty[Row]
}
}
| andrewor14/iolap | sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/commands.scala | Scala | apache-2.0 | 10,344 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.it
import com.typesafe.scalalogging.LazyLogging
import org.scalatest.{AsyncFunSpec, Inspectors, Matchers}
import play.api.libs.json._
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
import scala.util.{Failure, Success, Try}
class SearchTests extends AsyncFunSpec with Matchers with Inspectors with Helpers with fixture.NSHashesAndPrefixes with LazyLogging {
def orderingFor[A : Numeric](rootSegment: String)(field: String, ascending: Boolean)(implicit rds: Reads[A]): Ordering[JsValue] = new Ordering[JsValue] {
override def compare(x: JsValue, y: JsValue): Int = {
val num = implicitly[Numeric[A]]
val xf = Try((x \\ rootSegment \\ field).head.as[A])
val yf = Try((y \\ rootSegment \\ field).head.as[A])
(xf,yf) match {
case (Failure(_), Failure(_)) => 0
case (Failure(_), _) => -1
case (_, Failure(_)) => 1
case (Success(xv), Success(yv)) =>
if (ascending) num.compare(xv, yv)
else num.compare(yv, xv)
}
}
}
def orderingForField[A : Numeric](field: String, ascending: Boolean)(implicit rds: Reads[A]): Ordering[JsValue] = orderingFor("fields")(field,ascending)
val orderingForScore = orderingFor[Float]("extra")("score",ascending=false)
describe("Search API should") {
//Assertions
val ingestGeonames = Future.traverse(Seq(293846, 293918, 294640, 294904, 5052287, 6342919, 6468007)) { n =>
val geoN3 = scala.io.Source.fromURL(this.getClass.getResource(s"/geo/geonames_$n.n3")).mkString
Http.post(_in, geoN3, Some("text/n3;charset=UTF-8"), List("format" -> "n3"), tokenHeader)
}.map { results =>
forAll(results) { res =>
withClue(res) {
res.status should be(200)
jsonSuccessPruner(Json.parse(res.payload)) shouldEqual jsonSuccess
}
}
}
val boxedErrorOnRootWithoutQP = Http.get(cmw, List("op"->"search","recursive"->"","with-history"->"","format"->"json")).map { res =>
withClue(res) {
res.status should be(200)
}
}
val path = cmw / "sws.geonames.org"
val f0 = executeAfterCompletion(ingestGeonames) {
spinCheck(100.millis)(Http.get(
path,
List("op" -> "search", "recursive" -> "", "length" -> "14", "format" -> "json"))) { r =>
(Json.parse(r.payload) \\ "results" \\ "total": @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == 14
}
}
}
val f1 = f0.flatMap(_ => {
spinCheck(100.millis,true)(Http.get(cmw / "meta" / "ns" / "2_fztg" / "alt", List("format"->"json"))){ r =>
((Json.parse(r.payload) \\ "fields" \\ "mang").head : @unchecked) match {
case JsDefined(JsString(char)) => char == "f"
}
}
})
val searchForExistence = executeAfterCompletion(f1){
spinCheck(100.millis,true)(Http.get(path, List("op" -> "search", "qp" -> "alt.wgs84_pos:", "format" -> "json", "debug-info" -> ""))) { r =>
val j = Json.parse(r.payload)
(j \\ "results" \\ "total" : @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == 2
}
}
}
val postSearchForExistence = executeAfterCompletion(f1){
spinCheck(100.millis,true)(Http.post(path,
"qp=alt.wgs84_pos:&format=json&debug-info=",
Some("application/x-www-form-urlencoded"),
queryParams = List("op" -> "search"),
headers = tokenHeader)) { r =>
val j = Json.parse(r.payload) \\ "results"
(j \\ "total" : @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == 2
}
}
}
val testSearchForExistence = searchForExistence.map { res =>
withClue(res) {
val jResults = Json.parse(res.payload) \\ "results"
(jResults \\ "total").as[Long] should be(2L)
}
}
val testPostSameAsGet = searchForExistence.zip(postSearchForExistence).map{
case (res1,res2) => withClue(res1,res2){
val j1 = Json.parse(res1.payload)
val j2 = Json.parse(res2.payload)
(j2 \\ "results" \\ "total").as[Long] should be(2L)
j1 shouldEqual j2
}
}
import cmwell.util.http.SimpleResponse.Implicits.UTF8StringHandler
def getTypesCache = Http.get[String](cmw / "_types-cache").map(_.payload)
val typesCache = executeAfterCompletion(f1)(getTypesCache)
val sortByLatitude = executeAfterCompletion(typesCache){
spinCheck(100.millis,true)(Http.get(
uri = path,
queryParams = List("op" -> "search","sort-by" -> "-lat.wgs84_pos","format" -> "json","with-data" -> "","pretty" -> "","debug-info" -> "")
)){ r =>
val j = Json.parse(r.payload) \\ "results"
(j \\ "total": @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() >= 7
}
}.map { res =>
withClue {
val sb = new StringBuilder
sb ++= res.toString()
sb ++= "\\ntypes cache before: "
sb ++= typesCache.value.toString
sb ++= "\\ntypes cache after: "
sb ++= Try(Await.result(getTypesCache,60.seconds)).getOrElse("getTypesCache failed!!!")
sb.result()
} {
val jInfotonsArr = (Json.parse(res.payload) \\ "results" \\ "infotons").get.asInstanceOf[JsArray].value
implicit val ord = orderingForField[Float]("lat.wgs84_pos", ascending = false)
jInfotonsArr shouldBe sorted
}
}
}
val sortByAltitude = executeAfterCompletion(f1){
spinCheck(100.millis,true)(Http.get(
uri = path,
queryParams = List("op" -> "search","sort-by" -> "-alt.wgs84_pos","format" -> "json","with-data" -> "","pretty" -> "","debug-info" -> "")
)){ r =>
val j = Json.parse(r.payload) \\ "results"
(j \\ "total": @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() >= 2
}
}.map { res =>
withClue(res){
val jInfotonsArr = (Json.parse(res.payload) \\ "results" \\ "infotons").get.asInstanceOf[JsArray].value
implicit val ord = orderingForField[Float]("alt.wgs84_pos", ascending = false)
jInfotonsArr.size should be(7)
jInfotonsArr.take(2) shouldBe sorted
forAll(jInfotonsArr.drop(2)) { jInfoton =>
(jInfoton \\ "fields" \\ "alt.wgs84_pos").toOption shouldBe empty
}
}
}
}
val sortByScoreFilterByIL = executeAfterCompletion(f1){
spinCheck(100.millis,true)(Http.get(
uri = path,
queryParams = List(
"op" -> "search",
"sort-by" -> "system.score",
"format" -> "json",
"qp" -> "*alternateName.geonames:israel,*countryCode.geonames:il",
"pretty" -> "",
"debug-info" -> "")
)){ r =>
val j = Json.parse(r.payload) \\ "results"
(j \\ "total": @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == 4
}
}.map { res =>
withClue(res){
val jInfotonsArr = (Json.parse(res.payload) \\ "results" \\ "infotons").get.asInstanceOf[JsArray].value
implicit val ord = orderingForScore
jInfotonsArr.size should be(4)
jInfotonsArr.take(2) shouldBe sorted
}
}
}
val recursiveSearch = executeAfterCompletion(f1){
spinCheck(100.millis,true)(Http.get(
uri = path,
queryParams = List("op" -> "search","format" -> "json","pretty" -> "","debug-info" -> "","recursive" -> ""))){ r =>
(Json.parse(r.payload) \\ "results" \\ "total": @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == 14
}
}.map { res =>
withClue(res) {
val total = (Json.parse(res.payload) \\ "results" \\ "total").as[Int]
total should be(14)
}
}
}
val lastModifiedSearch = executeAfterCompletion(ingestGeonames){
val currentTime = System.currentTimeMillis()
spinCheck(100.millis,true)(Http.get(
uri = path,
queryParams = List("op" -> "search","format" -> "json","pretty" -> "","debug-info" -> "",
"recursive"-> "", "qp" -> ("system.lastModified<<" + currentTime)))){ r =>
(Json.parse(r.payload) \\ "results" \\ "total": @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == 14
}
}.map { res =>
withClue(res) {
val total = (Json.parse(res.payload) \\ "results" \\ "total").as[Int]
total should be(14)
}
}
}
///sws.geonames.org/?op=search&recursive&qp=type.rdf::http://xmlns.com/foaf/0.1/Document&gqp=<isDefinedBy.rdfs[countryCode.geonames::US]
val gqpFiltering = executeAfterCompletion(f1){
spinCheck(100.millis,true)(Http.get(
uri = path,
queryParams = List(
"op" -> "search",
"format" -> "json",
"pretty" -> "",
"debug-info" -> "",
"recursive" -> "",
"qp" -> "type.rdf::http://xmlns.com/foaf/0.1/Document",
"gqp" -> "<isDefinedBy.rdfs[countryCode.geonames::US]"))){ r =>
(Json.parse(r.payload) \\ "results" \\ "total": @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == 7
}
}.map { res =>
withClue(res) {
val j = Json.parse(res.payload)
val total = (j \\ "results" \\ "total").as[Int]
total should be(7)
val length = (j \\ "results" \\ "length").as[Int]
length should be(2)
(j \\ "results" \\ "infotons").toOption.fold(fail("son.results.infotons was not found")) {
case JsArray(infotons) => infotons should have size(2)
case notArray => fail(s"json.results.infotons was not an array[$notArray]")
}
}
}
}
val ingestGOT = {
val gotN3 = scala.io.Source.fromURL(this.getClass.getResource("/got/data.n3")).mkString
Http.post(_in, gotN3, Some("text/n3;charset=UTF-8"), List("format" -> "n3"), tokenHeader).map { res =>
withClue(res) {
res.status should be(200)
jsonSuccessPruner(Json.parse(res.payload)) shouldEqual jsonSuccess
}
}
}
val awoiaf = cmw / "awoiaf.westeros.org"
val awoiafSearch = executeAfterCompletion(ingestGOT){
spinCheck(100.millis,true)(Http.get(
awoiaf,
List("op" -> "search", "format" -> "json", "length" -> "1")
)) { r =>
(Json.parse(r.payload) \\ "results" \\ "total": @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == 11
}
}.map { r =>
withClue(r) {
r.status should be (200)
val j = Json.parse(r.payload)
val total = (j \\ "results" \\ "total").as[Int]
total should be(11)
}
}
}
val searchPersons = executeAfterCompletion(awoiafSearch) {
Http.get(awoiaf, List(
"op" -> "search",
"format" -> "json",
"qp" -> "type.rdf::http://xmlns.com/foaf/0.1/Person")).map { r =>
withClue(r) {
r.status should be(200)
val j = Json.parse(r.payload)
val total = (j \\ "results" \\ "total").as[Int]
total should be(4)
}
}
}
val gqpFilterByHomeType = executeAfterCompletion(awoiafSearch) {
Http.get(awoiaf, List(
"op" -> "search",
"format" -> "json",
"qp" -> "type.rdf::http://xmlns.com/foaf/0.1/Person",
"gqp" -> "<hasTenant.gotns>homeLocation.schema[homeType.gotns::Castle]")).map { r =>
withClue(r) {
r.status should be(200)
val j = Json.parse(r.payload) \\ "results"
val total = (j \\ "total").as[Int]
total should be(4)
val length = (j \\ "length").as[Int]
length should be(3)
}
}
}
val gqpFilterBySkippingGhostNed = executeAfterCompletion(awoiafSearch) {
Http.get(awoiaf, List(
"op" -> "search",
"format" -> "json",
"with-data" -> "",
"qp" -> "type.rdf::http://xmlns.com/foaf/0.1/Person",
"gqp" -> ">childOf.rel<childOf.rel")).map { r =>
withClue(r) {
r.status should be(200)
val j = Json.parse(r.payload) \\ "results"
val total = (j \\ "total").as[Int]
total should be(4)
val length = (j \\ "length").as[Int]
length should be(2)
}
}
}
val gqpFilterAllAlthoughSkippingGhostNed = executeAfterCompletion(awoiafSearch) {
Http.get(awoiaf, List(
"op" -> "search",
"format" -> "json",
"with-data" -> "",
"qp" -> "type.rdf::http://xmlns.com/foaf/0.1/Person",
"gqp" -> ">childOf.rel[type.rdf::http://xmlns.com/foaf/0.1/Person]<childOf.rel")).map { r =>
withClue(r) {
r.status should be(200)
val j = Json.parse(r.payload) \\ "results"
val total = (j \\ "total").as[Int]
total should be(4)
val length = (j \\ "length").as[Int]
length should be(0)
}
}
}
    // TODO: an empty filter could mean existence, i.e. no ghost skips without an explicit filter; not yet implemented.
// val gqpFilterAllAlthoughSkippingGhostNedWithEmptyFilter = executeAfterCompletion(awoiafSearch) {
// Http.get(awoiaf, List(
// "op" -> "search",
// "format" -> "json",
// "with-data" -> "",
// "qp" -> "type.rdf::http://xmlns.com/foaf/0.1/Person",
// "gqp" -> ">childOf.rel[]<childOf.rel")).map { r =>
// withClue(r) {
// r.status should be(200)
// val j = Json.parse(r.payload) \\ "results"
// val total = (j \\ "total").as[Int]
// total should be(4)
// val length = (j \\ "length").as[Int]
// length should be(0)
// }
// }
// }
val ex2unitPF: PartialFunction[Throwable,Unit] = {
case _: Throwable => ()
}
val deleteAbouts = (for {
_ <- searchForExistence.recover(ex2unitPF)
_ <- postSearchForExistence.recover(ex2unitPF)
_ <- sortByLatitude.recover(ex2unitPF)
_ <- sortByAltitude.recover(ex2unitPF)
_ <- sortByScoreFilterByIL.recover(ex2unitPF)
_ <- recursiveSearch.recover(ex2unitPF)
_ <- gqpFiltering.recover(ex2unitPF)
} yield Seq(293846, 293918, 294640, 294904, 5052287, 6342919, 6468007)).flatMap { seq =>
Future.traverse(seq){ n =>
val aboutPath = path / n.toString / "about.rdf"
Http.delete(uri = aboutPath,headers = tokenHeader)
}.map { results =>
forAll(results){ res =>
withClue(res){
res.status should be(200)
Json.parse(res.payload) should be(jsonSuccess)
}
}
}
}
def recSearch(path: String, queryParams: Seq[(String,String)], expectedTotal: Int) =
executeAfterCompletion(deleteAbouts)(spinCheck(500.millis,true)(Http.get(path, queryParams)){ r =>
(Json.parse(r.payload) \\ "results" \\ "total" : @unchecked) match {
case JsDefined(JsNumber(n)) => n.intValue() == expectedTotal
}
})
val recursiveSearch2 = recSearch(path, List("op" -> "search", "recursive" -> "", "debug-info" -> "", "length" -> "14", "format" -> "json"), 7).map { res =>
withClue(res) {
val total = (Json.parse(res.payload) \\ "results" \\ "total").as[Int]
total should be(7)
}
}
val recursiveSearch3 = executeAfterCompletion(
recursiveSearch2)(
recSearch(
path,
List("op" -> "search","format" -> "json","pretty" -> "","debug-info" -> "","recursive" -> "","with-deleted"->"","length" -> "14"),
14).map { res =>
withClue(res) {
val results = Json.parse(res.payload) \\ "results"
val total = (results \\ "total").as[Int]
total should be(14)
results \\ "infotons" match {
case JsDefined(JsArray(infotons)) => {
val m = infotons.groupBy(jv => (jv \\ "type").as[String])
m should have size (2)
forAll(m.values) { seqByType =>
seqByType should have size (7)
}
}
case somethingElse => fail(s"was expecting an array, but got: $somethingElse")
}
}
})
val recursiveSearch4 = executeAfterCompletion(
recursiveSearch2)(
recSearch(
path,
List("op" -> "search","format" -> "json","pretty" -> "","debug-info" -> "","recursive" -> "","with-history"->"","length" -> "42"),
21).map { res =>
withClue(res) {
val results = Json.parse(res.payload) \\ "results"
val total = (results \\ "total").as[Int]
total should be(21)
results \\ "infotons" match {
case JsDefined(JsArray(infotons)) => {
val m = infotons.groupBy(jv => (jv \\ "type").as[String])
m should have size (2)
forAll(m) {
case ("DeletedInfoton", deletedInfotons) => deletedInfotons should have size (7)
case ("ObjectInfoton", deletedInfotons) => deletedInfotons should have size (14)
case x @ (_, _) => logger.error(s"Unexpected input. Received: $x"); ???
}
}
case somethingElse => fail(s"was expecting an array, but got: $somethingElse")
}
}
})
val searchUnderscoreAll = recSearch(cmw / "", Seq("op"->"search", "recursive"->"", "qp"->"_all:Tikva", "format"->"json"), 1).map { r =>
(Json.parse(r.payload) \\ "results" \\ "total").as[Int] should be(1)
}
it("verify boxed error bug on root search without qp is solved")(boxedErrorOnRootWithoutQP)
it("ingest geonames data successfully")(ingestGeonames)
it("retrieve only infotons with `alt.wgs84_pos` field")(testSearchForExistence)
it("retrieve only infotons with `alt.wgs84_pos` field with POST")(testPostSameAsGet)
it("get sorted by latitude (all share property)")(sortByLatitude)
it("get sorted by altitude (some share property)")(sortByAltitude)
it("get sorted by system.score (filtered by qp)")(sortByScoreFilterByIL)
it("get nested object using recursive query param")(recursiveSearch)
it("filter results with gqp indirect properties")(gqpFiltering)
it("ingest GOT data successfully")(ingestGOT)
it("verify all GOT data is searcheable")(awoiafSearch)
it("retrieve only infotons of type person from GOT")(searchPersons)
it("filter person results by 'homeType' indirect property")(gqpFilterByHomeType)
it("filter person results with 'ghost skipping' an intermediate missing infoton")(gqpFilterBySkippingGhostNed)
it("filter all person results with 'ghost skipping' an intermediate missing infoton if it also contains a filter")(gqpFilterAllAlthoughSkippingGhostNed)
it("search by timestamp last modified")(lastModifiedSearch)
// TODO: uncomment when implemented:
// scalastyle:off
// it("filter all person results with 'ghost skipping' an intermediate missing infoton if it also contains an empty filter")(gqpFilterAllAlthoughSkippingGhostNedWithEmptyFilter)
// scalastyle:on
describe("delete infotons and search for in") {
it("succeed deleting nested objects")(deleteAbouts)
it("not get nested objects using recursive query param after deletes")(recursiveSearch2)
it("get nested deleted objects using recursive and with-deleted after deletes")(recursiveSearch3)
it("get nested deleted & historic objects using recursive and with-history after deletes")(recursiveSearch4)
}
    // TODO: dcSync style search
it("search using _all API")(searchUnderscoreAll)
}
} | thomsonreuters/CM-Well | server/cmwell-it/src/it/scala/cmwell/it/SearchTests.scala | Scala | apache-2.0 | 20,465 |
package collins.controllers.actions.state
import scala.concurrent.Future
import play.api.data.Form
import play.api.data.Forms.tuple
import play.api.libs.concurrent.Execution.Implicits.defaultContext
import collins.controllers.validators.ParamValidation
import collins.controllers.SecureController
import collins.controllers.Api
import collins.controllers.actions.SecureAction
import collins.controllers.actions.RequestDataHolder
import collins.validation.StringUtil
import collins.models.{State, Status => AStatus}
import collins.util.security.SecuritySpecification
/**
* Update a state
*
* @apigroup AssetState
* @apimethod POST
* @apiurl /api/state/:name
* @apiparam :name String Old name, reference
* @apiparam name Option[String] new name, between 2 and 32 characters
* @apiparam status Option[String] Status name to bind this state to, or Any to bind to all status
* @apiparam label Option[String] A friendly display label between 2 and 32 characters
* @apiparam description Option[String] A longer description of the state between 2 and 255 characters
* @apirespond 200 success
* @apirespond 400 invalid input
* @apirespond 404 invalid state name
* @apirespond 409 name already in use or trying to modify system name
* @apirespond 500 error saving state
* @apiperm controllers.AssetStateApi.updateState
* @collinsshell {{{
* collins-shell state update OLDNAME [--name=NAME --label=LABEL --description='DESCRIPTION' --status=Status]
* }}}
* @curlexample {{{
* curl -v -u blake:admin:first --basic \\
* -d name='NEW_NAME' \\
* http://localhost:9000/api/state/OLD_NAME
* }}}
*/
case class UpdateAction(
name: String,
spec: SecuritySpecification,
handler: SecureController
) extends SecureAction(spec, handler) with ParamValidation {
import CreateAction.Messages._
import DeleteAction.Messages.systemName
case class ActionDataHolder(state: State) extends RequestDataHolder
val stateForm = Form(tuple(
"status" -> validatedOptionalText(2),
"name" -> validatedOptionalText(2, 32),
"label" -> validatedOptionalText(2, 32),
"description" -> validatedOptionalText(2, 255)
))
override def validate(): Validation = stateForm.bindFromRequest()(request).fold(
err => Left(RequestDataHolder.error400(fieldError(err))),
form => {
val (statusOpt, nameOpt, labelOpt, descriptionOpt) = form
StringUtil.trim(name).filter(s => s.length > 1 && s.length <= 32).flatMap { s =>
State.findByName(s)
}.map { state =>
val statusId = getStatusId(statusOpt)
if (State.isSystemState(state)) {
Left(RequestDataHolder.error409(systemName))
        } else if (statusOpt.isDefined && statusId.isEmpty) {
Left(RequestDataHolder.error400(invalidStatus))
} else {
validateName(nameOpt)
.right.map { validatedNameOpt =>
val named = stateWithName(state, validatedNameOpt)
val stated = stateWithStatus(named, statusId)
val labeled = stateWithLabel(stated, labelOpt)
ActionDataHolder(stateWithDescription(labeled, descriptionOpt))
}
}
}.getOrElse {
Left(RequestDataHolder.error404(invalidName))
}
}
)
override def execute(rdh: RequestDataHolder) = Future {
rdh match {
case ActionDataHolder(state) => State.update(state) match {
case ok if ok >= 0 => Api.statusResponse(true, Status.Ok)
case notok => Api.statusResponse(false, Status.InternalServerError)
}
}
}
protected def fieldError(f: Form[_]) = f match {
case e if e.error("name").isDefined => invalidName
case e if e.error("label").isDefined => invalidLabel
case e if e.error("description").isDefined => invalidDescription
case e if e.error("status").isDefined => invalidStatus
    case n => fuck // catch-all unknown-error message imported from CreateAction.Messages
}
protected def stateWithName(state: State, name: Option[String]): State =
name.map(s => state.copy(name = s)).getOrElse(state)
protected def stateWithStatus(state: State, status: Option[Int]): State =
status.map(id => state.copy(status = id)).getOrElse(state)
protected def stateWithLabel(state: State, label: Option[String]): State =
label.map(l => state.copy(label = l)).getOrElse(state)
protected def stateWithDescription(state: State, desc: Option[String]): State =
desc.map(d => state.copy(description = d)).getOrElse(state)
protected def validateName(nameOpt: Option[String]): Either[RequestDataHolder,Option[String]] = {
val validatedName: Either[String,Option[String]] = nameOpt match {
case None =>
Right(None)
case Some(n) =>
StringUtil.trim(n).filter(s => s.length > 1 && s.length <= 32) match {
case None => Left(invalidName)
case Some(s) => Right(Some(s))
}
}
validatedName match {
case Left(err) =>
Left(RequestDataHolder.error400(err))
case Right(None) => Right(None)
case Right(Some(s)) => State.findByName(s) match {
case None => Right(Some(s))
case Some(_) => Left(RequestDataHolder.error409(invalidName))
}
}
}
protected def getStatusId(status: Option[String]): Option[Int] = status.flatMap { s =>
(s.toUpperCase == State.ANY_NAME.toUpperCase) match {
case true => Some(State.ANY_STATUS)
case false => AStatus.findByName(s).map(_.id)
}
}
}
| funzoneq/collins | app/collins/controllers/actions/state/UpdateAction.scala | Scala | apache-2.0 | 5,396 |
/*
Deduction Tactics
Copyright (C) 2012-2015 Raymond Dodge
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.rayrobdod.boardGame.swingView
import org.scalatest.FunSpec
import java.awt.{Color}
import java.awt.image.BufferedImage
import java.awt.image.BufferedImage.TYPE_INT_ARGB
import javax.swing.Icon
import scala.collection.immutable.Seq
import com.rayrobdod.boardGame.view.Swing.flattenImageLayers
final class FlattenLayersTest extends FunSpec {
import FlattenLayersTest._
describe("Swing::flattenImageLayers") {
it ("empty input results in transparent image") {
val expected = solidTrans
val result = flattenImageLayers(Nil)
assert(compareImages(expected, result))
}
it ("single image results in that image") {
val expected = solidRed
val result = flattenImageLayers(Seq(solidRed))
assert(compareImages(expected, result))
}
it ("images combine via transparency blending") {
val expected = topGreenBottomRed
val result = flattenImageLayers(Seq(solidRed, topGreen))
assert(compareImages(expected, result))
}
it ("images combine via transparency blending (2)") {
val expected = solidRed
val result = flattenImageLayers(Seq(topGreen, solidRed))
assert(compareImages(expected, result))
}
}
}
object FlattenLayersTest {
	val solidTrans = new BufferedImage(64, 64, TYPE_INT_ARGB)
	val solidRed = new BufferedImage(64, 64, TYPE_INT_ARGB)
	val solidBlue = new BufferedImage(64, 64, TYPE_INT_ARGB)
	val topGreen = new BufferedImage(64, 64, TYPE_INT_ARGB)
	val topGreenBottomRed = new BufferedImage(64, 64, TYPE_INT_ARGB)
	
	/** Fill a region of `img` through a single Graphics2D, so the color is
	 *  applied to the same context that performs the fill; each call to
	 *  `createGraphics()` returns an independent context, so a `setColor`
	 *  on one never affects a `fillRect` on another.
	 */
	private def fill(img: BufferedImage, color: Color, x: Int, y: Int, w: Int, h: Int): Unit = {
		val g = img.createGraphics()
		g.setColor(color)
		g.fillRect(x, y, w, h)
		g.dispose()
	}
	
	fill(solidRed, Color.red, 0, 0, 64, 64)
	fill(solidBlue, Color.blue, 0, 0, 64, 64)
	fill(topGreen, Color.green, 0, 0, 64, 32)
	fill(topGreenBottomRed, Color.red, 0, 0, 64, 64)
	fill(topGreenBottomRed, Color.green, 0, 0, 64, 32)
/** assumes 64 by 64 images */
def compareImages(b:BufferedImage, a:Icon):Boolean = {
val a2 = new BufferedImage(64, 64, TYPE_INT_ARGB)
a.paintIcon(null, a2.createGraphics(), 0, 0)
compareImages(b, a2)
}
/** assumes 64 by 64 images */
def compareImages(b:BufferedImage, a:BufferedImage):Boolean = {
(0 until 64).flatMap{x => (0 until 64).map{y =>
a.getRGB(x,y) == b.getRGB(x,y)
}}.forall{x => x}
}
}
| rayrobdod/boardGame | ViewSwing/src/test/scala/FlattenLayersTest.scala | Scala | gpl-3.0 | 3,102 |
trait Transition[From, To]
infix type ==>[From, To] = Transition[From, To]
type A = A.type
object A
type B = B.type
object B
type C = C.type
object C
// Compiles
given (A ==> B) = ???
// was a compile error
given (A ==> C) = ???
| dotty-staging/dotty | tests/pos/i10927.scala | Scala | apache-2.0 | 233 |
package controllers
import play.api._
import play.api.mvc._
import play.api.Logger
class Application extends Controller {
  // Logs the incoming request's method, headers, and body, then replies 200.
  def wildCardFunction = Action { implicit request =>
Logger.debug("===========================================")
Logger.debug("METHOD:")
Logger.debug(" |-- "+request.method)
Logger.debug("HEADERS:")
for (a <- request.headers.toMap) {
Logger.debug(" |-- "+a.toString)
}
Logger.debug("BODY:")
Logger.debug(" |--"+request.body.toString)
Ok("Your new application is ready.")
}
}
| nerorevenge/httpPlayground | app/controllers/Application.scala | Scala | apache-2.0 | 546 |
package com.magmanics.licensing.ui.content.product
import com.magmanics.licensing.model._
import com.vaadin.data.util.BeanItemContainer
import com.vaadin.ui.AbstractSelect.ItemCaptionMode
import com.vaadin.ui.Table
import scala.beans.BeanInfo
/**
* @author James Baxter - 23/09/2014.
*/
class ProductOptionsTable extends Table {
setSelectable(true)
// setMultiSelect(true)
setImmediate(true)
setPageLength(5)
val container = new BeanItemContainer[ProductOptionViewModel](classOf[ProductOptionViewModel])
setContainerDataSource(container)
setItemCaptionMode(ItemCaptionMode.PROPERTY)
setVisibleColumns("name", "type", "default")
setColumnHeaders("Name", "Type", "Default")
  setNullSelectionAllowed(true)
def setProductOptions(productOptions: Set[ProductOption[_ <: Any]]) = {
container.removeAllItems()
productOptions.toList.sortBy(_.name).map {
case t: TextOption => ProductOptionViewModel(t.id, t.name, t.default, "Text")
case b: BoolOption => ProductOptionViewModel(b.id, b.name, String.valueOf(b.default), "Radio")
case l: ListOption => ProductOptionViewModel(l.id, l.name, l.default, "List")
}.foreach(container.addBean)
}
def getSelected: ProductOptionViewModel = {
getValue.asInstanceOf[ProductOptionViewModel]
}
} | manicmonkey/licensing | Licensing-UI-Vaadin/src/main/scala/com/magmanics/licensing/ui/content/product/ProductOptionsTable.scala | Scala | gpl-3.0 | 1,315 |
package com.eevolution.context.dictionary.infrastructure.service
import java.util.UUID
import akka.NotUsed
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.User
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.api.{Service, ServiceCall}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/e-Evolution
* Created by [email protected] , www.e-evolution.com
*/
/**
 * Lagom service descriptor for the User dictionary entity, exposing read-only
 * lookups: all users, by id, by UUID, and a paginated listing.
 */
trait UserService extends Service with api.service.UserService {
override def getAll() : ServiceCall[NotUsed, List[User]]
override def getById(id: Int): ServiceCall[NotUsed, User]
override def getByUUID(uuid :UUID): ServiceCall[NotUsed, User]
override def getAllByPage(pageNo: Option[Int], pageSize: Option[Int]): ServiceCall[NotUsed, PaginatedSequence[User]]
def descriptor = {
import Service._
named("user").withCalls(
pathCall("/api/v1_0_0/user/all", getAll _) ,
pathCall("/api/v1_0_0/user/:id", getById _),
pathCall("/api/v1_0_0/user/:uuid", getByUUID _) ,
pathCall("/api/v1_0_0/user?pageNo&pageSize", getAllByPage _)
)
}
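
  // A minimal sketch of how these calls map onto HTTP (hypothetical host/port):
  //   GET http://localhost:9000/api/v1_0_0/user/all                  -> all users as JSON
  //   GET http://localhost:9000/api/v1_0_0/user?pageNo=0&pageSize=20 -> one page of users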
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/service/UserService.scala | Scala | gpl-3.0 | 1,951 |
/* Copyright (C) 2012-2013 Treode, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.treode.cps.stub.scheduler
import java.util.{Collection => JCollection, List => JList}
import java.util.concurrent.{Future => JFuture, Callable, ScheduledExecutorService, ScheduledFuture, TimeUnit}
import scala.collection.mutable
import scala.util.Random
private case class ScheduledTask (trigger: Long, r: Runnable)
private trait ExecutorStub extends ScheduledExecutorService {
/** False if the subclass has no tasks to execute _immediately_. */
protected def isQuietNow: Boolean
/** Perform one _immediate_ task; the subclass may assume isQuietNow is false. */
protected def executeOneNow(): Unit
// Order scheduled tasks by the trigger time.
private [this] val order = new Ordering [ScheduledTask] {
def compare (x: ScheduledTask, y: ScheduledTask): Int = x.trigger compare y.trigger
}
// Our tasks scheduled for some future time; the subclass manages only immediate tasks, and
// this superclass handles moving scheduled (time delayed) tasks into the immediate queue after
// the desired delay.
private [this] val scheduled =
new mutable.SynchronizedPriorityQueue [ScheduledTask] () (order)
// If the user requests a delay of two hours, but there are not two hours worth of immediate
// activities, we jump in time to trigger the delayed tasks sooner. This tracks the running
// total of jumps, which is then added to new tasks.
private [this] var time = 0L
/** False until there are no more tasks enqueued. */
def isQuiet (timers: Boolean): Boolean = isQuietNow && (!timers || scheduled.isEmpty)
/** Perform one task; isQuiet must be false. */
def executeOne (timers: Boolean) {
if (timers && (scheduled.headOption exists (_.trigger < time))) {
// A timer has triggered, move its task to the immediate queue.
time += 1
execute (scheduled.dequeue.r)
} else if (isQuietNow) {
// There's no immediate task, jump time to that of the next scheduled task.
val t = scheduled.dequeue
time = t.trigger + 1
execute (t.r)
} else {
// Execute the next immediate task.
time += 1
executeOneNow()
}}
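
  // A minimal sketch of how a test harness might drain this executor to
  // quiescence (hypothetical driver loop; the real stub kits wrap this):
  //   while (!stub.isQuiet (timers = true)) stub.executeOne (timers = true)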
def schedule (r: Runnable, delay: Long, unit: TimeUnit): ScheduledFuture [_] = {
val msec = TimeUnit.MILLISECONDS.convert (delay, unit)
scheduled.enqueue (ScheduledTask (time + msec, r))
// The stub kits ignore the result.
null .asInstanceOf [ScheduledFuture [_]]
}
  // The Java interfaces require implementations for methods the stub kits never call.
private def unsupported [A] = throw new UnsupportedOperationException
// The stub kits do not use any of these, but the Java interface for ExecutorService demands
// their implementations.
def awaitTermination (timeout: Long, unit: TimeUnit): Boolean = unsupported
def invokeAll [A] (tasks: JCollection [_ <: Callable [A]]): JList [JFuture [A]] = unsupported
def invokeAll [A] (tasks: JCollection [_ <: Callable [A]], timeout: Long, unit: TimeUnit): JList [JFuture [A]] = unsupported
def invokeAny [A] (tasks: JCollection [_ <: Callable [A]]): A = unsupported
def invokeAny [A] (tasks: JCollection [_ <: Callable [A]], timeout: Long, unit: TimeUnit): A = unsupported
def isShutdown(): Boolean = unsupported
def isTerminated(): Boolean = unsupported
def shutdown(): Unit = unsupported
def shutdownNow(): JList [Runnable] = unsupported
def submit [A] (task: Callable [A]): JFuture [A] = unsupported
def submit (task: Runnable): JFuture [_] = unsupported
def submit [A] (task: Runnable, result: A): JFuture [A] = unsupported
  // The stub kits do not use any of these either, but the Java interface for
// ScheduledExecutorService demands their implementations.
def schedule [A] (callable: Callable [A], delay: Long, unit: TimeUnit): ScheduledFuture [A] = unsupported
def scheduleAtFixedRate (command: Runnable, initialDelay: Long, period: Long, unit: TimeUnit): ScheduledFuture [_] = unsupported
def scheduleWithFixedDelay (command: Runnable, initialDelay: Long, delay: Long, unit: TimeUnit): ScheduledFuture [_] = unsupported
}
/** An executor that chooses the next enqueued tasks and performs it. */
private class SequentialStub extends ExecutorStub {
private [this] val queue = mutable.Queue [Runnable] ()
def execute (r: Runnable) = queue.enqueue (r)
protected def isQuietNow: Boolean = queue.isEmpty
protected def executeOneNow() = queue.dequeue.run()
}
/** An executor that randomly chooses one enqueued task and performs it. */
private class RandomStub (r: Random) extends ExecutorStub {
private [this] val queue = ChoosyQueue [Runnable] ()
def execute (r: Runnable): Unit = queue.enqueue (r)
protected def isQuietNow: Boolean = queue.isEmpty
  protected def executeOneNow(): Unit = queue.dequeue (r) .run()
}
| Treode/cps | src/stub/scala/com/treode/cps/stub/scheduler/executors.scala | Scala | apache-2.0 | 5,362 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.linalg
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{GenericInternalRow, UnsafeArrayData}
import org.apache.spark.sql.types._
/**
* User-defined type for [[Matrix]] in [[mllib-local]] which allows easy interaction with SQL
* via [[org.apache.spark.sql.Dataset]].
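 *
 * A minimal sketch of a serialize/deserialize round trip (a hypothetical
 * [[DenseMatrix]]; `serialize` produces the row layout described in `sqlType`):
 * {{{
 *   val udt = new MatrixUDT
 *   val m = new DenseMatrix(2, 2, Array(1.0, 2.0, 3.0, 4.0))
 *   val restored = udt.deserialize(udt.serialize(m)) // equal to m
 * }}}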
*/
private[spark] class MatrixUDT extends UserDefinedType[Matrix] {
override def sqlType: StructType = {
    // type: 0 = sparse, 1 = dense
    // The dense matrix is built from numRows, numCols, values and isTransposed, all of
    // which are non-nullable -- except values, since support for binary matrices (which
    // need no values) might be added in the future.
    // The sparse matrix additionally needs colPtrs and rowIndices; those two fields are
    // nullable and are left null when a dense matrix is serialized.
StructType(Seq(
StructField("type", ByteType, nullable = false),
StructField("numRows", IntegerType, nullable = false),
StructField("numCols", IntegerType, nullable = false),
StructField("colPtrs", ArrayType(IntegerType, containsNull = false), nullable = true),
StructField("rowIndices", ArrayType(IntegerType, containsNull = false), nullable = true),
StructField("values", ArrayType(DoubleType, containsNull = false), nullable = true),
StructField("isTransposed", BooleanType, nullable = false)
))
}
override def serialize(obj: Matrix): InternalRow = {
val row = new GenericInternalRow(7)
obj match {
case sm: SparseMatrix =>
row.setByte(0, 0)
row.setInt(1, sm.numRows)
row.setInt(2, sm.numCols)
row.update(3, UnsafeArrayData.fromPrimitiveArray(sm.colPtrs))
row.update(4, UnsafeArrayData.fromPrimitiveArray(sm.rowIndices))
row.update(5, UnsafeArrayData.fromPrimitiveArray(sm.values))
row.setBoolean(6, sm.isTransposed)
case dm: DenseMatrix =>
row.setByte(0, 1)
row.setInt(1, dm.numRows)
row.setInt(2, dm.numCols)
row.setNullAt(3)
row.setNullAt(4)
row.update(5, UnsafeArrayData.fromPrimitiveArray(dm.values))
row.setBoolean(6, dm.isTransposed)
}
row
}
override def deserialize(datum: Any): Matrix = {
datum match {
case row: InternalRow =>
require(row.numFields == 7,
s"MatrixUDT.deserialize given row with length ${row.numFields} but requires length == 7")
val tpe = row.getByte(0)
val numRows = row.getInt(1)
val numCols = row.getInt(2)
val values = row.getArray(5).toDoubleArray()
val isTransposed = row.getBoolean(6)
tpe match {
case 0 =>
val colPtrs = row.getArray(3).toIntArray()
val rowIndices = row.getArray(4).toIntArray()
new SparseMatrix(numRows, numCols, colPtrs, rowIndices, values, isTransposed)
case 1 =>
new DenseMatrix(numRows, numCols, values, isTransposed)
}
}
}
override def userClass: Class[Matrix] = classOf[Matrix]
override def equals(o: Any): Boolean = {
o match {
case v: MatrixUDT => true
case _ => false
}
}
  // see [SPARK-8647]: this achieves the needed constant hash code without hard-coding a literal.
override def hashCode(): Int = classOf[MatrixUDT].getName.hashCode()
override def typeName: String = "matrix"
override def pyUDT: String = "pyspark.ml.linalg.MatrixUDT"
private[spark] override def asNullable: MatrixUDT = this
}
| mike0sv/spark | mllib/src/main/scala/org/apache/spark/ml/linalg/MatrixUDT.scala | Scala | apache-2.0 | 4,306 |
/*
* Copyright 2016 Jesper de Jong
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jesperdj.example.client.whiteboard
import com.jesperdj.example.client.whiteboard.domain.Note
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Controller
import org.springframework.ui.Model
import org.springframework.web.bind.annotation.{RequestBody, RequestMapping, RequestMethod, ResponseBody}
@Controller
class WhiteboardClientController @Autowired()(whiteboardClient: WhiteboardClient) {
@RequestMapping(Array("/"))
def index(model: Model): String = {
model.addAttribute("notes", whiteboardClient.getAllNotes)
"index"
}
@RequestMapping(method = Array(RequestMethod.POST), path = Array("/add"), consumes = Array("application/json"))
@ResponseBody
def add(@RequestBody note: Note): Note = whiteboardClient.addNote(note)
}
| jesperdj/spring-cloud-demo | whiteboard-client/src/main/scala/com/jesperdj/example/client/whiteboard/WhiteboardClientController.scala | Scala | apache-2.0 | 1,408 |
package dregex
import java.util.regex.Pattern
import dregex.TestUtil.using
import org.scalatest.funsuite.AnyFunSuite
class CaseInsensitiveTest extends AnyFunSuite {
test("case insensitive") {
using(Regex.compile("a", Pattern.CASE_INSENSITIVE)) { r =>
assertResult(true)(r.matches("A"))
assertResult(true)(r.matches("a"))
}
using(Regex.compile("á", Pattern.CASE_INSENSITIVE)) { r =>
assertResult(false)(r.matches("Á"))
assertResult(true)(r.matches("á"))
}
using(Regex.compile("á", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CHARACTER_CLASS)) { r =>
assertResult(true)(r.matches("Á"))
assertResult(true)(r.matches("á"))
}
using(Regex.compile("á", Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE)) { r =>
assertResult(true)(r.matches("Á"))
assertResult(true)(r.matches("á"))
}
}
}
| marianobarrios/dregex | src/test/scala/dregex/CaseInsensitiveTest.scala | Scala | bsd-2-clause | 881 |
package org.openurp.edu.eams.system.firstlogin.impl
import org.beangle.commons.lang.Chars
import org.openurp.edu.eams.system.firstlogin.PasswordValidator
class DefaultPasswordValidator extends PasswordValidator {
def validate(password: String): String = {
    if (password.length < 6) return "密码的长度不应小于六位" // "Password must be at least six characters long"
var hasDigit = false
var hasUpper = false
var hasLower = false
for (c <- password.toCharArray()) {
if (java.lang.Character.isDigit(c)) {
hasDigit = true
} else if (Chars.isAsciiAlpha(c)) {
if (java.lang.Character.isUpperCase(c)) hasUpper = true else hasLower = true
}
}
    if (hasLower && hasUpper && hasDigit) "" else "新密码至少包含一位大写字母、一位小写字母和一位数字" // "The new password must contain at least one uppercase letter, one lowercase letter, and one digit"
}
}
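
// A minimal usage sketch (hypothetical inputs; an empty result means the password is valid):
//   new DefaultPasswordValidator().validate("abc")      // too short
//   new DefaultPasswordValidator().validate("Passw0rd") // ""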
| openurp/edu-eams-webapp | web/src/main/scala/org/openurp/edu/eams/system/firstlogin/impl/DefaultPasswordValidator.scala | Scala | gpl-3.0 | 812 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rdd
import scala.language.existentials
import java.io.{IOException, ObjectOutputStream}
import scala.collection.mutable.ArrayBuffer
import org.apache.spark.{InterruptibleIterator, Partition, Partitioner, SparkEnv, TaskContext}
import org.apache.spark.{Dependency, OneToOneDependency, ShuffleDependency}
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.util.collection.{ExternalAppendOnlyMap, AppendOnlyMap, CompactBuffer}
import org.apache.spark.util.Utils
import org.apache.spark.serializer.Serializer
import org.apache.spark.shuffle.ShuffleHandle
private[spark] sealed trait CoGroupSplitDep extends Serializable
private[spark] case class NarrowCoGroupSplitDep(
rdd: RDD[_],
splitIndex: Int,
var split: Partition
) extends CoGroupSplitDep {
@throws(classOf[IOException])
private def writeObject(oos: ObjectOutputStream): Unit = Utils.tryOrIOException {
// Update the reference to parent split at the time of task serialization
split = rdd.partitions(splitIndex)
oos.defaultWriteObject()
}
}
private[spark] case class ShuffleCoGroupSplitDep(handle: ShuffleHandle) extends CoGroupSplitDep
private[spark] class CoGroupPartition(idx: Int, val deps: Array[CoGroupSplitDep])
extends Partition with Serializable {
override val index: Int = idx
override def hashCode(): Int = idx
}
/**
* :: DeveloperApi ::
 * An RDD that cogroups its parents. For each key k in parent RDDs, the resulting RDD contains a
* tuple with the list of values for that key.
*
* Note: This is an internal API. We recommend users use RDD.cogroup(...) instead of
* instantiating this directly.
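 *
 * A minimal sketch of the public entry point this class backs (hypothetical
 * pair RDDs; values from both sides are grouped per key):
 * {{{
 *   val a = sc.parallelize(Seq(1 -> "x", 2 -> "y"))
 *   val b = sc.parallelize(Seq(1 -> "z"))
 *   val grouped = a.cogroup(b) // RDD[(Int, (Iterable[String], Iterable[String]))]
 * }}}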
* @param rdds parent RDDs.
* @param part partitioner used to partition the shuffle output
*/
@DeveloperApi
class CoGroupedRDD[K](@transient var rdds: Seq[RDD[_ <: Product2[K, _]]], part: Partitioner)
extends RDD[(K, Array[Iterable[_]])](rdds.head.context, Nil) {
// For example, `(k, a) cogroup (k, b)` produces k -> Array(ArrayBuffer as, ArrayBuffer bs).
// Each ArrayBuffer is represented as a CoGroup, and the resulting Array as a CoGroupCombiner.
// CoGroupValue is the intermediate state of each value before being merged in compute.
private type CoGroup = CompactBuffer[Any]
private type CoGroupValue = (Any, Int) // Int is dependency number
private type CoGroupCombiner = Array[CoGroup]
private var serializer: Option[Serializer] = None
/** Set a serializer for this RDD's shuffle, or null to use the default (spark.serializer) */
def setSerializer(serializer: Serializer): CoGroupedRDD[K] = {
this.serializer = Option(serializer)
this
}
override def getDependencies: Seq[Dependency[_]] = {
rdds.map { rdd: RDD[_ <: Product2[K, _]] =>
if (rdd.partitioner == Some(part)) {
logDebug("Adding one-to-one dependency with " + rdd)
new OneToOneDependency(rdd)
} else {
logDebug("Adding shuffle dependency with " + rdd)
new ShuffleDependency[K, Any, CoGroupCombiner](rdd, part, serializer)
}
}
}
override def getPartitions: Array[Partition] = {
val array = new Array[Partition](part.numPartitions)
for (i <- 0 until array.size) {
// Each CoGroupPartition will have a dependency per contributing RDD
array(i) = new CoGroupPartition(i, rdds.zipWithIndex.map { case (rdd, j) =>
// Assume each RDD contributed a single dependency, and get it
dependencies(j) match {
case s: ShuffleDependency[_, _, _] =>
new ShuffleCoGroupSplitDep(s.shuffleHandle)
case _ =>
new NarrowCoGroupSplitDep(rdd, i, rdd.partitions(i))
}
}.toArray)
}
array
}
override val partitioner: Some[Partitioner] = Some(part)
override def compute(s: Partition, context: TaskContext): Iterator[(K, Array[Iterable[_]])] = {
val sparkConf = SparkEnv.get.conf
val externalSorting = sparkConf.getBoolean("spark.shuffle.spill", true)
val split = s.asInstanceOf[CoGroupPartition]
val numRdds = split.deps.size
// A list of (rdd iterator, dependency number) pairs
val rddIterators = new ArrayBuffer[(Iterator[Product2[K, Any]], Int)]
for ((dep, depNum) <- split.deps.zipWithIndex) dep match {
case NarrowCoGroupSplitDep(rdd, _, itsSplit) =>
// Read them from the parent
val it = rdd.iterator(itsSplit, context).asInstanceOf[Iterator[Product2[K, Any]]]
rddIterators += ((it, depNum))
case ShuffleCoGroupSplitDep(handle) =>
// Read map outputs of shuffle
val it = SparkEnv.get.shuffleManager
.getReader(handle, split.index, split.index + 1, context)
.read()
rddIterators += ((it, depNum))
}
if (!externalSorting) {
val map = new AppendOnlyMap[K, CoGroupCombiner]
val update: (Boolean, CoGroupCombiner) => CoGroupCombiner = (hadVal, oldVal) => {
if (hadVal) oldVal else Array.fill(numRdds)(new CoGroup)
}
val getCombiner: K => CoGroupCombiner = key => {
map.changeValue(key, update)
}
rddIterators.foreach { case (it, depNum) =>
while (it.hasNext) {
val kv = it.next()
getCombiner(kv._1)(depNum) += kv._2
}
}
new InterruptibleIterator(context,
map.iterator.asInstanceOf[Iterator[(K, Array[Iterable[_]])]])
} else {
val map = createExternalMap(numRdds)
for ((it, depNum) <- rddIterators) {
map.insertAll(it.map(pair => (pair._1, new CoGroupValue(pair._2, depNum))))
}
context.taskMetrics.incMemoryBytesSpilled(map.memoryBytesSpilled)
context.taskMetrics.incDiskBytesSpilled(map.diskBytesSpilled)
new InterruptibleIterator(context,
map.iterator.asInstanceOf[Iterator[(K, Array[Iterable[_]])]])
}
}
private def createExternalMap(numRdds: Int)
: ExternalAppendOnlyMap[K, CoGroupValue, CoGroupCombiner] = {
val createCombiner: (CoGroupValue => CoGroupCombiner) = value => {
val newCombiner = Array.fill(numRdds)(new CoGroup)
newCombiner(value._2) += value._1
newCombiner
}
val mergeValue: (CoGroupCombiner, CoGroupValue) => CoGroupCombiner =
(combiner, value) => {
combiner(value._2) += value._1
combiner
}
val mergeCombiners: (CoGroupCombiner, CoGroupCombiner) => CoGroupCombiner =
(combiner1, combiner2) => {
var depNum = 0
while (depNum < numRdds) {
combiner1(depNum) ++= combiner2(depNum)
depNum += 1
}
combiner1
}
new ExternalAppendOnlyMap[K, CoGroupValue, CoGroupCombiner](
createCombiner, mergeValue, mergeCombiners)
}
override def clearDependencies() {
super.clearDependencies()
rdds = null
}
}
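// Editor's note: a minimal usage sketch, not part of the original file. It assumes an
// existing SparkContext `sc`; as the scaladoc above says, users should normally go
// through RDD.cogroup(...) rather than this constructor.
//
//   val pay  = sc.parallelize(Seq(("k", 1), ("k", 2)))
//   val name = sc.parallelize(Seq(("k", "x")))
//   val cg = new CoGroupedRDD[String](Seq(pay, name), new org.apache.spark.HashPartitioner(2))
//   // cg: RDD[(String, Array[Iterable[_]])] -- one Iterable of values per parent RDD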
| Dax1n/spark-core | core/src/main/scala/org/apache/spark/rdd/CoGroupedRDD.scala | Scala | apache-2.0 | 7,565 |
package de.frosner.ddq.constraints
sealed trait ConstraintStatus {
val stringValue: String
}
object ConstraintSuccess extends ConstraintStatus {
val stringValue = "Success"
}
object ConstraintFailure extends ConstraintStatus {
val stringValue = "Failure"
}
case class ConstraintError(throwable: Throwable) extends ConstraintStatus {
val stringValue = "Error"
}
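// Editor's note -- a hedged usage sketch (describe is an illustrative client function):
//
//   def describe(status: ConstraintStatus): String = status match {
//     case ConstraintSuccess => "check passed"
//     case ConstraintFailure => "check failed"
//     case ConstraintError(t) => s"check errored: ${t.getMessage}"
//   }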
| FRosner/drunken-data-quality | src/main/scala/de/frosner/ddq/constraints/ConstraintStatus.scala | Scala | apache-2.0 | 373 |
package se.uu.farmbio.cp
import java.io.Serializable
import org.apache.spark.Logging
import org.apache.spark.mllib.linalg.Vector
import org.apache.spark.mllib.regression.LabeledPoint
object ICPClassifierModel {
def deserialize[A <: UnderlyingAlgorithm](
model: String,
algDeserializer: Deserializer[A]): ICPClassifierModel[A] = {
    // If curly brackets are used in the algorithm serialization, this won't work.
val matches = "\\\\{(.*?)\\\\}".r
.findAllMatchIn(model)
.map(_.matched)
.toArray
if(matches.length != 2) {
throw new IllegalArgumentException("malformed model string")
}
val algStr = matches(0).substring(1, matches(0).length-1)
val alg = algDeserializer.deserialize(algStr)
val alphStr = matches(1).substring(1, matches(1).length-1)
val alph = "\\\\(([-+]?[0-9]*\\\\.?[0-9]+,)*[-+]?[0-9]*\\\\.?[0-9]+\\\\)".r
.findAllMatchIn(alphStr).map{ pairMatch =>
val pairStr = pairMatch.matched
pairStr.substring(1,pairStr.length-1)
.split(",")
.map(_.toDouble)
}.toSeq
new ICPClassifierModelImpl(alg,alph)
}
}
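// Editor's note: a sketch of the serialized layout deserialize expects, inferred from
// the regexes above and ICPClassifierModelImpl.toString below (the algorithm payload
// itself is implementation-specific):
//
//   {<underlying-algorithm-serialization>},{(0.1,0.2,0.3),(0.4,0.5)}
//
// i.e. two brace-delimited groups: the algorithm string, then one parenthesized,
// comma-separated alpha array per class. Braces inside the algorithm payload break parsing.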
abstract class ICPClassifierModel[A <: UnderlyingAlgorithm]
extends Serializable {
def mondrianPv(features: Vector): Array[Double]
def predict(features: Vector, significance: Double) = {
//Compute region
mondrianPv(features).zipWithIndex.map {
case (pVal, c) =>
if (pVal > significance) {
Set(c.toDouble)
} else {
Set[Double]()
}
}.reduce(_ ++ _)
}
}
private[cp] class ICPClassifierModelImpl[A <: UnderlyingAlgorithm](
val alg: A,
val alphas: Seq[Array[Double]])
extends ICPClassifierModel[A] with Logging {
override def mondrianPv(features: Vector) = {
(0 to alphas.length - 1).map { i =>
//compute non-conformity for new example
val alphaN = alg.nonConformityMeasure(new LabeledPoint(i, features))
//compute p-value
(alphas(i).count(_ >= alphaN) + 1).toDouble /
(alphas(i).length.toDouble + 1)
}.toArray
}
override def predict(features: Vector, significance: Double) = {
    // Validate input: the significance check does not depend on the calibration sets,
    // so perform it once rather than once per alpha array
    require(significance > 0 && significance < 1, s"significance $significance is not in (0,1)")
    alphas.foreach { a =>
      if (a.length < 1 / significance - 1) {
        logWarning(s"too few calibration samples (${a.length}) for significance $significance")
      }
    }
super.predict(features, significance)
}
override def toString = {
val algStr = alg.toString
val alphStr = alphas
.map(mpv =>
"("+ mpv.map(_.toString).reduce(_+","+_)+")")
.reduce(_+","+_)
s"{$algStr},{$alphStr}"
}
}
class AggregatedICPClassifier[A <: UnderlyingAlgorithm](
val icps: Seq[ICPClassifierModel[A]])
extends ICPClassifierModel[A] {
override def mondrianPv(features: Vector) = {
icps
.flatMap { icp =>
icp.mondrianPv(features)
.zipWithIndex
}
.groupBy(_._2)
.toArray
.sortBy(_._1)
.map {
case (index, seq) =>
val sortedSeq = seq.map(_._1).sorted
val n = sortedSeq.length
val median = if (n % 2 == 0) {
(sortedSeq(n / 2 - 1) + sortedSeq(n / 2)) / 2
} else {
sortedSeq(n / 2)
}
median
}
}
} | mcapuccini/spark-cp | cp/src/main/scala/se/uu/farmbio/cp/ICPClassifierModel.scala | Scala | apache-2.0 | 3,360 |
package com.zkay
import com.zkay.Listing1_13.Balance
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
/*
 * Portfolio Statement - event-driven (pg 37, listing 1.14)
*/
object Listing1_14 {
def getCurrencyBalance: Future[Balance] = Future.successful(1000)
def getEquityBalance: Future[Balance] = Future.successful(100000)
def getDebtBalance: Future[Balance] = Future.successful(10000)
def getLoanInformation: Future[Balance] = Future.successful(100)
def getRetirementFundBalance: Future[Balance] = Future.successful(10)
val fcurr: Future[Balance] = getCurrencyBalance
val feq: Future[Balance] = getEquityBalance
val fdebt: Future[Balance] = getDebtBalance
val floan: Future[Balance] = getLoanInformation
val fretire: Future[Balance] = getRetirementFundBalance
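  // Editor's note: the five futures above are created eagerly, before the
  // for-comprehension below, so all five balance lookups run concurrently; the
  // for-comprehension merely sequences the already-running results. Inlining the
  // calls into the generators would serialize them instead.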
for {
c <- fcurr
e <- feq
d <- fdebt
l <- floan
r <- fretire
} yield Listing1_13.generatePortfolio(c, e, d, l, r)
} | zkay/bookclub_notes | FunctionalAndReactiveDomainModeling/chapter1/src/main/scala/Listing1_14.scala | Scala | apache-2.0 | 979 |
/*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.testsuite.javalib.lang
import org.junit.Test
import org.junit.Assert._
import org.scalajs.testsuite.utils.Platform._
// scalastyle:off disallow.space.before.token
class ObjectTest {
@Test def testGetClass(): Unit = {
@noinline
def testNoInline(expected: Class[_], x: Any): Unit =
assertSame(expected, x.getClass())
@inline
def test(expected: Class[_], x: Any): Unit = {
testNoInline(expected, x)
assertSame(expected, x.getClass())
}
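    // Editor's note: exercising getClass() both behind a @noinline call and at an
    // @inline call site checks that the same class is computed whether or not the
    // optimizer can see the receiver's concrete type after inlining.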
test(if (executingInJVM) classOf[scala.runtime.BoxedUnit] else classOf[java.lang.Void], ())
test(classOf[java.lang.Boolean], true)
test(classOf[java.lang.Character], 'A')
test(classOf[java.lang.Byte], 0.toByte)
test(classOf[java.lang.Byte], 5.toByte)
test(classOf[java.lang.Short], 300.toShort)
test(classOf[java.lang.Integer], 100000)
test(classOf[java.lang.Long], Long.MaxValue)
test(classOf[java.lang.Float], -0.0f)
test(classOf[java.lang.Float], 1.5f)
test(classOf[java.lang.Float], Float.NaN)
test(if (hasStrictFloats) classOf[java.lang.Double] else classOf[java.lang.Float], 1.4)
test(classOf[java.lang.String], "hello")
test(classOf[java.lang.Object], new Object)
test(classOf[Some[_]], Some(5))
test(classOf[ObjectTest], this)
test(classOf[Array[Array[Int]]], new Array[Array[Int]](1))
test(classOf[Array[Array[Array[String]]]], new Array[Array[Array[String]]](1))
}
@Test def equals(): Unit = {
case class XY(x: Int, y: Int)
val l = List(XY(1, 2), XY(2, 1))
val xy12 = XY(1, 2)
assertTrue(l.contains(xy12))
assertTrue(l.exists(_ == xy12)) // the workaround
}
@Test def isInstanceOfObjectExceptNull(): Unit = {
assertTrue((() : Any).isInstanceOf[Object])
assertTrue((true : Any).isInstanceOf[Object])
assertTrue(('a' : Any).isInstanceOf[Object])
assertTrue((1.toByte : Any).isInstanceOf[Object])
assertTrue((658.toShort : Any).isInstanceOf[Object])
assertTrue((60000 : Any).isInstanceOf[Object])
assertTrue((12345678910112L: Any).isInstanceOf[Object])
assertTrue((6.5f : Any).isInstanceOf[Object])
assertTrue((12.4 : Any).isInstanceOf[Object])
assertTrue((new Object : Any).isInstanceOf[Object])
assertTrue(("hello" : Any).isInstanceOf[Object])
assertTrue((List(1) : Any).isInstanceOf[Object])
assertTrue((Array(1) : Any).isInstanceOf[Object])
assertTrue((Array(Nil) : Any).isInstanceOf[Object])
}
@Test def isInstanceOfObjectNull(): Unit = {
assertFalse((null: Any).isInstanceOf[Object])
}
@Test def asInstanceOfObjectAll(): Unit = {
(() : Any).asInstanceOf[Object]
(true : Any).asInstanceOf[Object]
('a' : Any).asInstanceOf[Object]
(1.toByte : Any).asInstanceOf[Object]
(658.toShort : Any).asInstanceOf[Object]
(60000 : Any).asInstanceOf[Object]
(12345678910112L: Any).asInstanceOf[Object]
(6.5f : Any).asInstanceOf[Object]
(12.4 : Any).asInstanceOf[Object]
(new Object : Any).asInstanceOf[Object]
("hello" : Any).asInstanceOf[Object]
(List(1) : Any).asInstanceOf[Object]
(Array(1) : Any).asInstanceOf[Object]
(Array(Nil) : Any).asInstanceOf[Object]
(null : Any).asInstanceOf[Object]
}
@Test def cloneCtorSideEffects_Issue3192(): Unit = {
var ctorInvokeCount = 0
// This class has an inlineable init
class CloneCtorSideEffectsBug(val x: Int) extends java.lang.Cloneable {
ctorInvokeCount += 1
override def clone(): CloneCtorSideEffectsBug =
super.clone().asInstanceOf[CloneCtorSideEffectsBug]
}
val o = new CloneCtorSideEffectsBug(54)
assertEquals(54, o.x)
assertEquals(1, ctorInvokeCount)
val o2 = o.clone()
assertEquals(54, o2.x)
assertEquals(1, ctorInvokeCount)
}
}
| scala-js/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/lang/ObjectTest.scala | Scala | apache-2.0 | 4,267 |
package container
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
import scala.collection.mutable.ListBuffer
import org.scalatest.junit.JUnitRunner
import common.CommonTest
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import java.io.File
import container.api.SkillFile
import de.ust.skill.common.scala.internal.SkillState
import de.ust.skill.common.scala.api.Write
import de.ust.skill.common.scala.api.Create
import de.ust.skill.common.scala.api.Read
import de.ust.skill.common.scala.api.ReadOnly
@RunWith(classOf[JUnitRunner])
class FullTest extends CommonTest {
@inline final def read(s : String) = {
println(s)
SkillFile.open("src/test/resources/"+s)
}
@inline final def dump(state : SkillFile) {
for (t ← state) {
println(s"Pool[${t.name}${
if (t.superName.isDefined)
" <: "+t.superName.get
else
""
}]")
for (i ← t.all) {
println(s" $i = ${
t.allFields.map {
f ⇒ s"${f.name}: ${i.get(f)}"
}.mkString("[", ", ", "]")
}")
}
println()
}
}
// reflective read
test("read reflective: nodes") { dump(read("node.sf")) }
test("read reflective: two node blocks") { dump(read("twoNodeBlocks.sf")) }
test("read reflective: colored nodes") { dump(read("coloredNodes.sf")) }
test("read reflective: four colored nodes") { dump(read("fourColoredNodes.sf")) }
test("read reflective: empty blocks") { dump(read("emptyBlocks.sf")) }
test("read reflective: two types") { dump(read("twoTypes.sf")) }
test("read reflective: trivial type definition") { dump(read("trivialType.sf")) }
test("read reflective: subtypes") { dump(read("localBasePoolOffset.sf")) }
test("read reflective: container") { dump(read("container.sf")) }
test("read reflective: commutativity path 1") { dump(read("commutativityPath1.sf")) }
test("read reflective: commutativity path 2") { dump(read("commutativityPath2.sf")) }
// compound types
test("create container instances") {
val p = tmpFile("container.create")
locally {
val state = SkillFile.open(p, Create, Write)
state.Container.make(
arr = ArrayBuffer(0, 0, 0),
varr = ArrayBuffer(1, 2, 3),
l = ListBuffer(),
s = Set().to,
f = HashMap("f" -> HashMap(0L -> 0L)),
someSet = Set().to
)
for (c ← state.Container.all)
c.s = c.arr.toSet.to
state.close
}
locally {
val state = SkillFile.open(p, Read, ReadOnly)
val c = state.Container.all.next
assert(c.arr.size === 3)
assert(c.varr.sameElements(1 to 3))
assert(c.l.isEmpty)
assert(c.s.sameElements(0 to 0))
assert(c.f("f")(c.s.head) == 0)
}
}
} | skill-lang/skillScalaTestSuite | src/test/scala/container/FullTest.scala | Scala | bsd-3-clause | 2,786 |
package is.hail.expr.ir.functions
import is.hail.asm4s
import is.hail.asm4s.{coerce => _, _}
import is.hail.expr.ir._
import is.hail.types.physical._
import is.hail.utils._
import is.hail.types.virtual._
import org.apache.spark.sql.Row
import scala.reflect.ClassTag
object UtilFunctions extends RegistryFunctions {
def parseBoolean(s: String): Boolean = s.toBoolean
def parseInt32(s: String): Int = s.toInt
def parseInt64(s: String): Long = s.toLong
private val NAN = 1
private val POS_INF = 2
private val NEG_INF = 3
def parseSpecialNum(s: String): Int = s.length match {
case 3 if s equalsCI "nan" => NAN
case 4 if (s equalsCI "+nan") || (s equalsCI "-nan") => NAN
case 3 if s equalsCI "inf" => POS_INF
case 4 if s equalsCI "+inf" => POS_INF
case 4 if s equalsCI "-inf" => NEG_INF
case 8 if s equalsCI "infinity" => POS_INF
case 9 if s equalsCI "+infinity" => POS_INF
case 9 if s equalsCI "-infinity" => NEG_INF
case _ => 0
}
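  // Editor's note -- illustrative expectations for parseSpecialNum (case-insensitive):
  //   parseSpecialNum("NaN")       == NAN
  //   parseSpecialNum("+Infinity") == POS_INF
  //   parseSpecialNum("-inf")      == NEG_INF
  //   parseSpecialNum("1.5")       == 0  // not special; callers fall back to toFloat/toDouble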
def parseFloat32(s: String): Float = parseSpecialNum(s) match {
case NAN => Float.NaN
case POS_INF => Float.PositiveInfinity
case NEG_INF => Float.NegativeInfinity
case _ => s.toFloat
}
def parseFloat64(s: String): Double = parseSpecialNum(s) match {
case NAN => Double.NaN
case POS_INF => Double.PositiveInfinity
case NEG_INF => Double.NegativeInfinity
case _ => s.toDouble
}
def isValidBoolean(s: String): Boolean =
(s equalsCI "true") || (s equalsCI "false")
def isValidInt32(s: String): Boolean =
try { s.toInt; true } catch { case _: NumberFormatException => false }
def isValidInt64(s: String): Boolean =
try { s.toLong; true } catch { case _: NumberFormatException => false }
def isValidFloat32(s: String): Boolean = parseSpecialNum(s) match {
case 0 => try { s.toFloat; true } catch { case _: NumberFormatException => false }
case _ => true
}
def isValidFloat64(s: String): Boolean = parseSpecialNum(s) match {
case 0 => try { s.toDouble; true } catch { case _: NumberFormatException => false }
case _ => true
}
def min_ignore_missing(l: Int, lMissing: Boolean, r: Int, rMissing: Boolean): Int =
if (lMissing) r else if (rMissing) l else Math.min(l, r)
def min_ignore_missing(l: Long, lMissing: Boolean, r: Long, rMissing: Boolean): Long =
if (lMissing) r else if (rMissing) l else Math.min(l, r)
def min_ignore_missing(l: Float, lMissing: Boolean, r: Float, rMissing: Boolean): Float =
if (lMissing) r else if (rMissing) l else Math.min(l, r)
def min_ignore_missing(l: Double, lMissing: Boolean, r: Double, rMissing: Boolean): Double =
if (lMissing) r else if (rMissing) l else Math.min(l, r)
def max_ignore_missing(l: Int, lMissing: Boolean, r: Int, rMissing: Boolean): Int =
if (lMissing) r else if (rMissing) l else Math.max(l, r)
def max_ignore_missing(l: Long, lMissing: Boolean, r: Long, rMissing: Boolean): Long =
if (lMissing) r else if (rMissing) l else Math.max(l, r)
def max_ignore_missing(l: Float, lMissing: Boolean, r: Float, rMissing: Boolean): Float =
if (lMissing) r else if (rMissing) l else Math.max(l, r)
def max_ignore_missing(l: Double, lMissing: Boolean, r: Double, rMissing: Boolean): Double =
if (lMissing) r else if (rMissing) l else Math.max(l, r)
def nanmax(l: Double, r: Double): Double =
if (java.lang.Double.isNaN(l)) r else if (java.lang.Double.isNaN(r)) l else Math.max(l, r)
def nanmax(l: Float, r: Float): Float =
if (java.lang.Float.isNaN(l)) r else if (java.lang.Float.isNaN(r)) l else Math.max(l, r)
def nanmin(l: Double, r: Double): Double =
if (java.lang.Double.isNaN(l)) r else if (java.lang.Double.isNaN(r)) l else Math.min(l, r)
def nanmin(l: Float, r: Float): Float =
if (java.lang.Float.isNaN(l)) r else if (java.lang.Float.isNaN(r)) l else Math.min(l, r)
def nanmin_ignore_missing(l: Float, lMissing: Boolean, r: Float, rMissing: Boolean): Float =
if (lMissing) r else if (rMissing) l else nanmin(l, r)
def nanmin_ignore_missing(l: Double, lMissing: Boolean, r: Double, rMissing: Boolean): Double =
if (lMissing) r else if (rMissing) l else nanmin(l, r)
def nanmax_ignore_missing(l: Float, lMissing: Boolean, r: Float, rMissing: Boolean): Float =
if (lMissing) r else if (rMissing) l else nanmax(l, r)
def nanmax_ignore_missing(l: Double, lMissing: Boolean, r: Double, rMissing: Boolean): Double =
if (lMissing) r else if (rMissing) l else nanmax(l, r)
def intMin(a: IR, b: IR): IR = If(ApplyComparisonOp(LT(a.typ), a, b), a, b)
def intMax(a: IR, b: IR): IR = If(ApplyComparisonOp(GT(a.typ), a, b), a, b)
def format(f: String, args: Row): String =
String.format(f, args.toSeq.map(_.asInstanceOf[java.lang.Object]): _*)
def registerAll() {
val thisClass = getClass
registerCode4("valuesSimilar", tv("T"), tv("U"), TFloat64, TBoolean, TBoolean, {
case(_: Type, _: PType, _: PType, _: PType, _: PType) => PBoolean()
}) {
case (er, rt, (lT, l), (rT, r), (tolT, tolerance), (absT, absolute)) =>
assert(lT.virtualType == rT.virtualType, s"\\n lt=${lT.virtualType}\\n rt=${rT.virtualType}")
val lb = boxArg(er, lT)(l)
val rb = boxArg(er, rT)(r)
er.mb.getType(lT.virtualType).invoke[Any, Any, Double, Boolean, Boolean]("valuesSimilar", lb, rb, tolerance, absolute)
}
registerCode1[Int]("triangle", TInt32, TInt32, (_: Type, n: PType) => n) { case (_, rt, (nT, n: Code[Int])) =>
Code.memoize(n, "triangle_n") { n =>
(n * (n + 1)) / 2
}
}
registerCode1[Boolean]("toInt32", TBoolean, TInt32, (_: Type, _: PType) => PInt32()) { case (_, rt, (xT, x: Code[Boolean])) => x.toI }
registerCode1[Boolean]("toInt64", TBoolean, TInt64, (_: Type, _: PType) => PInt64()) { case (_, rt, (xT, x: Code[Boolean])) => x.toI.toL }
registerCode1[Boolean]("toFloat32", TBoolean, TFloat32, (_: Type, _: PType) => PFloat32()) { case (_, rt, (xT, x: Code[Boolean])) => x.toI.toF }
registerCode1[Boolean]("toFloat64", TBoolean, TFloat64, (_: Type, _: PType) => PFloat64()) { case (_, rt, (xT, x: Code[Boolean])) => x.toI.toD }
for ((name, t, rpt, ct) <- Seq[(String, Type, PType, ClassTag[_])](
("Boolean", TBoolean, PBoolean(), implicitly[ClassTag[Boolean]]),
("Int32", TInt32, PInt32(), implicitly[ClassTag[Int]]),
("Int64", TInt64, PInt64(), implicitly[ClassTag[Long]]),
("Float64", TFloat64, PFloat64(), implicitly[ClassTag[Double]]),
("Float32", TFloat32, PFloat32(), implicitly[ClassTag[Float]])
)) {
val ctString: ClassTag[String] = implicitly
registerCode1(s"to$name", TString, t, (_: Type, _: PType) => rpt) {
case (r, rt, (xT: PString, x: Code[Long])) =>
val s = asm4s.coerce[String](wrapArg(r, xT)(x))
Code.invokeScalaObject1(thisClass, s"parse$name", s)(ctString, ct)
}
registerIEmitCode1(s"to${name}OrMissing", TString, t, (_: Type, xPT: PType) => rpt.setRequired(xPT.required)) {
case (cb, r, rt, x) =>
x().flatMap(cb) { case (sc: PStringCode) =>
val sv = cb.newLocal[String]("s", sc.loadString())
IEmitCode(cb,
!Code.invokeScalaObject1[String, Boolean](thisClass, s"isValid$name", sv),
PCode(rt, Code.invokeScalaObject1(thisClass, s"parse$name", sv)(ctString, ct)))
}
}
}
Array(TInt32, TInt64).foreach { t =>
registerIR2("min", t, t, t)((_, a, b) => intMin(a, b))
registerIR2("max", t, t, t)((_, a, b) => intMax(a, b))
}
Array("min", "max").foreach { name =>
registerCode2(name, TFloat32, TFloat32, TFloat32, (_: Type, _: PType, _: PType) => PFloat32()) {
case (r, rt, (t1, v1: Code[Float]), (t2, v2: Code[Float])) =>
Code.invokeStatic2[Math, Float, Float, Float](name, v1, v2)
}
registerCode2(name, TFloat64, TFloat64, TFloat64, (_: Type, _: PType, _: PType) => PFloat64()) {
case (r, rt, (t1, v1: Code[Double]), (t2, v2: Code[Double])) =>
Code.invokeStatic2[Math, Double, Double, Double](name, v1, v2)
}
val ignoreMissingName = name + "_ignore_missing"
val ignoreNanName = "nan" + name
val ignoreBothName = ignoreNanName + "_ignore_missing"
registerCode2(ignoreNanName, TFloat32, TFloat32, TFloat32, (_: Type, _: PType, _: PType) => PFloat32()) {
case (r, rt, (t1, v1: Code[Float]), (t2, v2: Code[Float])) =>
Code.invokeScalaObject2[Float, Float, Float](thisClass, ignoreNanName, v1, v2)
}
registerCode2(ignoreNanName, TFloat64, TFloat64, TFloat64, (_: Type, _: PType, _: PType) => PFloat64()) {
case (r, rt, (t1, v1: Code[Double]), (t2, v2: Code[Double])) =>
Code.invokeScalaObject2[Double, Double, Double](thisClass, ignoreNanName, v1, v2)
}
def ignoreMissingTriplet[T](rt: PType, v1: EmitCode, v2: EmitCode, name: String)(implicit ct: ClassTag[T], ti: TypeInfo[T]): EmitCode = {
val m1 = Code.newLocal[Boolean]("min_max_igno_miss_m1")
val m2 = Code.newLocal[Boolean]("min_max_igno_miss_m2")
EmitCode(
Code(v1.setup, v2.setup, m1 := v1.m, m2 := v2.m),
m1 && m2,
PCode(rt, Code.invokeScalaObject4[T, Boolean, T, Boolean, T](thisClass, name,
m1.mux(coerce[T](defaultValue(ti)), v1.value[T]), m1,
m2.mux(coerce[T](defaultValue(ti)), v2.value[T]), m2)))
}
registerEmitCode2(ignoreMissingName, TInt32, TInt32, TInt32, (_: Type, t1: PType, t2: PType) => PInt32(t1.required && t2.required)) {
case (r, rt, v1, v2) => ignoreMissingTriplet[Int](rt, v1, v2, ignoreMissingName)
}
registerEmitCode2(ignoreMissingName, TInt64, TInt64, TInt64, (_: Type, t1: PType, t2: PType) => PInt64(t1.required && t2.required)) {
case (r, rt, v1, v2) => ignoreMissingTriplet[Long](rt, v1, v2, ignoreMissingName)
}
registerEmitCode2(ignoreMissingName, TFloat32, TFloat32, TFloat32, (_: Type, t1: PType, t2: PType) => PFloat32(t1.required && t2.required)) {
case (r, rt, v1, v2) => ignoreMissingTriplet[Float](rt, v1, v2, ignoreMissingName)
}
registerEmitCode2(ignoreMissingName, TFloat64, TFloat64, TFloat64, (_: Type, t1: PType, t2: PType) => PFloat64(t1.required && t2.required)) {
case (r, rt, v1, v2) => ignoreMissingTriplet[Double](rt, v1, v2, ignoreMissingName)
}
registerEmitCode2(ignoreBothName, TFloat32, TFloat32, TFloat32, (_: Type, t1: PType, t2: PType) => PFloat32(t1.required && t2.required)) {
case (r, rt, v1, v2) => ignoreMissingTriplet[Float](rt, v1, v2, ignoreBothName)
}
registerEmitCode2(ignoreBothName, TFloat64, TFloat64, TFloat64, (_: Type, t1: PType, t2: PType) => PFloat64(t1.required && t2.required)) {
case (r, rt, v1, v2) => ignoreMissingTriplet[Double](rt, v1, v2, ignoreBothName)
}
}
registerCode2("format", TString, tv("T", "tuple"), TString, (_: Type, _: PType, _: PType) => PCanonicalString()) {
case (r, rt, (fmtT: PString, format: Code[Long]), (argsT: PTuple, args: Code[Long])) =>
unwrapReturn(r, rt)(Code.invokeScalaObject2[String, Row, String](thisClass, "format",
asm4s.coerce[String](wrapArg(r, fmtT)(format)),
Code.checkcast[Row](asm4s.coerce[java.lang.Object](wrapArg(r, argsT)(args)))))
}
registerEmitCode2("land", TBoolean, TBoolean, TBoolean, (_: Type, tl: PType, tr: PType) => PBoolean(tl.required && tr.required)) {
case (er, rt, l, r) =>
val lv = l.value[Boolean]
val rv = r.value[Boolean]
// 00 ... 00 rv rm lv lm
val w = er.mb.newLocal[Int]()
// m/m, t/m, m/t
val M = const((1 << 5) | (1 << 6) | (1 << 9))
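      // Editor's note: w packs both operands into 4 bits (lm = left missing, lv = left
      // true, rm = right missing, rv = right true, per the layout comment above). M has
      // bit w set exactly when the AND result is missing under Kleene logic: w = 5 (m/m),
      // w = 6 (t/m), w = 9 (m/t). A false left operand (w == 0) short-circuits, and the
      // result value is true only for w == 10 (t/t).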
val setup = Code(l.setup,
w := l.m.mux(const(1),
lv.mux(
const(2),
const(0))),
w.cne(0).mux(
Code(
r.setup,
w := w | r.m.mux(const(4),
rv.mux(
const(8),
const(0)))),
Code._empty))
EmitCode(setup,
((M >> w) & 1).cne(0),
PCode(rt, w.ceq(10)))
}
registerEmitCode2("lor", TBoolean, TBoolean, TBoolean, (_: Type, tl: PType, tr: PType) => PBoolean(tl.required && tr.required)) {
case (er, rt, l, r) =>
val lv = l.value[Boolean]
val rv = r.value[Boolean]
// 00 ... 00 rv rm lv lm
val w = er.mb.newLocal[Int]()
// m/m, f/m, m/f
val M = const((1 << 5) | (1 << 1) | (1 << 4))
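      // Editor's note: same 4-bit encoding as in `land` above; here M marks the w values
      // for which the OR result is missing: w = 5 (m/m), w = 1 (m/f), w = 4 (f/m). A true
      // left operand (w == 2) short-circuits the right side, and the result value is
      // false only for w == 0 (f/f), hence `w.cne(0)` below.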
val setup = Code(l.setup,
w := l.m.mux(const(1),
lv.mux(
const(2),
const(0))),
w.cne(2).mux(
Code(
r.setup,
w := w | r.m.mux(const(4),
rv.mux(
const(8),
const(0)))),
Code._empty))
EmitCode(setup,
((M >> w) & 1).cne(0),
PCode(rt, w.cne(0)))
}
}
}
| danking/hail | hail/src/main/scala/is/hail/expr/ir/functions/UtilFunctions.scala | Scala | mit | 13,036 |
package org.vitrivr.adampro.query.distance
import org.vitrivr.adampro.data.datatypes.vector.Vector._
import org.vitrivr.adampro.query.distance.Distance._
/**
* ADAMpro
*
* Ivan Giangreco
* June 2016
*
* from Julia: sum(p .* log(p ./ q))
*/
object KullbackLeiblerDivergence extends ElementwiseSummedDistanceFunction with Serializable {
override def element(v1: VectorBase, v2: VectorBase, w: VectorBase): Distance = {
if (math.abs(v1) < 10E-6 || math.abs(v2) < 10E-6) {
0.toFloat
} else {
(v1 * math.log(v1 / v2)).toFloat
}
}
}
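// Editor's note -- an illustrative spot check, treating VectorBase as Float:
// element(0.5f, 0.25f, 1f) == (0.5 * log(0.5 / 0.25)).toFloat, about 0.3466f, while any
// component whose magnitude is below 1e-5 contributes 0, avoiding log(0) blow-ups.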
| dbisUnibas/ADAMpro | src/main/scala/org/vitrivr/adampro/query/distance/KullbackLeiblerDivergence.scala | Scala | mit | 571 |
package commons.repositories
trait WithId[Underlying, Id <: BaseId[Underlying]] {
def id: Id
}
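// Editor's note -- a hypothetical usage sketch (UserId and User are illustrative names;
// BaseId is assumed to be the id wrapper defined elsewhere in this package):
//
//   case class UserId(value: Long) extends BaseId[Long]
//   case class User(id: UserId, name: String) extends WithId[Long, UserId]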
| Dasiu/play-framework-test-project | app/commons/repositories/WithId.scala | Scala | mit | 100 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.javadsl.persistence.testkit
import akka.actor.{ ActorRef, ActorSystem, Props, actorRef2Scala }
import akka.persistence.PersistentActor
import akka.testkit.{ ImplicitSender, TestKitBase }
import com.lightbend.lagom.persistence.ActorSystemSpec
import com.lightbend.lagom.persistence.PersistenceSpec
import org.scalatest.{ Matchers, WordSpecLike }
import scala.concurrent.duration._
object AbstractEmbeddedPersistentActorSpec {
final case class Cmd(data: String)
final case class Evt(data: String)
case object Get
final case class State(data: Vector[String] = Vector.empty) {
def apply(evt: Evt): State = {
copy(data :+ evt.data)
}
}
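  // Editor's note: State is rebuilt by folding events, so replaying Evt("A") then
  // Evt("B") during recovery yields State(Vector("A", "B")).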
def props(persistenceId: String): Props =
Props(new Persistent(persistenceId))
class Persistent(override val persistenceId: String) extends PersistentActor {
var state = State()
override def receiveRecover = {
case evt: Evt => state = state(evt)
}
override def receiveCommand = {
case Cmd(data) =>
persist(Evt(data.toUpperCase)) { evt =>
state = state(evt)
}
case Get => sender() ! state
}
}
}
trait AbstractEmbeddedPersistentActorSpec { spec: ActorSystemSpec =>
import AbstractEmbeddedPersistentActorSpec._
"A persistent actor" must {
"store events in the embedded journal" in within(15.seconds) {
val p = system.actorOf(props("p1"))
println(implicitly[ActorRef])
p ! Get
expectMsg(State())
p ! Cmd("a")
p ! Cmd("b")
p ! Cmd("c")
p ! Get
expectMsg(State(Vector("A", "B", "C")))
      // starting another actor with the same persistenceId should recover the state
val p2 = system.actorOf(props("p1"))
p2 ! Get
expectMsg(State(Vector("A", "B", "C")))
}
}
}
| rstento/lagom | persistence/javadsl/src/test/scala/com/lightbend/lagom/javadsl/persistence/testkit/AbstractEmbeddedPersistentActorSpec.scala | Scala | apache-2.0 | 1,875 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.lang.{Boolean => JavaBoolean}
import java.lang.{Byte => JavaByte}
import java.lang.{Character => JavaChar}
import java.lang.{Double => JavaDouble}
import java.lang.{Float => JavaFloat}
import java.lang.{Integer => JavaInteger}
import java.lang.{Long => JavaLong}
import java.lang.{Short => JavaShort}
import java.math.{BigDecimal => JavaBigDecimal}
import java.nio.charset.StandardCharsets
import java.sql.{Date, Timestamp}
import java.time.{Duration, Instant, LocalDate, LocalDateTime, Period}
import java.util
import java.util.Objects
import javax.xml.bind.DatatypeConverter
import scala.math.{BigDecimal, BigInt}
import scala.reflect.runtime.universe.TypeTag
import scala.util.Try
import org.json4s.JsonAST._
import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow, ScalaReflection}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.trees.TreePattern
import org.apache.spark.sql.catalyst.trees.TreePattern.{LITERAL, NULL_LITERAL, TRUE_OR_FALSE_LITERAL}
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.catalyst.util.DateTimeUtils.instantToMicros
import org.apache.spark.sql.catalyst.util.IntervalStringStyles.ANSI_STYLE
import org.apache.spark.sql.catalyst.util.IntervalUtils.{durationToMicros, periodToMonths, toDayTimeIntervalString, toYearMonthIntervalString}
import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types._
import org.apache.spark.util.Utils
import org.apache.spark.util.collection.BitSet
import org.apache.spark.util.collection.ImmutableBitSet
object Literal {
val TrueLiteral: Literal = Literal(true, BooleanType)
val FalseLiteral: Literal = Literal(false, BooleanType)
def apply(v: Any): Literal = v match {
case i: Int => Literal(i, IntegerType)
case l: Long => Literal(l, LongType)
case d: Double => Literal(d, DoubleType)
case f: Float => Literal(f, FloatType)
case b: Byte => Literal(b, ByteType)
case s: Short => Literal(s, ShortType)
case s: String => Literal(UTF8String.fromString(s), StringType)
case c: Char => Literal(UTF8String.fromString(c.toString), StringType)
case ac: Array[Char] => Literal(UTF8String.fromString(String.valueOf(ac)), StringType)
case b: Boolean => Literal(b, BooleanType)
case d: BigDecimal =>
val decimal = Decimal(d)
Literal(decimal, DecimalType.fromDecimal(decimal))
case d: JavaBigDecimal =>
val decimal = Decimal(d)
Literal(decimal, DecimalType.fromDecimal(decimal))
case d: Decimal => Literal(d, DecimalType(Math.max(d.precision, d.scale), d.scale))
case i: Instant => Literal(instantToMicros(i), TimestampType)
case t: Timestamp => Literal(DateTimeUtils.fromJavaTimestamp(t), TimestampType)
case l: LocalDateTime => Literal(DateTimeUtils.localDateTimeToMicros(l), TimestampWithoutTZType)
case ld: LocalDate => Literal(ld.toEpochDay.toInt, DateType)
case d: Date => Literal(DateTimeUtils.fromJavaDate(d), DateType)
case d: Duration => Literal(durationToMicros(d), DayTimeIntervalType())
case p: Period => Literal(periodToMonths(p), YearMonthIntervalType)
case a: Array[Byte] => Literal(a, BinaryType)
case a: collection.mutable.WrappedArray[_] => apply(a.array)
case a: Array[_] =>
val elementType = componentTypeToDataType(a.getClass.getComponentType())
val dataType = ArrayType(elementType)
val convert = CatalystTypeConverters.createToCatalystConverter(dataType)
Literal(convert(a), dataType)
case i: CalendarInterval => Literal(i, CalendarIntervalType)
case null => Literal(null, NullType)
case v: Literal => v
case _ =>
throw QueryExecutionErrors.literalTypeUnsupportedError(v)
}
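  // Editor's note -- illustrative mappings performed by apply:
  //   Literal(1)          -> Literal(1, IntegerType)
  //   Literal("ab")       -> Literal(UTF8String.fromString("ab"), StringType)
  //   Literal(null)       -> Literal(null, NullType)
  //   Literal(Array(1,2)) -> an ArrayType(IntegerType) literal via the Catalyst converter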
/**
* Returns the Spark SQL DataType for a given class object. Since this type needs to be resolved
   * at runtime, we use match-case idioms for class objects here. However, there are similar
   * functions in other files (e.g., HiveInspectors), so these functions need to be merged into one.
*/
private[this] def componentTypeToDataType(clz: Class[_]): DataType = clz match {
// primitive types
case JavaShort.TYPE => ShortType
case JavaInteger.TYPE => IntegerType
case JavaLong.TYPE => LongType
case JavaDouble.TYPE => DoubleType
case JavaByte.TYPE => ByteType
case JavaFloat.TYPE => FloatType
case JavaBoolean.TYPE => BooleanType
case JavaChar.TYPE => StringType
// java classes
case _ if clz == classOf[LocalDate] => DateType
case _ if clz == classOf[Date] => DateType
case _ if clz == classOf[Instant] => TimestampType
case _ if clz == classOf[Timestamp] => TimestampType
case _ if clz == classOf[LocalDateTime] => TimestampWithoutTZType
case _ if clz == classOf[Duration] => DayTimeIntervalType()
case _ if clz == classOf[Period] => YearMonthIntervalType
case _ if clz == classOf[JavaBigDecimal] => DecimalType.SYSTEM_DEFAULT
case _ if clz == classOf[Array[Byte]] => BinaryType
case _ if clz == classOf[Array[Char]] => StringType
case _ if clz == classOf[JavaShort] => ShortType
case _ if clz == classOf[JavaInteger] => IntegerType
case _ if clz == classOf[JavaLong] => LongType
case _ if clz == classOf[JavaDouble] => DoubleType
case _ if clz == classOf[JavaByte] => ByteType
case _ if clz == classOf[JavaFloat] => FloatType
case _ if clz == classOf[JavaBoolean] => BooleanType
// other scala classes
case _ if clz == classOf[String] => StringType
case _ if clz == classOf[BigInt] => DecimalType.SYSTEM_DEFAULT
case _ if clz == classOf[BigDecimal] => DecimalType.SYSTEM_DEFAULT
case _ if clz == classOf[CalendarInterval] => CalendarIntervalType
case _ if clz.isArray => ArrayType(componentTypeToDataType(clz.getComponentType))
case _ => throw QueryCompilationErrors.arrayComponentTypeUnsupportedError(clz)
}
/**
* Constructs a [[Literal]] of [[ObjectType]], for example when you need to pass an object
* into code generation.
*/
def fromObject(obj: Any, objType: DataType): Literal = new Literal(obj, objType)
def fromObject(obj: Any): Literal = new Literal(obj, ObjectType(obj.getClass))
def create(v: Any, dataType: DataType): Literal = {
Literal(CatalystTypeConverters.convertToCatalyst(v), dataType)
}
def create[T : TypeTag](v: T): Literal = Try {
val ScalaReflection.Schema(dataType, _) = ScalaReflection.schemaFor[T]
val convert = CatalystTypeConverters.createToCatalystConverter(dataType)
Literal(convert(v), dataType)
}.getOrElse {
Literal(v)
}
/**
* Create a literal with default value for given DataType
*/
def default(dataType: DataType): Literal = dataType match {
case NullType => create(null, NullType)
case BooleanType => Literal(false)
case ByteType => Literal(0.toByte)
case ShortType => Literal(0.toShort)
case IntegerType => Literal(0)
case LongType => Literal(0L)
case FloatType => Literal(0.0f)
case DoubleType => Literal(0.0)
case dt: DecimalType => Literal(Decimal(0, dt.precision, dt.scale))
case DateType => create(0, DateType)
case TimestampType => create(0L, TimestampType)
case TimestampWithoutTZType => create(0L, TimestampWithoutTZType)
case it: DayTimeIntervalType => create(0L, it)
case YearMonthIntervalType => create(0, YearMonthIntervalType)
case StringType => Literal("")
case BinaryType => Literal("".getBytes(StandardCharsets.UTF_8))
case CalendarIntervalType => Literal(new CalendarInterval(0, 0, 0))
case arr: ArrayType => create(Array(), arr)
case map: MapType => create(Map(), map)
case struct: StructType =>
create(InternalRow.fromSeq(struct.fields.map(f => default(f.dataType).value)), struct)
case udt: UserDefinedType[_] => Literal(default(udt.sqlType).value, udt)
case other =>
throw QueryExecutionErrors.noDefaultForDataTypeError(dataType)
}
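  // Editor's note -- a few illustrative defaults: default(BooleanType) == Literal(false),
  // default(StringType) == Literal(""), and default of a StructType recurses to build a
  // row of per-field defaults.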
private[expressions] def validateLiteralValue(value: Any, dataType: DataType): Unit = {
def doValidate(v: Any, dataType: DataType): Boolean = dataType match {
case _ if v == null => true
case BooleanType => v.isInstanceOf[Boolean]
case ByteType => v.isInstanceOf[Byte]
case ShortType => v.isInstanceOf[Short]
case IntegerType | DateType | YearMonthIntervalType => v.isInstanceOf[Int]
case LongType | TimestampType | TimestampWithoutTZType | _: DayTimeIntervalType =>
v.isInstanceOf[Long]
case FloatType => v.isInstanceOf[Float]
case DoubleType => v.isInstanceOf[Double]
case _: DecimalType => v.isInstanceOf[Decimal]
case CalendarIntervalType => v.isInstanceOf[CalendarInterval]
case BinaryType => v.isInstanceOf[Array[Byte]]
case StringType => v.isInstanceOf[UTF8String]
case st: StructType =>
v.isInstanceOf[InternalRow] && {
val row = v.asInstanceOf[InternalRow]
st.fields.map(_.dataType).zipWithIndex.forall {
case (dt, i) => doValidate(row.get(i, dt), dt)
}
}
case at: ArrayType =>
v.isInstanceOf[ArrayData] && {
val ar = v.asInstanceOf[ArrayData]
ar.numElements() == 0 || doValidate(ar.get(0, at.elementType), at.elementType)
}
case mt: MapType =>
v.isInstanceOf[MapData] && {
val map = v.asInstanceOf[MapData]
doValidate(map.keyArray(), ArrayType(mt.keyType)) &&
doValidate(map.valueArray(), ArrayType(mt.valueType))
}
case ObjectType(cls) => cls.isInstance(v)
case udt: UserDefinedType[_] => doValidate(v, udt.sqlType)
case _ => false
}
require(doValidate(value, dataType),
s"Literal must have a corresponding value to ${dataType.catalogString}, " +
s"but class ${Utils.getSimpleName(value.getClass)} found.")
}
}
/**
* An extractor that matches non-null literal values
*/
object NonNullLiteral {
def unapply(literal: Literal): Option[(Any, DataType)] = {
Option(literal.value).map(_ => (literal.value, literal.dataType))
}
}
/**
* Extractor for retrieving Float literals.
*/
object FloatLiteral {
def unapply(a: Any): Option[Float] = a match {
case Literal(a: Float, FloatType) => Some(a)
case _ => None
}
}
/**
* Extractor for retrieving Double literals.
*/
object DoubleLiteral {
def unapply(a: Any): Option[Double] = a match {
case Literal(a: Double, DoubleType) => Some(a)
case _ => None
}
}
/**
* Extractor for retrieving Int literals.
*/
object IntegerLiteral {
def unapply(a: Any): Option[Int] = a match {
case Literal(a: Int, IntegerType) => Some(a)
case _ => None
}
}
/**
* Extractor for retrieving String literals.
*/
object StringLiteral {
def unapply(a: Any): Option[String] = a match {
case Literal(s: UTF8String, StringType) => Some(s.toString)
case _ => None
}
}
/**
 * Extractor and other utility methods for decimal literals.
*/
object DecimalLiteral {
def apply(v: Long): Literal = Literal(Decimal(v))
def apply(v: Double): Literal = Literal(Decimal(v))
def unapply(e: Expression): Option[Decimal] = e match {
case Literal(v, _: DecimalType) => Some(v.asInstanceOf[Decimal])
case _ => None
}
def largerThanLargestLong(v: Decimal): Boolean = v > Decimal(Long.MaxValue)
def smallerThanSmallestLong(v: Decimal): Boolean = v < Decimal(Long.MinValue)
}
object LiteralTreeBits {
// Singleton tree pattern BitSet for all Literals that are not true, false, or null.
val literalBits: BitSet = new ImmutableBitSet(TreePattern.maxId, LITERAL.id)
// Singleton tree pattern BitSet for all Literals that are true or false.
val booleanLiteralBits: BitSet = new ImmutableBitSet(
TreePattern.maxId, LITERAL.id, TRUE_OR_FALSE_LITERAL.id)
// Singleton tree pattern BitSet for all Literals that are nulls.
val nullLiteralBits: BitSet = new ImmutableBitSet(TreePattern.maxId, LITERAL.id, NULL_LITERAL.id)
}
/**
 * In order to do type checking, use Literal.create() instead of the constructor
*/
case class Literal (value: Any, dataType: DataType) extends LeafExpression {
Literal.validateLiteralValue(value, dataType)
override def foldable: Boolean = true
override def nullable: Boolean = value == null
private def timeZoneId = DateTimeUtils.getZoneId(SQLConf.get.sessionLocalTimeZone)
override lazy val treePatternBits: BitSet = {
value match {
case null => LiteralTreeBits.nullLiteralBits
case true | false => LiteralTreeBits.booleanLiteralBits
case _ => LiteralTreeBits.literalBits
}
}
override def toString: String = value match {
case null => "null"
case binary: Array[Byte] => s"0x" + DatatypeConverter.printHexBinary(binary)
case d: ArrayBasedMapData => s"map(${d.toString})"
case other =>
dataType match {
case DateType =>
DateFormatter().format(value.asInstanceOf[Int])
case TimestampType =>
TimestampFormatter.getFractionFormatter(timeZoneId).format(value.asInstanceOf[Long])
case DayTimeIntervalType(startField, endField) =>
toDayTimeIntervalString(value.asInstanceOf[Long], ANSI_STYLE, startField, endField)
case YearMonthIntervalType => toYearMonthIntervalString(value.asInstanceOf[Int], ANSI_STYLE)
case _ =>
other.toString
}
}
override def hashCode(): Int = {
val valueHashCode = value match {
case null => 0
case binary: Array[Byte] => util.Arrays.hashCode(binary)
case other => other.hashCode()
}
31 * Objects.hashCode(dataType) + valueHashCode
}
override def equals(other: Any): Boolean = other match {
case o: Literal if !dataType.equals(o.dataType) => false
case o: Literal =>
(value, o.value) match {
case (null, null) => true
case (a: Array[Byte], b: Array[Byte]) => util.Arrays.equals(a, b)
case (a: ArrayBasedMapData, b: ArrayBasedMapData) =>
a.keyArray == b.keyArray && a.valueArray == b.valueArray
case (a: Double, b: Double) if a.isNaN && b.isNaN => true
case (a: Float, b: Float) if a.isNaN && b.isNaN => true
case (a, b) => a != null && a == b
}
case _ => false
}
override protected def jsonFields: List[JField] = {
// Turns all kinds of literal values to string in json field, as the type info is hard to
// retain in json format, e.g. {"a": 123} can be an int, or double, or decimal, etc.
val jsonValue = (value, dataType) match {
case (null, _) => JNull
case (i: Int, DateType) => JString(toString)
case (l: Long, TimestampType) => JString(toString)
case (other, _) => JString(other.toString)
}
("value" -> jsonValue) :: ("dataType" -> dataType.jsonValue) :: Nil
}
override def eval(input: InternalRow): Any = value
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val javaType = CodeGenerator.javaType(dataType)
if (value == null) {
ExprCode.forNullValue(dataType)
} else {
def toExprCode(code: String): ExprCode = {
ExprCode.forNonNullValue(JavaCode.literal(code, dataType))
}
dataType match {
case BooleanType | IntegerType | DateType | YearMonthIntervalType =>
toExprCode(value.toString)
case FloatType =>
value.asInstanceOf[Float] match {
case v if v.isNaN =>
toExprCode("Float.NaN")
case Float.PositiveInfinity =>
toExprCode("Float.POSITIVE_INFINITY")
case Float.NegativeInfinity =>
toExprCode("Float.NEGATIVE_INFINITY")
case _ =>
toExprCode(s"${value}F")
}
case DoubleType =>
value.asInstanceOf[Double] match {
case v if v.isNaN =>
toExprCode("Double.NaN")
case Double.PositiveInfinity =>
toExprCode("Double.POSITIVE_INFINITY")
case Double.NegativeInfinity =>
toExprCode("Double.NEGATIVE_INFINITY")
case _ =>
toExprCode(s"${value}D")
}
case ByteType | ShortType =>
ExprCode.forNonNullValue(JavaCode.expression(s"($javaType)$value", dataType))
case TimestampType | TimestampWithoutTZType | LongType | _: DayTimeIntervalType =>
toExprCode(s"${value}L")
case _ =>
val constRef = ctx.addReferenceObj("literal", value, javaType)
ExprCode.forNonNullValue(JavaCode.global(constRef, dataType))
}
}
}
override def sql: String = (value, dataType) match {
case (_, NullType | _: ArrayType | _: MapType | _: StructType) if value == null => "NULL"
case _ if value == null => s"CAST(NULL AS ${dataType.sql})"
case (v: UTF8String, StringType) =>
// Escapes all backslashes and single quotes.
"'" + v.toString.replace("\\\\", "\\\\\\\\").replace("'", "\\\\'") + "'"
case (v: Byte, ByteType) => v + "Y"
case (v: Short, ShortType) => v + "S"
case (v: Long, LongType) => v + "L"
// Float type doesn't have a suffix
case (v: Float, FloatType) =>
val castedValue = v match {
case _ if v.isNaN => "'NaN'"
case Float.PositiveInfinity => "'Infinity'"
case Float.NegativeInfinity => "'-Infinity'"
case _ => s"'$v'"
}
s"CAST($castedValue AS ${FloatType.sql})"
case (v: Double, DoubleType) =>
v match {
case _ if v.isNaN => s"CAST('NaN' AS ${DoubleType.sql})"
case Double.PositiveInfinity => s"CAST('Infinity' AS ${DoubleType.sql})"
case Double.NegativeInfinity => s"CAST('-Infinity' AS ${DoubleType.sql})"
case _ => v + "D"
}
case (v: Decimal, t: DecimalType) => v + "BD"
case (v: Int, DateType) =>
s"DATE '$toString'"
case (v: Long, TimestampType) =>
s"TIMESTAMP '$toString'"
case (i: CalendarInterval, CalendarIntervalType) =>
s"INTERVAL '${i.toString}'"
case (v: Array[Byte], BinaryType) => s"X'${DatatypeConverter.printHexBinary(v)}'"
case (i: Long, DayTimeIntervalType(startField, endField)) =>
toDayTimeIntervalString(i, ANSI_STYLE, startField, endField)
case (i: Int, YearMonthIntervalType) => toYearMonthIntervalString(i, ANSI_STYLE)
case _ => value.toString
}
}
| cloud-fan/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala | Scala | apache-2.0 | 19,275 |
package ml.wolfe.term
/**
* @author riedel
*/
trait ConvertValueTerm[A <: Term[Dom], D <: Dom] extends Term[D] with NAry {
val term:A
val domain:D
def f(arg:term.domain.Value):domain.Value
type ArgumentType = A
def arguments = IndexedSeq(term)
private var currentExecution:Execution = null
private var mapped:domain.Value = null.asInstanceOf[domain.Value]
private var inputValue:term.domain.Value = _
def vars = term.vars
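  // Editor's note: the evaluator below memoizes f -- `mapped` is recomputed only when
  // the decoded input value changes or a new Execution begins; otherwise the cached
  // value is simply copied into the output setting.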
override def evaluatorImpl(in: Settings) = new AbstractEvaluator(in) {
val termEval = term.evaluatorImpl(in)
def updateInput()(implicit execution: Execution):Boolean = {
termEval.eval()
val oldValue = inputValue
inputValue = term.domain.toValue(termEval.output)
oldValue != inputValue
}
def eval()(implicit execution: Execution): Unit = {
if (updateInput() || currentExecution != execution) {
//updateInput()
mapped = f(inputValue)
currentExecution = execution
}
domain.copyValue(mapped, output)
}
val output = domain.createSetting()
}
  override def toString = s"ConvertValueTerm($term)"
}
trait ConvertValuesTerm[A <: Term[Dom], D <: Dom] extends Term[D] {
val term:A
val domain:D
def f(arg:term.domain.Value):Term[D]
lazy val prototype = f(term.domain.zero)
lazy val this2proto = VariableMapping(vars,prototype.vars)
def vars = (term.vars ++ prototype.vars).distinct
override def evaluatorImpl(in: Settings) = new AbstractEvaluator(in) {
val termEval = term.evaluatorImpl(in)
var currentExecution:Execution = null
def eval()(implicit execution: Execution): Unit = {
if (currentExecution != execution) {
termEval.eval()
val value = term.domain.toValue(termEval.output)
val mapped = f(value)
val mappedEval = mapped.evaluatorImpl(input.linkedSettings(vars,mapped.vars))
mappedEval.eval()
output shallowAssign mappedEval.output
}
}
val output = domain.createSetting()
}
}
| wolfe-pack/wolfe | wolfe-core/src/main/scala/ml/wolfe/term/ConvertValueTerm.scala | Scala | apache-2.0 | 2,016 |
package spark
import java.io.ByteArrayInputStream
import java.io.EOFException
import java.net.URL
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicBoolean
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.atomic.AtomicReference
import scala.collection.mutable.ArrayBuffer
import scala.collection.mutable.HashMap
class ParallelShuffleFetcher extends ShuffleFetcher with Logging {
val parallelFetches = System.getProperty("spark.parallel.fetches", "3").toInt
def fetch[K, V](shuffleId: Int, reduceId: Int, func: (K, V) => Unit) {
logInfo("Fetching outputs for shuffle %d, reduce %d".format(shuffleId, reduceId))
// Figure out a list of input IDs (mapper IDs) for each server
val ser = SparkEnv.get.serializer.newInstance()
val inputsByUri = new HashMap[String, ArrayBuffer[Int]]
val serverUris = SparkEnv.get.mapOutputTracker.getServerUris(shuffleId)
for ((serverUri, index) <- serverUris.zipWithIndex) {
inputsByUri.getOrElseUpdate(serverUri, ArrayBuffer()) += index
}
    // Randomize the server list (to spread fetch load across servers) and put the entries
    // in a LinkedBlockingQueue
val serverQueue = new LinkedBlockingQueue[(String, ArrayBuffer[Int])]
for (pair <- Utils.randomize(inputsByUri))
serverQueue.put(pair)
// Create a queue to hold the fetched data
val resultQueue = new LinkedBlockingQueue[Array[Byte]]
    // Atomic variable used by the fetch threads to report the first failure
    val failure = new AtomicReference[FetchFailedException](null)
// Start multiple threads to do the fetching (TODO: may be possible to do it asynchronously)
for (i <- 0 until parallelFetches) {
new Thread("Fetch thread " + i + " for reduce " + reduceId) {
override def run() {
while (true) {
val pair = serverQueue.poll()
if (pair == null)
return
val (serverUri, inputIds) = pair
//logInfo("Pulled out server URI " + serverUri)
for (i <- inputIds) {
if (failure.get != null)
return
val url = "%s/shuffle/%d/%d/%d".format(serverUri, shuffleId, i, reduceId)
logInfo("Starting HTTP request for " + url)
try {
val conn = new URL(url).openConnection()
conn.connect()
val len = conn.getContentLength()
if (len == -1)
throw new SparkException("Content length was not specified by server")
val buf = new Array[Byte](len)
val in = conn.getInputStream()
var pos = 0
while (pos < len) {
val n = in.read(buf, pos, len-pos)
if (n == -1)
throw new SparkException("EOF before reading the expected " + len + " bytes")
else
pos += n
}
// Done reading everything
resultQueue.put(buf)
in.close()
} catch {
case e: Exception =>
logError("Fetch failed from " + url, e)
failure.set(new FetchFailedException(serverUri, shuffleId, i, reduceId, e))
return
}
}
//logInfo("Done with server URI " + serverUri)
}
}
}.start()
}
// Wait for results from the threads (either a failure or all servers done)
var resultsDone = 0
    val totalResults = inputsByUri.map{case (uri, inputs) => inputs.size}.sum
while (failure.get == null && resultsDone < totalResults) {
try {
val result = resultQueue.poll(100, TimeUnit.MILLISECONDS)
if (result != null) {
//logInfo("Pulled out a result")
val in = ser.inputStream(new ByteArrayInputStream(result))
try {
while (true) {
val pair = in.readObject().asInstanceOf[(K, V)]
func(pair._1, pair._2)
}
} catch {
case e: EOFException => {} // TODO: cleaner way to detect EOF, such as a sentinel
}
resultsDone += 1
//logInfo("Results done = " + resultsDone)
}
} catch { case e: InterruptedException => {} }
}
if (failure.get != null) {
throw failure.get
}
}
}
| jperla/spark-advancers | core/src/main/scala/spark/ParallelShuffleFetcher.scala | Scala | bsd-3-clause | 4,412 |
package dotty.tools.dotc.util
/** Defines a key type with which to tag properties, such as attachments
* or context properties
*/
object Property {
/** The class of keys for properties of type V */
class Key[+V]
/**
* The class of keys for sticky properties of type V
*
* Sticky properties are properties that should be copied over when their container
* is copied.
*/
class StickyKey[+V] extends Key[V]
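  // Editor's note -- a hypothetical usage sketch: a client declares
  //   val DocComment: Key[String] = new Key
  // and tags properties with it; declaring `new StickyKey[String]` instead makes the
  // tag survive copying of its container, as described above.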
}
| som-snytt/dotty | compiler/src/dotty/tools/dotc/util/Property.scala | Scala | apache-2.0 | 435 |
package java.nio
import scala.scalajs.js.typedarray._
object ShortBuffer {
private final val HashSeed = 383731478 // "java.nio.ShortBuffer".##
def allocate(capacity: Int): ShortBuffer =
wrap(new Array[Short](capacity))
def wrap(array: Array[Short], offset: Int, length: Int): ShortBuffer =
HeapShortBuffer.wrap(array, 0, array.length, offset, length, false)
def wrap(array: Array[Short]): ShortBuffer =
wrap(array, 0, array.length)
// Extended API
def wrap(array: Int16Array): ShortBuffer =
TypedArrayShortBuffer.wrap(array)
}
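// Editor's note: a minimal usage sketch, assuming the standard java.nio Buffer protocol
// (flip() switches the buffer from writing to reading):
//
//   val buf = ShortBuffer.allocate(4)
//   buf.put(1.toShort).put(2.toShort)
//   buf.flip()
//   buf.get() // == 1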
abstract class ShortBuffer private[nio] (
_capacity: Int, private[nio] val _array: Array[Short],
private[nio] val _arrayOffset: Int)
extends Buffer(_capacity) with Comparable[ShortBuffer] {
private[nio] type ElementType = Short
private[nio] type BufferType = ShortBuffer
private[nio] type TypedArrayType = Int16Array
def this(_capacity: Int) = this(_capacity, null, -1)
def slice(): ShortBuffer
def duplicate(): ShortBuffer
def asReadOnlyBuffer(): ShortBuffer
def get(): Short
def put(s: Short): ShortBuffer
def get(index: Int): Short
def put(index: Int, s: Short): ShortBuffer
@noinline
def get(dst: Array[Short], offset: Int, length: Int): ShortBuffer =
GenBuffer(this).generic_get(dst, offset, length)
def get(dst: Array[Short]): ShortBuffer =
get(dst, 0, dst.length)
@noinline
def put(src: ShortBuffer): ShortBuffer =
GenBuffer(this).generic_put(src)
@noinline
def put(src: Array[Short], offset: Int, length: Int): ShortBuffer =
GenBuffer(this).generic_put(src, offset, length)
final def put(src: Array[Short]): ShortBuffer =
put(src, 0, src.length)
@inline final def hasArray(): Boolean =
GenBuffer(this).generic_hasArray()
@inline final def array(): Array[Short] =
GenBuffer(this).generic_array()
@inline final def arrayOffset(): Int =
GenBuffer(this).generic_arrayOffset()
def compact(): ShortBuffer
def isDirect(): Boolean
// toString(): String inherited from Buffer
@noinline
override def hashCode(): Int =
GenBuffer(this).generic_hashCode(ShortBuffer.HashSeed)
override def equals(that: Any): Boolean = that match {
case that: ShortBuffer => compareTo(that) == 0
case _ => false
}
@noinline
def compareTo(that: ShortBuffer): Int =
GenBuffer(this).generic_compareTo(that)(_.compareTo(_))
def order(): ByteOrder
// Internal API
private[nio] def load(index: Int): Short
private[nio] def store(index: Int, elem: Short): Unit
@inline
private[nio] def load(startIndex: Int,
dst: Array[Short], offset: Int, length: Int): Unit =
GenBuffer(this).generic_load(startIndex, dst, offset, length)
@inline
private[nio] def store(startIndex: Int,
src: Array[Short], offset: Int, length: Int): Unit =
GenBuffer(this).generic_store(startIndex, src, offset, length)
}
| jmnarloch/scala-js | javalib/src/main/scala/java/nio/ShortBuffer.scala | Scala | bsd-3-clause | 2,925 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server
import java.util
import java.util.Collections
import kafka.utils.MockTime
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.protocol.Errors
import org.apache.kafka.common.record.Records
import org.apache.kafka.common.requests.FetchMetadata.{FINAL_EPOCH, INVALID_SESSION_ID}
import org.apache.kafka.common.requests.{FetchRequest, FetchResponse, FetchMetadata => JFetchMetadata}
import org.junit.Assert._
import org.junit.rules.Timeout
import org.junit.{Rule, Test}
class FetchSessionTest {
@Rule
def globalTimeout = Timeout.millis(120000)
@Test
def testNewSessionId(): Unit = {
val cache = new FetchSessionCache(3, 100)
for (i <- 0 to 10000) {
val id = cache.newSessionId()
assertTrue(id > 0)
}
}
def assertCacheContains(cache: FetchSessionCache, sessionIds: Int*) = {
var i = 0
for (sessionId <- sessionIds) {
i = i + 1
assertTrue("Missing session " + i + " out of " + sessionIds.size + "(" + sessionId + ")",
cache.get(sessionId).isDefined)
}
assertEquals(sessionIds.size, cache.size())
}
private def dummyCreate(size: Int)() = {
val cacheMap = new FetchSession.CACHE_MAP(size)
for (i <- 0 to (size - 1)) {
cacheMap.add(new CachedPartition("test", i))
}
cacheMap
}
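  // Editor's note: the trailing empty parameter list makes `dummyCreate(n)` a
  // zero-argument factory (a () => CACHE_MAP), so the dummy cache map is only
  // built if maybeCreateSession actually decides to create a session.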
@Test
def testSessionCache(): Unit = {
val cache = new FetchSessionCache(3, 100)
assertEquals(0, cache.size())
val id1 = cache.maybeCreateSession(0, false, 10, dummyCreate(10))
val id2 = cache.maybeCreateSession(10, false, 20, dummyCreate(20))
val id3 = cache.maybeCreateSession(20, false, 30, dummyCreate(30))
assertEquals(INVALID_SESSION_ID, cache.maybeCreateSession(30, false, 40, dummyCreate(40)))
assertEquals(INVALID_SESSION_ID, cache.maybeCreateSession(40, false, 5, dummyCreate(5)))
assertCacheContains(cache, id1, id2, id3)
cache.touch(cache.get(id1).get, 200)
val id4 = cache.maybeCreateSession(210, false, 11, dummyCreate(11))
assertCacheContains(cache, id1, id3, id4)
cache.touch(cache.get(id1).get, 400)
cache.touch(cache.get(id3).get, 390)
cache.touch(cache.get(id4).get, 400)
val id5 = cache.maybeCreateSession(410, false, 50, dummyCreate(50))
assertCacheContains(cache, id3, id4, id5)
assertEquals(INVALID_SESSION_ID, cache.maybeCreateSession(410, false, 5, dummyCreate(5)))
val id6 = cache.maybeCreateSession(410, true, 5, dummyCreate(5))
assertCacheContains(cache, id3, id5, id6)
}
@Test
def testResizeCachedSessions(): Unit = {
val cache = new FetchSessionCache(2, 100)
assertEquals(0, cache.totalPartitions())
assertEquals(0, cache.size())
assertEquals(0, cache.evictionsMeter.count())
val id1 = cache.maybeCreateSession(0, false, 2, dummyCreate(2))
assertTrue(id1 > 0)
assertCacheContains(cache, id1)
val session1 = cache.get(id1).get
assertEquals(2, session1.size())
assertEquals(2, cache.totalPartitions())
assertEquals(1, cache.size())
assertEquals(0, cache.evictionsMeter.count())
val id2 = cache.maybeCreateSession(0, false, 4, dummyCreate(4))
val session2 = cache.get(id2).get
assertTrue(id2 > 0)
assertCacheContains(cache, id1, id2)
assertEquals(6, cache.totalPartitions())
assertEquals(2, cache.size())
assertEquals(0, cache.evictionsMeter.count())
cache.touch(session1, 200)
cache.touch(session2, 200)
val id3 = cache.maybeCreateSession(200, false, 5, dummyCreate(5))
assertTrue(id3 > 0)
assertCacheContains(cache, id2, id3)
assertEquals(9, cache.totalPartitions())
assertEquals(2, cache.size())
assertEquals(1, cache.evictionsMeter.count())
cache.remove(id3)
assertCacheContains(cache, id2)
assertEquals(1, cache.size())
assertEquals(1, cache.evictionsMeter.count())
assertEquals(4, cache.totalPartitions())
val iter = session2.partitionMap.iterator()
iter.next()
iter.remove()
assertEquals(3, session2.size())
assertEquals(4, session2.cachedSize)
cache.touch(session2, session2.lastUsedMs)
assertEquals(3, cache.totalPartitions())
}
val EMPTY_PART_LIST = Collections.unmodifiableList(new util.ArrayList[TopicPartition]())
@Test
def testFetchRequests(): Unit = {
val time = new MockTime()
val cache = new FetchSessionCache(10, 1000)
val fetchManager = new FetchManager(time, cache)
// Verify that SESSIONLESS requests get a SessionlessFetchContext
val context = fetchManager.newContext(JFetchMetadata.LEGACY,
new util.HashMap[TopicPartition, FetchRequest.PartitionData](), EMPTY_PART_LIST, true)
assertEquals(classOf[SessionlessFetchContext], context.getClass)
// Create a new fetch session with a FULL fetch request
val reqData2 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
reqData2.put(new TopicPartition("foo", 0), new FetchRequest.PartitionData(0, 0, 100))
reqData2.put(new TopicPartition("foo", 1), new FetchRequest.PartitionData(10, 0, 100))
val context2 = fetchManager.newContext(JFetchMetadata.INITIAL, reqData2, EMPTY_PART_LIST, false)
assertEquals(classOf[FullFetchContext], context2.getClass)
val reqData2Iter = reqData2.entrySet().iterator()
context2.foreachPartition((topicPart, data) => {
val entry = reqData2Iter.next()
assertEquals(entry.getKey, topicPart)
assertEquals(entry.getValue, data)
})
assertEquals(0, context2.getFetchOffset(new TopicPartition("foo", 0)).get)
assertEquals(10, context2.getFetchOffset(new TopicPartition("foo", 1)).get)
val respData2 = new util.LinkedHashMap[TopicPartition, FetchResponse.PartitionData[Records]]
respData2.put(new TopicPartition("foo", 0), new FetchResponse.PartitionData(
Errors.NONE, 100, 100, 100, null, null))
respData2.put(new TopicPartition("foo", 1), new FetchResponse.PartitionData(
Errors.NONE, 10, 10, 10, null, null))
val resp2 = context2.updateAndGenerateResponseData(respData2)
assertEquals(Errors.NONE, resp2.error())
assertTrue(resp2.sessionId() != INVALID_SESSION_ID)
assertEquals(respData2, resp2.responseData())
// Test trying to create a new session with an invalid epoch
val context3 = fetchManager.newContext(
new JFetchMetadata(resp2.sessionId(), 5), reqData2, EMPTY_PART_LIST, false)
assertEquals(classOf[SessionErrorContext], context3.getClass)
assertEquals(Errors.INVALID_FETCH_SESSION_EPOCH,
context3.updateAndGenerateResponseData(respData2).error())
// Test trying to create a new session with a non-existent session id
val context4 = fetchManager.newContext(
new JFetchMetadata(resp2.sessionId() + 1, 1), reqData2, EMPTY_PART_LIST, false)
assertEquals(classOf[SessionErrorContext], context4.getClass)
assertEquals(Errors.FETCH_SESSION_ID_NOT_FOUND,
context4.updateAndGenerateResponseData(respData2).error())
// Continue the first fetch session we created.
val reqData5 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
val context5 = fetchManager.newContext(
new JFetchMetadata(resp2.sessionId(), 1), reqData5, EMPTY_PART_LIST, false)
assertEquals(classOf[IncrementalFetchContext], context5.getClass)
val reqData5Iter = reqData2.entrySet().iterator()
context5.foreachPartition((topicPart, data) => {
val entry = reqData5Iter.next()
assertEquals(entry.getKey, topicPart)
assertEquals(entry.getValue, data)
})
assertEquals(10, context5.getFetchOffset(new TopicPartition("foo", 1)).get)
val resp5 = context5.updateAndGenerateResponseData(respData2)
assertEquals(Errors.NONE, resp5.error())
assertEquals(resp2.sessionId(), resp5.sessionId())
assertEquals(0, resp5.responseData().size())
// Test setting an invalid fetch session epoch.
val context6 = fetchManager.newContext(
new JFetchMetadata(resp2.sessionId(), 5), reqData2, EMPTY_PART_LIST, false)
assertEquals(classOf[SessionErrorContext], context6.getClass)
assertEquals(Errors.INVALID_FETCH_SESSION_EPOCH,
context6.updateAndGenerateResponseData(respData2).error())
// Test generating a throttled response for the incremental fetch session
val reqData7 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
val context7 = fetchManager.newContext(
new JFetchMetadata(resp2.sessionId(), 2), reqData7, EMPTY_PART_LIST, false)
val resp7 = context7.getThrottledResponse(100)
assertEquals(Errors.NONE, resp7.error())
assertEquals(resp2.sessionId(), resp7.sessionId())
assertEquals(100, resp7.throttleTimeMs())
// Close the incremental fetch session.
val prevSessionId = resp5.sessionId
var nextSessionId = prevSessionId
do {
val reqData8 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
reqData8.put(new TopicPartition("bar", 0), new FetchRequest.PartitionData(0, 0, 100))
reqData8.put(new TopicPartition("bar", 1), new FetchRequest.PartitionData(10, 0, 100))
val context8 = fetchManager.newContext(
new JFetchMetadata(prevSessionId, FINAL_EPOCH), reqData8, EMPTY_PART_LIST, false)
assertEquals(classOf[SessionlessFetchContext], context8.getClass)
assertEquals(0, cache.size())
val respData8 = new util.LinkedHashMap[TopicPartition, FetchResponse.PartitionData[Records]]
respData8.put(new TopicPartition("bar", 0),
new FetchResponse.PartitionData(Errors.NONE, 100, 100, 100, null, null))
respData8.put(new TopicPartition("bar", 1),
new FetchResponse.PartitionData(Errors.NONE, 100, 100, 100, null, null))
val resp8 = context8.updateAndGenerateResponseData(respData8)
assertEquals(Errors.NONE, resp8.error())
nextSessionId = resp8.sessionId()
} while (nextSessionId == prevSessionId)
}
@Test
def testIncrementalFetchSession(): Unit = {
val time = new MockTime()
val cache = new FetchSessionCache(10, 1000)
val fetchManager = new FetchManager(time, cache)
// Create a new fetch session with foo-0 and foo-1
val reqData1 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
reqData1.put(new TopicPartition("foo", 0), new FetchRequest.PartitionData(0, 0, 100))
reqData1.put(new TopicPartition("foo", 1), new FetchRequest.PartitionData(10, 0, 100))
val context1 = fetchManager.newContext(JFetchMetadata.INITIAL, reqData1, EMPTY_PART_LIST, false)
assertEquals(classOf[FullFetchContext], context1.getClass)
val respData1 = new util.LinkedHashMap[TopicPartition, FetchResponse.PartitionData[Records]]
respData1.put(new TopicPartition("foo", 0), new FetchResponse.PartitionData(
Errors.NONE, 100, 100, 100, null, null))
respData1.put(new TopicPartition("foo", 1), new FetchResponse.PartitionData(
Errors.NONE, 10, 10, 10, null, null))
val resp1 = context1.updateAndGenerateResponseData(respData1)
assertEquals(Errors.NONE, resp1.error())
assertTrue(resp1.sessionId() != INVALID_SESSION_ID)
assertEquals(2, resp1.responseData().size())
// Create an incremental fetch request that removes foo-0 and adds bar-0
val reqData2 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
reqData2.put(new TopicPartition("bar", 0), new FetchRequest.PartitionData(15, 0, 0))
val removed2 = new util.ArrayList[TopicPartition]
removed2.add(new TopicPartition("foo", 0))
val context2 = fetchManager.newContext(
new JFetchMetadata(resp1.sessionId(), 1), reqData2, removed2, false)
assertEquals(classOf[IncrementalFetchContext], context2.getClass)
val parts2 = Set(new TopicPartition("foo", 1), new TopicPartition("bar", 0))
val reqData2Iter = parts2.iterator
context2.foreachPartition((topicPart, data) => {
assertEquals(reqData2Iter.next(), topicPart)
})
assertEquals(None, context2.getFetchOffset(new TopicPartition("foo", 0)))
assertEquals(10, context2.getFetchOffset(new TopicPartition("foo", 1)).get)
assertEquals(15, context2.getFetchOffset(new TopicPartition("bar", 0)).get)
assertEquals(None, context2.getFetchOffset(new TopicPartition("bar", 2)))
val respData2 = new util.LinkedHashMap[TopicPartition, FetchResponse.PartitionData[Records]]
respData2.put(new TopicPartition("foo", 1), new FetchResponse.PartitionData(
Errors.NONE, 10, 10, 10, null, null))
respData2.put(new TopicPartition("bar", 0), new FetchResponse.PartitionData(
Errors.NONE, 10, 10, 10, null, null))
val resp2 = context2.updateAndGenerateResponseData(respData2)
assertEquals(Errors.NONE, resp2.error())
assertEquals(1, resp2.responseData().size())
assertTrue(resp2.sessionId() > 0)
}
@Test
def testZeroSizeFetchSession(): Unit = {
val time = new MockTime()
val cache = new FetchSessionCache(10, 1000)
val fetchManager = new FetchManager(time, cache)
// Create a new fetch session with foo-0 and foo-1
val reqData1 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
reqData1.put(new TopicPartition("foo", 0), new FetchRequest.PartitionData(0, 0, 100))
reqData1.put(new TopicPartition("foo", 1), new FetchRequest.PartitionData(10, 0, 100))
val context1 = fetchManager.newContext(JFetchMetadata.INITIAL, reqData1, EMPTY_PART_LIST, false)
assertEquals(classOf[FullFetchContext], context1.getClass)
val respData1 = new util.LinkedHashMap[TopicPartition, FetchResponse.PartitionData[Records]]
respData1.put(new TopicPartition("foo", 0), new FetchResponse.PartitionData(
Errors.NONE, 100, 100, 100, null, null))
respData1.put(new TopicPartition("foo", 1), new FetchResponse.PartitionData(
Errors.NONE, 10, 10, 10, null, null))
val resp1 = context1.updateAndGenerateResponseData(respData1)
assertEquals(Errors.NONE, resp1.error())
assertTrue(resp1.sessionId() != INVALID_SESSION_ID)
assertEquals(2, resp1.responseData().size())
// Create an incremental fetch request that removes foo-0 and foo-1
// Verify that the previous fetch session was closed.
val reqData2 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
val removed2 = new util.ArrayList[TopicPartition]
removed2.add(new TopicPartition("foo", 0))
removed2.add(new TopicPartition("foo", 1))
val context2 = fetchManager.newContext(
new JFetchMetadata(resp1.sessionId(), 1), reqData2, removed2, false)
assertEquals(classOf[SessionlessFetchContext], context2.getClass)
val respData2 = new util.LinkedHashMap[TopicPartition, FetchResponse.PartitionData[Records]]
val resp2 = context2.updateAndGenerateResponseData(respData2)
assertEquals(INVALID_SESSION_ID, resp2.sessionId())
assertTrue(resp2.responseData().isEmpty)
assertEquals(0, cache.size())
}
}
| Ishiihara/kafka | core/src/test/scala/unit/kafka/server/FetchSessionTest.scala | Scala | apache-2.0 | 15,623 |
/*
* Copyright 2016-2017 original author or authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tap.data
/**
* Created by [email protected] on 17/10/17.
*/
trait TapAnalytics {
}
| uts-cic/tap | src/main/scala/tap/data/TapAnalytics.scala | Scala | apache-2.0 | 721 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.metron.integration.wrapper
import java.util.Properties
import kafka.server.{KafkaServer, RunningAsBroker}
import kafka.utils.TestUtils
object TestUtilsWrapper {
// Don't expose properties with defaults
def createBrokerConfig(nodeId: Int, zkConnect: String, port: Int): Properties = {
TestUtils.createBrokerConfig(nodeId = nodeId, zkConnect = zkConnect, port = port)
}
def waitUntilBrokerIsRunning(kafkaServer: KafkaServer, msg: String, waitTime: Long) = {
TestUtils.waitUntilTrue(()=>{ kafkaServer.brokerState.currentState == RunningAsBroker.state},msg,waitTime)
}
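  // Hypothetical usage sketch (added comment, not in the original source),
  // assuming the usual KafkaConfig wiring around it:
  //   val props = TestUtilsWrapper.createBrokerConfig(0, zkConnect, 9092)
  //   val server = new KafkaServer(new KafkaConfig(props))
  //   server.startup()
  //   TestUtilsWrapper.waitUntilBrokerIsRunning(server, "broker did not start", 30000L)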
}
 | justinleet/metron | metron-platform/metron-integration-test/src/main/scala/org/apache/metron/integration/wrapper/TestUtilsWrapper.scala | Scala | apache-2.0 | 1,423 |
import scala.io.Source
object Solution extends App {
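  // Approach (added comment, not in the original source): the first input line
  // (the string length) is skipped; for every unordered pair of distinct
  // characters the string is reduced to just those two characters, kept only if
  // it strictly alternates, and the longest survivor wins (0 if none remain).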
val lines = Source.stdin.getLines().toList
val s = lines.tail.head
val characters = s.distinct
val strings = (for {
i <- 0 until characters.length - 1
j <- i + 1 until characters.length
} yield {
val c1 = characters(i)
val c2 = characters(j)
val subset = s.replaceAll("[^" + c1 + "" + c2 + "]", "")
subset
}).filter(isAlternating)
val maxLength = if(strings.isEmpty) 0 else strings.maxBy(_.length).length
println(maxLength)
def isAlternating(s: String): Boolean = s.sliding(2).forall(ss => ss(0) != ss(1))
} | PaulNoth/hackerrank | practice/algorithms/strings/two_characters/solution.scala | Scala | mit | 606 |
package dtc.cats.instances
import java.time._
import cats.Invariant
import dtc.Local
object local extends CatsLocalInstances
trait CatsLocalInstances {
implicit val localInvariant: Invariant[Local] = new Invariant[Local] {
def imap[A, B](ev: Local[A])(f: A => B)(g: B => A): Local[B] = new Local[B] {
def compare(x: B, y: B): Int = ev.compare(g(x), g(y))
def millisecond(x: B): Int = ev.millisecond(g(x))
def second(t: B): Int = ev.second(g(t))
def minute(t: B): Int = ev.minute(g(t))
def hour(t: B): Int = ev.hour(g(t))
def dayOfMonth(t: B): Int = ev.dayOfMonth(g(t))
def dayOfWeek(x: B): DayOfWeek = ev.dayOfWeek(g(x))
def month(t: B): Int = ev.month(g(t))
def year(t: B): Int = ev.year(g(t))
def date(x: B): LocalDate = ev.date(g(x))
def time(x: B): LocalTime = ev.time(g(x))
def plus(x: B, d: Duration): B = f(ev.plus(g(x), d))
def minus(x: B, d: Duration): B = f(ev.minus(g(x), d))
def plusDays(x: B, days: Int): B = f(ev.plusDays(g(x), days))
def plusMonths(x: B, months: Int): B = f(ev.plusMonths(g(x), months))
def plusYears(x: B, years: Int): B = f(ev.plusYears(g(x), years))
def withYear(x: B, year: Int): B = f(ev.withYear(g(x), year))
def withMonth(x: B, month: Int): B = f(ev.withMonth(g(x), month))
def withDayOfMonth(x: B, dayOfMonth: Int): B = f(ev.withDayOfMonth(g(x), dayOfMonth))
def withHour(x: B, hour: Int): B = f(ev.withHour(g(x), hour))
def withMinute(x: B, minute: Int): B = f(ev.withMinute(g(x), minute))
def withSecond(x: B, second: Int): B = f(ev.withSecond(g(x), second))
def withMillisecond(x: B, millisecond: Int): B = f(ev.withMillisecond(g(x), millisecond))
def withTime(x: B, time: LocalTime): B = f(ev.withTime(g(x), time))
def withDate(x: B, date: LocalDate): B = f(ev.withDate(g(x), date))
def yearsUntil(x: B, until: B): Long = ev.yearsUntil(g(x), g(until))
def monthsUntil(x: B, until: B): Long = ev.monthsUntil(g(x), g(until))
def daysUntil(x: B, until: B): Long = ev.daysUntil(g(x), g(until))
def hoursUntil(x: B, until: B): Long = ev.hoursUntil(g(x), g(until))
def minutesUntil(x: B, until: B): Long = ev.minutesUntil(g(x), g(until))
def secondsUntil(x: B, until: B): Long = ev.secondsUntil(g(x), g(until))
def millisecondsUntil(x: B, until: B): Long = ev.millisecondsUntil(g(x), g(until))
def of(date: LocalDate, time: LocalTime): B = f(ev.of(date, time))
def of(year: Int, month: Int, day: Int, hour: Int, minute: Int, second: Int, millisecond: Int): B =
f(ev.of(year, month, day, hour, minute, second, millisecond))
}
}
}
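// Illustrative sketch (not part of the original source): the Invariant instance
// lets callers derive a Local for a wrapper type by mapping in both directions.
// MyStamp is hypothetical, and the implicit Local[LocalDateTime] is assumed to
// be provided elsewhere (dtc ships such instances in dtc.instances).
object LocalInvariantExample {
  import local._
  final case class MyStamp(value: LocalDateTime)
  implicit def myStampLocal(implicit ev: Local[LocalDateTime]): Local[MyStamp] =
    Invariant[Local].imap(ev)(MyStamp(_))(_.value)
}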
| vpavkin/dtc | cats/shared/src/main/scala/dtc/cats/instances/local.scala | Scala | apache-2.0 | 2,695 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 Enterprise Data Management Council
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.edmcouncil.rdf_toolkit.owlapi_serializer
import java.nio.file.Path
import javax.xml.parsers.DocumentBuilderFactory
import org.w3c.dom.Document
class RdfXmlSorter private (input: Path) {
def xmlDocument: Document = {
val file = input.toFile
val dbFactory = DocumentBuilderFactory.newInstance
val dBuilder = dbFactory.newDocumentBuilder()
val doc = dBuilder.parse(file)
//
// optional, but recommended
//
// read this - http://stackoverflow.com/questions/13786607/normalization-in-dom-parsing-with-java-how-does-it-work
//
doc.getDocumentElement.normalize()
doc
}
// def sortedAsString = org.ow2.easywsdl.tooling.java2wsdl.util.XMLSorter.sort(xmlDocument) // TODO: [ABC] had to comment this out, not downloaded by build.sbt
// def printIt() = { // TODO: [ABC] had to comment this out, not downloaded by build.sbt
// print(sortedAsString)
// }
}
object RdfXmlSorter {
def apply(path: Path) = new RdfXmlSorter(path)
}
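// Hypothetical usage sketch (not part of the original source):
//   val doc = RdfXmlSorter(java.nio.file.Paths.get("ontology.rdf.xml")).xmlDocument
// This parses and normalizes the input into a DOM Document; the sorting and
// printing steps are commented out above until the missing XMLSorter dependency
// is restored.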
| edmcouncil/rdf-serializer | src/main/scala/org/edmcouncil/rdf_toolkit/owlapi_serializer/RdfXmlSorter.scala | Scala | mit | 2,169 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.calcite
import org.apache.flink.table.planner.utils.ShortcutUtils.unwrapTypeFactory
import org.apache.flink.table.types.logical.utils.LogicalTypeMerging
import org.apache.flink.table.types.logical.{DecimalType, LocalZonedTimestampType, TimestampType}
import org.apache.calcite.rel.`type`.{RelDataType, RelDataTypeFactory, RelDataTypeFactoryImpl, RelDataTypeSystemImpl}
import org.apache.calcite.sql.`type`.{SqlTypeName, SqlTypeUtil}
/**
* Custom type system for Flink.
*/
class FlinkTypeSystem extends RelDataTypeSystemImpl {
// set the maximum precision of a NUMERIC or DECIMAL type to DecimalType.MAX_PRECISION.
override def getMaxNumericPrecision: Int = DecimalType.MAX_PRECISION
// the max scale can't be greater than precision
override def getMaxNumericScale: Int = DecimalType.MAX_PRECISION
override def getDefaultPrecision(typeName: SqlTypeName): Int = typeName match {
// Calcite will limit the length of the VARCHAR field to 65536
case SqlTypeName.VARCHAR | SqlTypeName.VARBINARY =>
Int.MaxValue
// by default we support timestamp with microseconds precision (Timestamp(6))
case SqlTypeName.TIMESTAMP =>
TimestampType.DEFAULT_PRECISION
// by default we support timestamp with local time zone with microseconds precision
// Timestamp(6) with local time zone
case SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE =>
LocalZonedTimestampType.DEFAULT_PRECISION
case _ =>
super.getDefaultPrecision(typeName)
}
override def getMaxPrecision(typeName: SqlTypeName): Int = typeName match {
case SqlTypeName.VARCHAR | SqlTypeName.CHAR | SqlTypeName.VARBINARY | SqlTypeName.BINARY =>
Int.MaxValue
// The maximum precision of TIMESTAMP is 3 in Calcite,
// change it to 9 to support nanoseconds precision
case SqlTypeName.TIMESTAMP => TimestampType.MAX_PRECISION
// The maximum precision of TIMESTAMP_WITH_LOCAL_TIME_ZONE is 3 in Calcite,
// change it to 9 to support nanoseconds precision
case SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE => LocalZonedTimestampType.MAX_PRECISION
case _ =>
super.getMaxPrecision(typeName)
}
// when union a number of CHAR types of different lengths, we should cast to a VARCHAR
// this fixes the problem of CASE WHEN with different length string literals but get wrong
// result with additional space suffix
override def shouldConvertRaggedUnionTypesToVarying(): Boolean = true
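  // Illustrative example (added comment, not in the original source): given
  //   CASE WHEN cond THEN 'ab' ELSE 'abcd' END
  // a ragged CHAR union type would pad 'ab' to 'ab  ' (length 4); converting the
  // union type to VARCHAR keeps 'ab' without the trailing spaces.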
override def deriveAvgAggType(
typeFactory: RelDataTypeFactory,
argRelDataType: RelDataType)
: RelDataType = {
val argType = FlinkTypeFactory.toLogicalType(argRelDataType)
val resultType = LogicalTypeMerging.findAvgAggType(argType)
unwrapTypeFactory(typeFactory).createFieldTypeFromLogicalType(resultType)
}
override def deriveSumType(
typeFactory: RelDataTypeFactory,
argRelDataType: RelDataType)
: RelDataType = {
val argType = FlinkTypeFactory.toLogicalType(argRelDataType)
val resultType = LogicalTypeMerging.findSumAggType(argType)
unwrapTypeFactory(typeFactory).createFieldTypeFromLogicalType(resultType)
}
override def deriveDecimalPlusType(
typeFactory: RelDataTypeFactory,
type1: RelDataType,
type2: RelDataType): RelDataType = {
deriveDecimalType(typeFactory, type1, type2,
(p1, s1, p2, s2) => LogicalTypeMerging.findAdditionDecimalType(p1, s1, p2, s2))
}
override def deriveDecimalModType(
typeFactory: RelDataTypeFactory,
type1: RelDataType,
type2: RelDataType): RelDataType = {
deriveDecimalType(typeFactory, type1, type2,
(p1, s1, p2, s2) => {
if (s1 == 0 && s2 == 0) {
return type2
}
LogicalTypeMerging.findModuloDecimalType(p1, s1, p2, s2)
})
}
override def deriveDecimalDivideType(
typeFactory: RelDataTypeFactory,
type1: RelDataType,
type2: RelDataType): RelDataType = {
deriveDecimalType(typeFactory, type1, type2,
(p1, s1, p2, s2) => LogicalTypeMerging.findDivisionDecimalType(p1, s1, p2, s2))
}
override def deriveDecimalMultiplyType(
typeFactory: RelDataTypeFactory,
type1: RelDataType,
type2: RelDataType): RelDataType = {
deriveDecimalType(typeFactory, type1, type2,
(p1, s1, p2, s2) => LogicalTypeMerging.findMultiplicationDecimalType(p1, s1, p2, s2))
}
/**
* Use derivation from [[LogicalTypeMerging]] to derive decimal type.
*/
private def deriveDecimalType(
typeFactory: RelDataTypeFactory,
type1: RelDataType,
type2: RelDataType,
deriveImpl: (Int, Int, Int, Int) => DecimalType): RelDataType = {
if (SqlTypeUtil.isExactNumeric(type1) && SqlTypeUtil.isExactNumeric(type2) &&
(SqlTypeUtil.isDecimal(type1) || SqlTypeUtil.isDecimal(type2))) {
val decType1 = adjustType(typeFactory, type1)
val decType2 = adjustType(typeFactory, type2)
val result = deriveImpl(
decType1.getPrecision, decType1.getScale, decType2.getPrecision, decType2.getScale)
typeFactory.createSqlType(SqlTypeName.DECIMAL, result.getPrecision, result.getScale)
} else {
null
}
}
/**
* Java numeric will always have invalid precision/scale,
* use its default decimal precision/scale instead.
*/
private def adjustType(
typeFactory: RelDataTypeFactory,
relDataType: RelDataType): RelDataType = {
if (RelDataTypeFactoryImpl.isJavaType(relDataType)) {
typeFactory.decimalOf(relDataType)
} else {
relDataType
}
}
}
object FlinkTypeSystem {
val DECIMAL_SYSTEM_DEFAULT = new DecimalType(DecimalType.MAX_PRECISION, 18)
}
| tillrohrmann/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/calcite/FlinkTypeSystem.scala | Scala | apache-2.0 | 6,484 |
package io.udash.web.server
import io.udash.rest._
import io.udash.rpc._
import io.udash.rpc.utils.{CallLogging, DefaultAtmosphereFramework}
import io.udash.web.guide.demos.activity.{Call, CallLogger}
import io.udash.web.guide.demos.rest.MainServerREST
import io.udash.web.guide.rest.ExposedRestInterfaces
import io.udash.web.guide.rpc.ExposedRpcInterfaces
import io.udash.web.guide.{GuideExceptions, MainServerRPC}
import monix.execution.Scheduler
import org.eclipse.jetty.server.Server
import org.eclipse.jetty.server.handler.ContextHandlerCollection
import org.eclipse.jetty.server.handler.gzip.GzipHandler
import org.eclipse.jetty.server.session.SessionHandler
import org.eclipse.jetty.servlet.{DefaultServlet, ServletContextHandler, ServletHolder}
class ApplicationServer(val port: Int, homepageResourceBase: String, guideResourceBase: String)(implicit scheduler: Scheduler) {
private val server = new Server(port)
def start(): Unit = {
server.start()
}
def stop(): Unit = {
server.stop()
}
private val homepage = {
val ctx = createContextHandler(Array("udash.io", "www.udash.io", "udash.local", "127.0.0.1"))
ctx.addServlet(createStaticHandler(homepageResourceBase), "/*")
ctx
}
private val guide = {
val ctx = createContextHandler(Array("guide.udash.io", "www.guide.udash.io", "guide.udash.local", "127.0.0.2", "localhost"))
ctx.getSessionHandler.addEventListener(new org.atmosphere.cpr.SessionSupport())
ctx.addServlet(createStaticHandler(guideResourceBase), "/*")
val atmosphereHolder = {
val config = new DefaultAtmosphereServiceConfig[MainServerRPC](clientId => {
val callLogger = new CallLogger
new DefaultExposesServerRPC[MainServerRPC](new ExposedRpcInterfaces(callLogger, guideResourceBase)(clientId)) with CallLogging[MainServerRPC] {
override protected val metadata: ServerRpcMetadata[MainServerRPC] = MainServerRPC.metadata
override def log(rpcName: String, methodName: String, args: Seq[String]): Unit =
callLogger.append(Call(rpcName, methodName, args))
}
})
val framework = new DefaultAtmosphereFramework(config, exceptionsRegistry = GuideExceptions.registry)
val atmosphereHolder = new ServletHolder(new RpcServlet(framework))
atmosphereHolder.setAsyncSupported(true)
atmosphereHolder
}
ctx.addServlet(atmosphereHolder, "/atm/*")
val restHolder = new ServletHolder(
RestServlet[MainServerREST](new ExposedRestInterfaces)
)
restHolder.setAsyncSupported(true)
ctx.addServlet(restHolder, "/rest_api/*")
ctx
}
private val contexts = new ContextHandlerCollection
contexts.setHandlers(Array(homepage, guide))
private val rewriteHandler = {
import org.eclipse.jetty.rewrite.handler.RewriteRegexRule
val rewrite = new org.eclipse.jetty.rewrite.handler.RewriteHandler()
rewrite.setRewriteRequestURI(true)
rewrite.setRewritePathInfo(false)
val spaRewrite = new RewriteRegexRule
spaRewrite.setRegex("^/(?!assets|scripts|styles|atm|rest_api)(.*/?)*$")
spaRewrite.setReplacement("/")
rewrite.addRule(spaRewrite)
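    // Added comment (not in the original source): any request path that does not
    // start with assets, scripts, styles, atm or rest_api is rewritten to "/",
    // so deep links get the SPA entry page and are resolved by the client-side
    // router.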
rewrite.setHandler(contexts)
rewrite
}
server.setHandler(rewriteHandler)
private def createContextHandler(hosts: Array[String]): ServletContextHandler = {
val context = new ServletContextHandler
context.setSessionHandler(new SessionHandler)
context.setGzipHandler(new GzipHandler)
context.setVirtualHosts(hosts)
context
}
private def createStaticHandler(resourceBase: String): ServletHolder = {
val appHolder = new ServletHolder(new DefaultServlet)
appHolder.setAsyncSupported(true)
appHolder.setInitParameter("resourceBase", resourceBase)
appHolder
}
}
 | UdashFramework/udash-core | guide/backend/src/main/scala/io/udash/web/server/ApplicationServer.scala | Scala | apache-2.0 | 3,764 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.optimizer
import org.apache.spark.sql.catalyst.SimpleCatalystConf
import org.apache.spark.sql.catalyst.analysis.EliminateSubqueryAliases
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.Add
import org.apache.spark.sql.catalyst.plans.{FullOuter, LeftOuter, PlanTest, RightOuter}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules._
class LimitPushdownSuite extends PlanTest {
private object Optimize extends RuleExecutor[LogicalPlan] {
val batches =
Batch("Subqueries", Once,
EliminateSubqueryAliases) ::
Batch("Limit pushdown", FixedPoint(100),
LimitPushDown(conf),
CombineLimits,
ConstantFolding,
BooleanSimplification) :: Nil
}
private val testRelation = LocalRelation('a.int, 'b.int, 'c.int)
private val testRelation2 = LocalRelation('d.int, 'e.int, 'f.int)
private val x = testRelation.subquery('x)
private val y = testRelation.subquery('y)
// Union ---------------------------------------------------------------------------------------
test("Union: limit to each side") {
val unionQuery = Union(testRelation, testRelation2).limit(1)
val unionOptimized = Optimize.execute(unionQuery.analyze)
val unionCorrectAnswer =
Limit(1, Union(LocalLimit(1, testRelation), LocalLimit(1, testRelation2))).analyze
comparePlans(unionOptimized, unionCorrectAnswer)
}
test("Union: limit to each side with constant-foldable limit expressions") {
val unionQuery = Union(testRelation, testRelation2).limit(Add(1, 1))
val unionOptimized = Optimize.execute(unionQuery.analyze)
val unionCorrectAnswer =
Limit(2, Union(LocalLimit(2, testRelation), LocalLimit(2, testRelation2))).analyze
comparePlans(unionOptimized, unionCorrectAnswer)
}
test("Union: limit to each side with the new limit number") {
val unionQuery = Union(testRelation, testRelation2.limit(3)).limit(1)
val unionOptimized = Optimize.execute(unionQuery.analyze)
val unionCorrectAnswer =
Limit(1, Union(LocalLimit(1, testRelation), LocalLimit(1, testRelation2))).analyze
comparePlans(unionOptimized, unionCorrectAnswer)
}
test("Union: no limit to both sides if children having smaller limit values") {
val unionQuery = Union(testRelation.limit(1), testRelation2.select('d).limit(1)).limit(2)
val unionOptimized = Optimize.execute(unionQuery.analyze)
val unionCorrectAnswer =
Limit(2, Union(testRelation.limit(1), testRelation2.select('d).limit(1))).analyze
comparePlans(unionOptimized, unionCorrectAnswer)
}
test("Union: limit to each sides if children having larger limit values") {
val testLimitUnion = Union(testRelation.limit(3), testRelation2.select('d).limit(4))
val unionQuery = testLimitUnion.limit(2)
val unionOptimized = Optimize.execute(unionQuery.analyze)
val unionCorrectAnswer =
Limit(2, Union(LocalLimit(2, testRelation), LocalLimit(2, testRelation2.select('d)))).analyze
comparePlans(unionOptimized, unionCorrectAnswer)
}
// Outer join ----------------------------------------------------------------------------------
test("left outer join") {
val originalQuery = x.join(y, LeftOuter).limit(1)
val optimized = Optimize.execute(originalQuery.analyze)
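    // Note (added comment, not in the original source): for a LEFT OUTER JOIN the
    // limit can only be pushed into the left, row-preserving side of the join.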
    val correctAnswer = Limit(1, LocalLimit(1, x).join(y, LeftOuter)).analyze
comparePlans(optimized, correctAnswer)
}
test("right outer join") {
val originalQuery = x.join(y, RightOuter).limit(1)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = Limit(1, x.join(LocalLimit(1, y), RightOuter)).analyze
comparePlans(optimized, correctAnswer)
}
test("larger limits are not pushed on top of smaller ones in right outer join") {
val originalQuery = x.join(y.limit(5), RightOuter).limit(10)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = Limit(10, x.join(Limit(5, y), RightOuter)).analyze
comparePlans(optimized, correctAnswer)
}
test("full outer join where neither side is limited and both sides have same statistics") {
assert(x.stats(conf).sizeInBytes === y.stats(conf).sizeInBytes)
val originalQuery = x.join(y, FullOuter).limit(1)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = Limit(1, LocalLimit(1, x).join(y, FullOuter)).analyze
comparePlans(optimized, correctAnswer)
}
test("full outer join where neither side is limited and left side has larger statistics") {
val xBig = testRelation.copy(data = Seq.fill(2)(null)).subquery('x)
assert(xBig.stats(conf).sizeInBytes > y.stats(conf).sizeInBytes)
val originalQuery = xBig.join(y, FullOuter).limit(1)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = Limit(1, LocalLimit(1, xBig).join(y, FullOuter)).analyze
comparePlans(optimized, correctAnswer)
}
test("full outer join where neither side is limited and right side has larger statistics") {
val yBig = testRelation.copy(data = Seq.fill(2)(null)).subquery('y)
assert(x.stats(conf).sizeInBytes < yBig.stats(conf).sizeInBytes)
val originalQuery = x.join(yBig, FullOuter).limit(1)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = Limit(1, x.join(LocalLimit(1, yBig), FullOuter)).analyze
comparePlans(optimized, correctAnswer)
}
test("full outer join where both sides are limited") {
val originalQuery = x.limit(2).join(y.limit(2), FullOuter).limit(1)
val optimized = Optimize.execute(originalQuery.analyze)
val correctAnswer = Limit(1, Limit(2, x).join(Limit(2, y), FullOuter)).analyze
comparePlans(optimized, correctAnswer)
}
}
| sachintyagi22/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LimitPushdownSuite.scala | Scala | apache-2.0 | 6,646 |
package day9
import cats._
import cats.data._
import cats.implicits._
import scala.util.Try
import scala.annotation.tailrec
case class Prob[A](list: List[(A, Double)])
trait ProbInstances { self =>
def flatten[B](xs: Prob[Prob[B]]): Prob[B] = {
def multall(innerxs: Prob[B], p: Double) =
innerxs.list map { case (x, r) => (x, p * r) }
Prob((xs.list map { case (innerxs, p) => multall(innerxs, p) }).flatten)
}
implicit val probInstance: Monad[Prob] = new Monad[Prob] {
def pure[A](a: A): Prob[A] = Prob((a, 1.0) :: Nil)
def flatMap[A, B](fa: Prob[A])(f: A => Prob[B]): Prob[B] = self.flatten(map(fa)(f))
override def map[A, B](fa: Prob[A])(f: A => B): Prob[B] =
Prob(fa.list map { case (x, p) => (f(x), p) })
def tailRecM[A, B](a: A)(f: A => Prob[Either[A, B]]): Prob[B] = {
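      // Added comment (not in the original source): a stack-safe loop -- lists is
      // a stack of pending weighted outcomes; a Right(b) is emitted into the
      // buffer, while a Left(a) pushes f(a).list for further expansion, so the
      // recursion depth does not grow with the number of steps.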
val buf = List.newBuilder[(B, Double)]
@tailrec def go(lists: List[List[(Either[A, B], Double)]]): Unit =
lists match {
case (ab :: abs) :: tail => ab match {
case (Right(b), p) =>
buf += ((b, p))
go(abs :: tail)
case (Left(a), p) =>
go(f(a).list :: abs :: tail)
}
case Nil :: tail => go(tail)
case Nil => ()
}
go(f(a).list :: Nil)
Prob(buf.result)
}
}
implicit def probShow[A]: Show[Prob[A]] = Show.fromToString
}
case object Prob extends ProbInstances
sealed trait Coin
object Coin {
case object Heads extends Coin
case object Tails extends Coin
implicit val coinEq: Eq[Coin] = new Eq[Coin] {
def eqv(a1: Coin, a2: Coin): Boolean = a1 == a2
}
def heads: Coin = Heads
def tails: Coin = Tails
}
object Main extends App {
def join[F[_]: FlatMap, A](fa: F[F[A]]): F[A] =
fa.flatten
println(s"""
join(1.some.some)
${join(1.some.some)}
""")
println(s"""
FlatMap[Option].flatten(1.some.some)
${FlatMap[Option].flatten(1.some.some)}
""")
def binSmalls(acc: Int, x: Int): Option[Int] =
if (x > 9) none[Int] else (acc + x).some
println(s"""
(Foldable[List].foldM(List(2, 8, 3, 1), 0) {binSmalls})
${(Foldable[List].foldM(List(2, 8, 3, 1), 0) {binSmalls})}
""")
println(s"""
(Foldable[List].foldM(List(2, 11, 3, 1), 0) {binSmalls})
${(Foldable[List].foldM(List(2, 11, 3, 1), 0) {binSmalls})}
""")
def foldingFunction(list: List[Double], next: String): Option[List[Double]] =
(list, next) match {
case (x :: y :: ys, "*") => ((y * x) :: ys).some
case (x :: y :: ys, "+") => ((y + x) :: ys).some
case (x :: y :: ys, "-") => ((y - x) :: ys).some
case (xs, numString) => parseInt(numString) map {_ :: xs}
}
def parseInt(x: String): Option[Int] =
(scala.util.Try(x.toInt) map { Some(_) }
recover { case _: NumberFormatException => None }).get
println(s"""
foldingFunction(List(3, 2), "*")
${foldingFunction(List(3, 2), "*")}
""")
println(s"""
foldingFunction(Nil, "*")
${foldingFunction(Nil, "*")}
""")
println(s"""
foldingFunction(Nil, "haha")
${foldingFunction(Nil, "haha")}
""")
def solveRPN(s: String): Option[Double] =
for {
List(x) <- (
Foldable[List].foldM(
s.split(' ').toList,
Nil: List[Double]
) {foldingFunction}
)
} yield x
println(s"""
solveRPN("1 2 * 4 +")
${solveRPN("1 2 * 4 +")}
""")
println(s"""
solveRPN("1 2 * 4")
${solveRPN("1 2 * 4")}
""")
println(s"""
solveRPN("1 haha * 4 +")
${solveRPN("1 haha * 4 +")}
""")
val f = Kleisli { (x: Int) => (x + 1).some }
val g = Kleisli { (x: Int) => (x * 100).some }
println(s"""
4.some >>= (f compose g).run
${4.some >>= (f compose g).run}
""")
println(s"""
4.some >>= (f andThen g).run
${4.some >>= (f andThen g).run}
""")
val l = f.lift[List]
println(s"""
List(1, 2, 3) >>= l.run
${List(1, 2, 3) >>= l.run}
""")
println(s"""
Prob((3, 0.5) :: (5, 0.25) :: (9, 0.25) :: Nil) map {-_}
${Prob((3, 0.5) :: (5, 0.25) :: (9, 0.25) :: Nil) map {-_}}
""")
import Coin.{heads, tails}
def coin: Prob[Coin] = Prob(heads -> 0.5 :: tails -> 0.5 :: Nil)
def loadedCoin: Prob[Coin] = Prob(heads -> 0.1 :: tails -> 0.9 :: Nil)
def flipThree: Prob[Boolean] = for {
a <- coin
b <- coin
c <- loadedCoin
} yield { List(a, b, c) forall {_ === tails} }
println(s"""
flipThree
${flipThree}
""")
val xss = List(List(1), List(2, 3), List(4))
println(s"""
xss.flatten
${xss.flatten}
""")
println(s"""
xss.foldLeft(List(): List[Int]) { _ ++ _ }
${xss.foldLeft(List(): List[Int]) { _ ++ _ }}
""")
}
| wgx731/learning-cats | src/main/scala/day9.scala | Scala | unlicense | 4,705 |
package controllers
import models.User
import play.Logger
import play.api.mvc.{Action, Controller}
import play.api.data._
import play.api.data.Forms._
import services.UserServiceImpl._
import play.api.mvc._
import play.filters.csrf._
/**
* Created by svirdi on 1/11/15.
*/
object Users extends Controller {
case class LoginData(uname: String, pass: String)
val loginForm = Form(
mapping(
"username" -> nonEmptyText,
"password" -> nonEmptyText
)(LoginData.apply)(LoginData.unapply)
)
def login =
Action { request =>
Ok(views.html.users.login(loginForm)(request))
}
def authenticate =
Action { implicit request =>
loginForm.bindFromRequest.fold(e => BadRequest(views.html.users.login(e)),
formData => {
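          // Added comment (not in the original source): memoUser (from
          // UserServiceImpl, not shown here) appears to be a memoized lookup,
          // hence the doubled argument lists; the outer Option models the cache
          // layer and the inner Option the authenticated user.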
val user: Option[Option[User]] = memoUser(formData.uname, formData.pass)(formData.uname, formData.pass)
          user.map { optUser =>
            if (optUser.isDefined)
              Redirect("posts").withSession(request.session + ("admin" -> "success"))
            else
              Redirect("/svirdi")
          }.getOrElse(Redirect("/svirdi"))
})
}
}
| virdis/functional-imperative.com | app/controllers/Users.scala | Scala | gpl-2.0 | 1,162 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package statements
import org.jetbrains.plugins.scala.lang.psi.api.base.ScIdList
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScTypedDefinition
/**
* @author Alexander Podkhalyuzin
*/
trait ScVariableDeclaration extends ScVariable with ScTypedDeclaration {
def getIdList: ScIdList
override def declaredElements : Seq[ScTypedDefinition]
override def isAbstract: Boolean = true
}
 | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/api/statements/ScVariableDeclaration.scala | Scala | apache-2.0 | 470 |
package views.html
import play.templates._
import play.templates.TemplateMagic._
import play.api.templates._
import play.api.templates.PlayMagic._
import models._
import controllers._
import java.lang._
import java.util._
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import play.api.i18n._
import play.core.j.PlayMagicForJava._
import play.mvc._
import play.data._
import play.api.data.Field
import play.mvc.Http.Context.Implicit._
import views.html._
/**/
object game extends BaseScalaTemplate[play.api.templates.Html,Format[play.api.templates.Html]](play.api.templates.HtmlFormat) with play.api.templates.Template2[String,String,play.api.templates.Html] {
/**/
def apply/*1.2*/(message: String, status: String):play.api.templates.Html = {
_display_ {
Seq[Any](format.raw/*1.35*/("""
"""),_display_(Seq[Any](/*3.2*/main("Welcome to Play Framework")/*3.35*/ {_display_(Seq[Any](format.raw/*3.37*/("""
<link rel='stylesheet' href='"""),_display_(Seq[Any](/*5.31*/routes/*5.37*/.Assets.at("stylesheets/style.css"))),format.raw/*5.72*/("""'>
<div id="w">
<canvas id="game" width="720" height="641"></canvas>
<br>
<div id="status">"""),_display_(Seq[Any](/*10.23*/status)),format.raw/*10.29*/("""</div>
<img id="background" src='"""),_display_(Seq[Any](/*11.32*/routes/*11.38*/.Assets.at("images/res/background.png"))),format.raw/*11.77*/("""' class="hidden-image">
<img id="stoneb" src='"""),_display_(Seq[Any](/*12.28*/routes/*12.34*/.Assets.at("images/res/grau.png"))),format.raw/*12.67*/("""' class="hidden-image">
<img id="stonew" src='"""),_display_(Seq[Any](/*13.28*/routes/*13.34*/.Assets.at("images/res/weiss.png"))),format.raw/*13.68*/("""' class="hidden-image">
<img id="dice1" src='"""),_display_(Seq[Any](/*14.27*/routes/*14.33*/.Assets.at("images/res/dice1.png"))),format.raw/*14.67*/("""' class="hidden-image">
<img id="dice2" src='"""),_display_(Seq[Any](/*15.27*/routes/*15.33*/.Assets.at("images/res/dice2.png"))),format.raw/*15.67*/("""' class="hidden-image">
<img id="dice3" src='"""),_display_(Seq[Any](/*16.27*/routes/*16.33*/.Assets.at("images/res/dice3.png"))),format.raw/*16.67*/("""' class="hidden-image">
<img id="dice4" src='"""),_display_(Seq[Any](/*17.27*/routes/*17.33*/.Assets.at("images/res/dice4.png"))),format.raw/*17.67*/("""' class="hidden-image">
<img id="dice5" src='"""),_display_(Seq[Any](/*18.27*/routes/*18.33*/.Assets.at("images/res/dice5.png"))),format.raw/*18.67*/("""' class="hidden-image">
<img id="dice6" src='"""),_display_(Seq[Any](/*19.27*/routes/*19.33*/.Assets.at("images/res/dice6.png"))),format.raw/*19.67*/("""' class="hidden-image">
</div>
<script src='"""),_display_(Seq[Any](/*21.15*/routes/*21.21*/.Assets.at("javascripts/drawDesk.js"))),format.raw/*21.58*/("""' type="text/javascript"></script>
<script>
var msg = """),_display_(Seq[Any](/*23.16*/Html(message))),format.raw/*23.29*/(""";
document.onload = function () """),format.raw/*25.35*/("""{"""),format.raw/*25.36*/("""
paintComponent(msg);
"""),format.raw/*27.5*/("""}"""),format.raw/*27.6*/(""";
window.onload = function () """),format.raw/*29.33*/("""{"""),format.raw/*29.34*/("""
paintComponent(msg);
"""),format.raw/*31.5*/("""}"""),format.raw/*31.6*/(""";
</script>
""")))})),format.raw/*34.2*/("""
"""))}
}
def render(message:String,status:String): play.api.templates.Html = apply(message,status)
def f:((String,String) => play.api.templates.Html) = (message,status) => apply(message,status)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Mon Jan 13 10:56:52 CET 2014
SOURCE: E:/GitHub/Backgammon-Web-master_for_2.1.5/Backgammon-Web-master/app/views/game.scala.html
HASH: b02b0c14a92f8488a48e5771ab2c8da9f5921988
MATRIX: 729->1|839->34|876->37|917->70|956->72|1023->104|1037->110|1093->145|1234->250|1262->256|1336->294|1351->300|1412->339|1499->390|1514->396|1569->429|1656->480|1671->486|1727->520|1813->570|1828->576|1884->610|1970->660|1985->666|2041->700|2127->750|2142->756|2198->790|2284->840|2299->846|2355->880|2441->930|2456->936|2512->970|2598->1020|2613->1026|2669->1060|2750->1105|2765->1111|2824->1148|2919->1207|2954->1220|3019->1257|3048->1258|3109->1292|3137->1293|3200->1328|3229->1329|3290->1363|3318->1364|3363->1378
LINES: 26->1|29->1|31->3|31->3|31->3|33->5|33->5|33->5|38->10|38->10|39->11|39->11|39->11|40->12|40->12|40->12|41->13|41->13|41->13|42->14|42->14|42->14|43->15|43->15|43->15|44->16|44->16|44->16|45->17|45->17|45->17|46->18|46->18|46->18|47->19|47->19|47->19|49->21|49->21|49->21|51->23|51->23|53->25|53->25|55->27|55->27|57->29|57->29|59->31|59->31|62->34
-- GENERATED --
*/
| bfcmyxa/Backgammon-Web-master_for_2.1.5 | target/scala-2.10/src_managed/main/views/html/game.template.scala | Scala | gpl-2.0 | 4,904 |
/*
Copyright 2015 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.serialization
import java.io.{ByteArrayInputStream, InputStream, OutputStream}
import scala.util.{Failure, Success, Try}
import scala.util.control.NonFatal
/**
* In large-scale partitioning algorithms, we often use sorting. This typeclass represents something we can
 * efficiently serialize, with an added law: that we can (hopefully quickly) compare the raw data.
*/
trait OrderedSerialization[T] extends Ordering[T] with Serialization[T] {
/**
* This compares two InputStreams. After this call, the position in the InputStreams is mutated to be the
* end of the record.
*/
def compareBinary(a: InputStream, b: InputStream): OrderedSerialization.Result
}
object OrderedSerialization {
/**
* Represents the result of a comparison that might fail due to an error deserializing
*/
sealed trait Result {
/**
* Throws if the items cannot be compared
*/
def unsafeToInt: Int
def toTry: Try[Int]
}
/**
* Create a Result from an Int.
*/
def resultFrom(i: Int): Result =
if (i > 0) Greater
else if (i < 0) Less
else Equal
def resultFrom(t: Try[Int]): Result = t match {
case Success(i) => resultFrom(i)
case Failure(e) => CompareFailure(e)
}
final case class CompareFailure(ex: Throwable) extends Result {
def unsafeToInt = throw ex
def toTry = Failure(ex)
}
case object Greater extends Result {
val unsafeToInt = 1
val toTry = Success(unsafeToInt)
}
case object Equal extends Result {
val unsafeToInt = 0
val toTry = Success(unsafeToInt)
}
case object Less extends Result {
val unsafeToInt = -1
val toTry = Success(unsafeToInt)
}
def compare[T](a: T, b: T)(implicit ord: OrderedSerialization[T]): Int =
ord.compare(a, b)
def compareBinary[T](a: InputStream, b: InputStream)(implicit ord: OrderedSerialization[T]): Result =
ord.compareBinary(a, b)
def writeThenCompare[T](a: T, b: T)(implicit ordb: OrderedSerialization[T]): Result = {
val abytes = Serialization.toBytes(a)
val bbytes = Serialization.toBytes(b)
val ain = new ByteArrayInputStream(abytes)
val bin = new ByteArrayInputStream(bbytes)
ordb.compareBinary(ain, bin)
}
/**
* This is slow, but always an option. Avoid this if you can, especially for large items
*/
def readThenCompare[T: OrderedSerialization](as: InputStream, bs: InputStream): Result = try resultFrom {
val a = Serialization.read[T](as)
val b = Serialization.read[T](bs)
compare(a.get, b.get)
} catch {
case NonFatal(e) => CompareFailure(e)
}
private[this] def internalTransformer[T, U, V](
packFn: T => U,
unpackFn: U => V,
presentFn: Try[V] => Try[T]
)(implicit otherOrdSer: OrderedSerialization[U]): OrderedSerialization[T] =
new OrderedSerialization[T] {
private[this] var cache: (T, U) = null
private[this] def packCache(t: T): U = {
val readCache = cache
if (readCache == null || readCache._1 != t) {
val u = packFn(t)
cache = (t, u)
u
} else {
readCache._2
}
}
override def hash(t: T) = otherOrdSer.hash(packCache(t))
override def compareBinary(
a: java.io.InputStream,
b: java.io.InputStream
): OrderedSerialization.Result =
otherOrdSer.compareBinary(a, b)
override def compare(x: T, y: T) =
otherOrdSer.compare(packFn(x), packFn(y))
override def read(in: InputStream): Try[T] =
presentFn(otherOrdSer.read(in).map(unpackFn))
override def write(out: OutputStream, t: T): Try[Unit] =
otherOrdSer.write(out, packCache(t))
override def staticSize: Option[Int] = otherOrdSer.staticSize
override def dynamicSize(t: T): Option[Int] = otherOrdSer.dynamicSize(packCache(t))
}
def viaTransform[T, U](packFn: T => U, unpackFn: U => T)(implicit
otherOrdSer: OrderedSerialization[U]
): OrderedSerialization[T] =
internalTransformer[T, U, T](packFn, unpackFn, identity)
def viaTryTransform[T, U](packFn: T => U, unpackFn: U => Try[T])(implicit
otherOrdSer: OrderedSerialization[U]
): OrderedSerialization[T] =
internalTransformer[T, U, Try[T]](packFn, unpackFn, _.flatMap(identity))
/**
   * The serialized comparison must match the unserialized comparison.
*/
def compareBinaryMatchesCompare[T](implicit ordb: OrderedSerialization[T]): Law2[T] =
Law2(
"compare(a, b) == compareBinary(aBin, bBin)",
(a: T, b: T) => resultFrom(ordb.compare(a, b)) == writeThenCompare(a, b)
)
/**
* ordering must be transitive. If this is not so, sort-based partitioning will be broken
*/
def orderingTransitive[T](implicit ordb: OrderedSerialization[T]): Law3[T] =
Law3(
"transitivity",
(a: T, b: T, c: T) =>
if (ordb.lteq(a, b) && ordb.lteq(b, c)) { ordb.lteq(a, c) }
else true
)
/**
* ordering must be antisymmetric. If this is not so, sort-based partitioning will be broken
*/
def orderingAntisymmetry[T](implicit ordb: OrderedSerialization[T]): Law2[T] =
Law2(
"antisymmetry",
(a: T, b: T) =>
if (ordb.lteq(a, b) && ordb.lteq(b, a)) { ordb.equiv(a, b) }
else true
)
/**
* ordering must be total. If this is not so, sort-based partitioning will be broken
*/
def orderingTotality[T](implicit ordb: OrderedSerialization[T]): Law2[T] =
Law2("totality", (a: T, b: T) => (ordb.lteq(a, b) || ordb.lteq(b, a)))
def allLaws[T: OrderedSerialization]: Iterable[Law[T]] =
Serialization.allLaws ++ List[Law[T]](
compareBinaryMatchesCompare[T],
orderingTransitive[T],
orderingAntisymmetry[T],
orderingTotality[T]
)
}
/**
* This may be useful when a type is used deep in a tuple or case class, and in that case the earlier
 * comparators will likely have already done the work. Be aware that avoiding deserialization on
 * compare is usually very helpful.
*
* Note: it is your responsibility that the hash in serialization is consistent with the ordering (if
* equivalent in the ordering, the hash must match).
*/
final case class DeserializingOrderedSerialization[T](serialization: Serialization[T], ordering: Ordering[T])
extends OrderedSerialization[T] {
final override def read(i: InputStream) = serialization.read(i)
final override def write(o: OutputStream, t: T) = serialization.write(o, t)
final override def hash(t: T) = serialization.hash(t)
final override def compare(a: T, b: T) = ordering.compare(a, b)
final override def compareBinary(a: InputStream, b: InputStream) =
try OrderedSerialization.resultFrom {
compare(read(a).get, read(b).get)
} catch {
case NonFatal(e) => OrderedSerialization.CompareFailure(e)
}
final override def staticSize = serialization.staticSize
final override def dynamicSize(t: T) = serialization.dynamicSize(t)
}
case object UnitOrderedSerialization extends OrderedSerialization[Unit] with EquivSerialization[Unit] {
private[this] val same = OrderedSerialization.Equal
private[this] val someZero = Some(0)
final override def read(i: InputStream) = Serialization.successUnit
final override def write(o: OutputStream, t: Unit) = Serialization.successUnit
final override def hash(t: Unit) = 0
final override def compare(a: Unit, b: Unit) = 0
final override def compareBinary(a: InputStream, b: InputStream) =
same
final override def staticSize = someZero
final override def dynamicSize(t: Unit) = someZero
}
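// Illustrative sketch (not part of the original source): viaTransform derives an
// OrderedSerialization for a simple wrapper from the instance of its underlying
// type. UserId is hypothetical, and the implicit OrderedSerialization[Long] is
// assumed to come from elsewhere in scalding-serialization.
object ViaTransformExample {
  final case class UserId(toLong: Long)
  implicit def userIdOrdSer(
      implicit longOrdSer: OrderedSerialization[Long]): OrderedSerialization[UserId] =
    OrderedSerialization.viaTransform[UserId, Long](_.toLong, UserId(_))
}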
| twitter/scalding | scalding-serialization/src/main/scala/com/twitter/scalding/serialization/OrderedSerialization.scala | Scala | apache-2.0 | 8,123 |
package uk.gov.dvla.vehicles.presentation.common.views
import org.scalatest.selenium.WebBrowser.go
import org.scalatest.selenium.WebBrowser.pageTitle
import uk.gov.dvla.vehicles.presentation.common.composition.TestHarness
import uk.gov.dvla.vehicles.presentation.common.helpers.UiSpec
import uk.gov.dvla.vehicles.presentation.common.pages.{ErrorPanel, ValtechInputTextAreaPage}
class ValtechInputTextAreaIntegrationSpec extends UiSpec with TestHarness {
"ValtechInputTextArea integration" should {
"be presented" in new WebBrowserForSelenium {
go to ValtechInputTextAreaPage
pageTitle should equal(ValtechInputTextAreaPage.title)
}
"redirects to the next page given valid input" in new WebBrowserForSelenium {
ValtechInputTextAreaPage.navigate()
pageTitle should equal("Success") // Check the new title of the success page
}
"reject submit when field is blank" in new WebBrowserForSelenium {
ValtechInputTextAreaPage.navigate(documentReferenceNumber = "")
ErrorPanel.numberOfErrors should equal(2)
}
"reject submit when field contains less than minimum characters" in new WebBrowserForSelenium {
ValtechInputTextAreaPage.navigate(documentReferenceNumber = "1")
ErrorPanel.numberOfErrors should equal(1)
}
"reject submit when vehicleReferenceNumber contains more than maximum characters" in new WebBrowserForSelenium {
ValtechInputTextAreaPage.navigate(documentReferenceNumber = "1" * 1201)
ErrorPanel.numberOfErrors should equal(1)
}
}
}
| dvla/vehicles-presentation-common | common-test/test/uk/gov/dvla/vehicles/presentation/common/views/ValtechInputTextAreaIntegrationSpec.scala | Scala | mit | 1,549 |
/*
* Dac_~.scala
* (Cord)
*
* Copyright (c) 2015-2020 Hanns Holger Rutz.
*
* This software is published under the GNU Lesser General Public License v2.1+
*
*
* For further information, please contact Hanns Holger Rutz at
* [email protected]
*/
package de.sciss.cord
package objects
import de.sciss.cord.audio.AudioSystem
import de.sciss.cord.impl.{AudioNodeImpl, InletImpl, NoArgs, NoOutlets, ObjNodeImpl, SingleInlet}
import org.scalajs.dom
class Dac_~(val parent: Patcher)
extends ObjNodeImpl("dac~")
with AudioNodeImpl
with SingleInlet with NoOutlets with NoArgs { obj =>
private var audioNodes = List.empty[dom.AudioNode]
private def audioTarget = AudioSystem.context.destination
protected def dspStarted(): Unit = {
audioNodes = inlet.cords.collect {
case cord if cord.tpe == AudioType => cord.source.audio
}
// println(s"dac~ connecting ${audioNodes.size} nodes.")
audioNodes.foreach(_.connect(audioTarget))
}
protected def dspStopped(): Unit = {
audioNodes.foreach(_.disconnect(audioTarget))
audioNodes = Nil
}
object inlet extends InletImpl {
def accepts(tpe: Type) = true
def node: Node = obj
/** Tries to send a message into this inlet. Throws an error if `MessageType` is not accepted. */
def ! (message: M): Unit = message match {
case M(0) | M("stop" ) => parent.dsp.active = false
case M(_: Int) | M("start") => parent.dsp.active = true
}
override def cordAdded (cord: Cord): Unit = if (cord.tpe == AudioType && parent.dsp.active) {
val audioNode = cord.source.audio
audioNodes ::= audioNode
audioNode.connect(audioTarget)
}
override def cordRemoved(cord: Cord): Unit = if (cord.tpe == AudioType && parent.dsp.active) {
val audioNode = cord.source.audio
audioNodes = audioNodes.diff(audioNode :: Nil)
audioNode.disconnect(audioTarget)
}
}
}
| Sciss/Cord | src/main/scala/de/sciss/cord/objects/Dac_~.scala | Scala | lgpl-2.1 | 1,923 |
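// Editor's note: hedged sketch of the dac~ control protocol implied by the
// inlet handler above. `patcher` is a hypothetical Patcher instance; this only
// runs in a Scala.js page where the Cord framework and Web Audio are available.
// val dac = new Dac_~(patcher)
// dac.inlet ! M("start")  // patcher.dsp.active = true; audio cords get connected
// dac.inlet ! M(1)        // any non-zero Int also starts DSP
// dac.inlet ! M("stop")   // stops DSP; audio nodes are disconnected
// dac.inlet ! M(0)        // 0 likewise stops DSP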
package org.kangmo.tradeapi
import org.kangmo.http._
import org.kangmo.helper._
import java.math.BigDecimal
import scala.concurrent.{Future,Await}
import scala.concurrent.duration._
case class Version (
major : Int,
minor : Int,
revision : Int
)
case class Constants (
transactionFee : BigDecimal,
minKrwWithdrawal : BigDecimal,
maxKrwWithdrawal : BigDecimal,
krwWithdrawalFee : BigDecimal,
btcWithdrawalFee : BigDecimal,
minBtcWithdrawal : BigDecimal,
maxBtcWithdrawal : BigDecimal,
minBtcOrder : BigDecimal,
maxBtcOrder : BigDecimal,
minBtcPrice : BigDecimal,
maxBtcPrice : BigDecimal
)
case class OAuthResponse(token_type: String, access_token: String, expires_in: Long, refresh_token: String)
object API {
val market = new MarketChannel()
  // You can wrap an API call with this function to wait for its result synchronously.
def sync[T](f : Future[T]) : T = {
Await.result(f, 60 seconds /*timeout*/ )
}
def version() : Version = {
val jsonResponse = HTTP.get(URLPrefix.prefix + s"version")
val versonObject = Json.deserialize[Version](jsonResponse)
versonObject
}
def constants() : Constants = {
val jsonResponse = HTTP.get(URLPrefix.prefix + s"constants")
val constantsObject = Json.deserialize[Constants](jsonResponse)
constantsObject
}
class Channel(context: Context) {
val order = new TradeChannel(context)
val coin = new CoinChannel(context)
val fiat = new FiatChannel(context)
val user = new UserChannel(context)
}
def createChannel(apiKey:String, apiSecret: String, email:String, password:String) : Channel = {
val postData = s"client_id=${apiKey}&client_secret=${apiSecret}&username=${email}&password=${password}&grant_type=password"
val jsonResponse = HTTP.post(URLPrefix.prefix + s"oauth2/access_token", postData)
val r = Json.deserialize[OAuthResponse](jsonResponse)
new Channel( Context(r.token_type, r.access_token, r.expires_in, r.refresh_token) )
}
}
| Kangmo/korbit-nodejs-sdk | main/src/main/scala/scala/API.scala | Scala | apache-2.0 | 1,931 |
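// Editor's note: hedged usage sketch for the Korbit wrapper above (not part of
// the original file). Credentials are placeholders; the channel sub-APIs
// (order/coin/fiat/user) expose methods defined elsewhere in the SDK.
object KorbitApiSketch {
  def main(args: Array[String]): Unit = {
    val v = API.version() // blocking GET of <prefix>/version
    println(s"API v${v.major}.${v.minor}.${v.revision}")
    val c = API.constants()
    println(s"trading fee: ${c.transactionFee}, min BTC order: ${c.minBtcOrder}")
    // OAuth password-grant login returning an authenticated Channel:
    // val channel = API.createChannel("apiKey", "apiSecret", "me@example.com", "password")
    // Any Future-returning call can be forced synchronous (60s timeout):
    // val result = API.sync(someFutureCall)
  }
}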
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.elodina.mesos.exhibitor
import java.io.{IOException, InputStream, OutputStream}
import java.text.SimpleDateFormat
import java.util
import java.util.Date
import org.apache.mesos.Protos
import org.apache.mesos.Protos._
import scala.collection.JavaConversions._
import scala.collection.mutable
object Util {
def parseList(s: String, entrySep: Char = ',', valueSep: Char = '=', nullValues: Boolean = true): List[(String, String)] = {
def splitEscaped(s: String, sep: Char, unescape: Boolean = false): Array[String] = {
val parts = new util.ArrayList[String]()
var escaped = false
var part = ""
for (c <- s.toCharArray) {
        if (c == '\\' && !escaped) escaped = true
else if (c == sep && !escaped) {
parts.add(part)
part = ""
} else {
          if (escaped && !unescape) part += "\\"
part += c
escaped = false
}
}
if (escaped) throw new IllegalArgumentException("open escaping")
if (part != "") parts.add(part)
parts.toArray(Array[String]())
}
val result = new mutable.ListBuffer[(String, String)]()
if (s == null) return result.toList
for (entry <- splitEscaped(s, entrySep)) {
if (entry.trim.isEmpty) throw new IllegalArgumentException(s)
val pair = splitEscaped(entry, valueSep, unescape = true)
val key: String = pair(0).trim
val value: String = if (pair.length > 1) pair(1).trim else null
if (value == null && !nullValues) throw new IllegalArgumentException(s)
result += key -> value
}
result.toList
}
def parseMap(s: String, entrySep: Char = ',', valueSep: Char = '=', nullValues: Boolean = true): Map[String, String] = parseList(s, entrySep, valueSep, nullValues).toMap
def formatList(list: List[(String, _ <: Any)], entrySep: Char = ',', valueSep: Char = '='): String = {
def escape(s: String): String = {
var result = ""
for (c <- s.toCharArray) {
        if (c == entrySep || c == valueSep || c == '\\') result += "\\"
result += c
}
result
}
var s = ""
list.foreach { tuple =>
if (!s.isEmpty) s += entrySep
s += escape(tuple._1)
if (tuple._2 != null) s += valueSep + escape("" + tuple._2)
}
s
}
def formatMap(map: collection.Map[String, _ <: Any], entrySep: Char = ',', valueSep: Char = '='): String = formatList(map.toList, entrySep, valueSep)
def formatConstraints(constraints: scala.collection.Map[String, List[Constraint]]): String = formatList(constraints.toList.flatMap { case (name, values) =>
values.map(name -> _)
})
case class Range(start: Int, end: Int) {
def overlap(r: Range): Option[Range] = {
var x: Range = this
var y: Range = r
if (x.start > y.start) {
val t = x
x = y
y = t
}
assert(x.start <= y.start)
if (y.start > x.end) return None
assert(y.start <= x.end)
val start = y.start
val end = Math.min(x.end, y.end)
Some(Range(start, end))
}
def values: List[Int] = (start to end).toList
override def toString: String = if (start == end) s"$start" else s"$start..$end"
}
object Range {
def apply(s: String): Range = parse(s)
def apply(start: Int): Range = Range(start, start)
private def parse(range: String): Range = {
val idx = range.indexOf("..")
if (idx == -1) {
val value = range.toInt
Range(value, value)
} else {
val start = range.substring(0, idx).toInt
val end = range.substring(idx + 2).toInt
if (start > end) throw new IllegalArgumentException("start > end")
Range(start, end)
}
}
def parseRanges(ranges: String): List[Range] = {
if (ranges.isEmpty) Nil
else ranges.split(",").map(parse).toList
}
}
object Str {
def simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd hh:mm:ssX")
def dateTime(date: Date): String = simpleDateFormat.format(date)
def framework(framework: FrameworkInfo): String = "%s name: %s host: %s failover_timeout: %.2f".format(id(framework.getId.getValue), framework.getName, framework.getHostname, framework.getFailoverTimeout)
def master(master: MasterInfo): String = "%s pid: %s host: %s".format(id(master.getId), master.getId, master.getHostname)
def slave(slave: SlaveInfo): String = "%s host: %s port: %d %s".format(id(slave.getId.getValue), slave.getHostname, slave.getPort, resources(slave.getResourcesList))
def offer(offer: Offer): String = "%s%s %s %s".format(offer.getHostname, id(offer.getId.getValue), resources(offer.getResourcesList), attributes(offer.getAttributesList))
    def offers(offers: Iterable[Offer]): String = offers.map(Str.offer).mkString("\n")
def task(task: TaskInfo): String = "%s slave: %s %s data: %s".format(task.getTaskId.getValue, id(task.getSlaveId.getValue), resources(task.getResourcesList), new String(task.getData.toByteArray))
def resources(resources: util.List[Protos.Resource]): String = {
var s = ""
val order: util.List[String] = "cpus mem disk ports".split(" ").toList
for (resource <- resources.sortBy(r => order.indexOf(r.getName))) {
if (!s.isEmpty) s += " "
s += resource.getName + ":"
if (resource.hasScalar)
s += "%.2f".format(resource.getScalar.getValue)
if (resource.hasRanges)
for (range <- resource.getRanges.getRangeList)
s += "[" + range.getBegin + ".." + range.getEnd + "]"
}
s
}
def attributes(attributes: util.List[Protos.Attribute]): String = {
var s = ""
for (attr <- attributes) {
if (!s.isEmpty) s += ";"
s += attr.getName + ":"
if (attr.hasText) s += attr.getText.getValue
if (attr.hasScalar) s += "%.2f".format(attr.getScalar.getValue)
}
s
}
def taskStatus(status: TaskStatus): String = {
var s = ""
s += status.getTaskId.getValue
s += " " + status.getState.name()
s += " slave:" + id(status.getSlaveId.getValue)
if (status.getState != TaskState.TASK_RUNNING)
s += " reason:" + status.getReason.name()
if (status.getMessage != null && status.getMessage != "")
s += " message:" + status.getMessage
s
}
def id(id: String): String = "#" + suffix(id, 5)
def suffix(s: String, maxLen: Int): String = {
if (s.length <= maxLen) return s
s.substring(s.length - maxLen)
}
}
def copyAndClose(in: InputStream, out: OutputStream): Unit = {
val buffer = new Array[Byte](128 * 1024)
var actuallyRead = 0
try {
while (actuallyRead != -1) {
actuallyRead = in.read(buffer)
if (actuallyRead != -1) out.write(buffer, 0, actuallyRead)
}
} finally {
try {
in.close()
}
catch {
case ignore: IOException =>
}
try {
out.close()
}
catch {
case ignore: IOException =>
}
}
}
def getScalarResources(offer: Offer, name: String): Double = {
offer.getResourcesList.foldLeft(0.0) { (all, current) =>
if (current.getName == name) all + current.getScalar.getValue
else all
}
}
def getRangeResources(offer: Offer, name: String): List[Protos.Value.Range] = {
offer.getResourcesList.foldLeft[List[Protos.Value.Range]](List()) { case (all, current) =>
if (current.getName == name) all ++ current.getRanges.getRangeList
else all
}
}
}
| stealthly/exhibitor-mesos-framework | src/main/scala/net/elodina/mesos/exhibitor/Util.scala | Scala | apache-2.0 | 8,370 |
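// Editor's note: hedged round-trip sketch for Util.parseList/formatList and
// Util.Range above (not part of the original file). Separator and backslash
// characters inside keys or values are escaped with a single backslash.
import net.elodina.mesos.exhibitor.Util

object UtilSketch {
  def main(args: Array[String]): Unit = {
    val pairs = List("a" -> "1", "b,c" -> "x=y")
    val encoded = Util.formatList(pairs) // "a=1,b\,c=x\=y"
    assert(Util.parseList(encoded) == pairs) // escaping survives the round trip
    val ranges = Util.Range.parseRanges("1..3,7") // List(1..3, 7)
    assert(ranges.head.values == List(1, 2, 3))
    assert(Util.Range(2, 9).overlap(Util.Range(5, 12)) == Some(Util.Range(5, 9)))
    println(encoded)
  }
}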
package ru.maizy.cheesecake.server.tests.bodyparser
/**
* Copyright (c) Nikita Kovaliov, maizy.ru, 2016
* See LICENSE.txt for details.
*/
import com.typesafe.config.ConfigFactory
import org.scalatest.FlatSpecLike
import org.scalatest.prop.TableDrivenPropertyChecks._
import org.scalatest.prop.Tables.Table
import ru.maizy.cheesecake.server.ExtractResult
import ru.maizy.cheesecake.server.bodyparser.{ RegexpParserSpec => SpecType }
import ru.maizy.cheesecake.server.tests.BaseSpec
class RegexpParserSpecSpec extends BaseSpec with FlatSpecLike {
def fromConfig(config: String): ExtractResult[SpecType] = {
SpecType.fromConfig(ConfigFactory.parseString(config))
}
"RegexpParserSpec" should "build from right config" in {
val result = fromConfig {
      // TODO: scalastyle detection is incorrect inside multiline strings
// scalastyle:off
"""
{
type: regexp
pattern: "(?m)^pi: [\\\\d\\\\.]+$"
}
"""
// scalastyle:on
}
result.warnings should be(empty)
result.result should be(defined)
    result.result.get.pattern.pattern.pattern shouldBe "(?m)^pi: [\\d\\.]+$"
}
val badConfigs = Table(
// scalastyle:off
("label", "config"),
("without `pattern`",
"""
{
type: regexp
}
"""),
("with bad `pattern`",
"""
{
type: regexp
pattern: "\\\\d["
}
"""),
("with bad `pattern` type",
"""
{
type: regexp
pattern: {wtf: 1}
}
""")
// scalastyle:on
)
forAll(badConfigs) { (label, config) =>
it should s"ignore bad config ($label) and return warnings" in {
val result = fromConfig(config)
result.warnings should not be empty
result.result should be(empty)
}
}
}
| maizy/cheesecake | server/src/test/scala/ru/maizy/cheesecake/server/tests/bodyparser/RegexpParserSpecSpec.scala | Scala | apache-2.0 | 1,809 |
package android
import java.io.File
import sbt.{Configuration, Task, Def, Setting}
import scala.language.experimental.macros
import scala.util.{Failure, Success, Try}
package object dsl {
def list[A](body: Seq[A]): List[A] = macro dsl.Macros.listImplN[A]
def list[A](body: A): List[A] = macro dsl.Macros.listImpl1[A]
def inProject(project: String)(ss: Setting[_]*): Seq[Setting[_]] =
inProject(sbt.ProjectRef(sbt.file(".").getCanonicalFile, project))(ss:_*)
def inProject(project: sbt.ProjectRef)(ss: Setting[_]*): Seq[Setting[_]] =
ss map VariantSettings.fixProjectScope(project)
private def stringFlags(key: sbt.TaskKey[Seq[String]], ss: Seq[String]) = key ++= ss
private def stringFlags(key: sbt.TaskKey[Seq[String]], config: Configuration, ss: Seq[String]) =
key in config ++= ss
def javacFlags(opts: String*) = stringFlags(sbt.Keys.javacOptions, opts)
def javacFlags(config: Configuration)(opts: String*) =
stringFlags(sbt.Keys.javacOptions, config, opts)
def scalacFlags(opts: String*) = stringFlags(sbt.Keys.scalacOptions, opts)
def scalacFlags(config: Configuration)(opts: String*) =
stringFlags(sbt.Keys.scalacOptions, config, opts)
def useLibrary(library: String) =
Keys.libraryRequests += ((library, true))
def buildTools(version: String) =
Keys.buildToolsVersion := Option(version)
private def extendVariant(key: sbt.SettingKey[Map[String,Seq[Setting[_]]]], name: String, ss: Seq[Setting[_]]) =
key <<= key { vs =>
val ss2 = vs(name)
vs + ((name, ss2 ++ ss))
}
def extendFlavor(name: String)(ss: Setting[_]*): Setting[_] =
extendVariant(Keys.flavors, name, ss)
def flavor(name: String)(ss: Setting[_]*): Setting[_] =
Keys.flavors += ((name, ss))
def extendBuildType(name: String)(ss: Setting[_]*): Setting[_] =
extendVariant(Keys.buildTypes, name, ss)
def buildType(name: String)(ss: Setting[_]*) =
Keys.buildTypes += ((name, ss))
def buildConfig(`type`: String, name: String, value: Def.Initialize[Task[String]]) =
Keys.buildConfigOptions <+= value map { v => (`type`, name, v) }
def buildConfig(`type`: String, name: String, value: String) =
Keys.buildConfigOptions += ((`type`, name, value))
def resValue(`type`: String, name: String, value: String) =
Keys.resValues += ((`type`, name, value))
def resValue(`type`: String, name: String, value: Def.Initialize[Task[String]]) =
Keys.resValues <+= value map { v =>
(`type`, name, v)
}
def signingConfig(keystore: File,
alias: String,
storePass: Option[String] = None,
keyPass: Option[String] = None,
singlePass: Boolean = true,
storeType: String = "jks") = {
val sp = storePass orElse keyPass
val config = if (sp.isEmpty) {
if (singlePass)
PromptStorepassSigningConfig(keystore, alias, storeType)
else
PromptPasswordsSigningConfig(keystore, alias, storeType)
} else
PlainSigningConfig(keystore, sp.get, alias, keyPass, storeType)
Keys.apkSigningConfig := Some(config)
}
def apkExclude(name: String*) = Keys.packagingOptions := {
val opts = Keys.packagingOptions.value
opts.copy(excludes = opts.excludes ++ name)
}
def apkPickFirst(name: String*) = Keys.packagingOptions := {
val opts = Keys.packagingOptions.value
opts.copy(pickFirsts = opts.pickFirsts ++ name)
}
def apkMerge(name: String*) = Keys.packagingOptions := {
val opts = Keys.packagingOptions.value
opts.copy(merges = opts.merges ++ name)
}
def manifestPlaceholder(key: String, value: String) =
Keys.manifestPlaceholders += ((key,value))
def manifestPlaceholder(key: String, value: Def.Initialize[Task[String]]) =
Keys.manifestPlaceholders <+= value map { v => (key,v) }
def apkVersionName(name: String) = Keys.versionName := Option(name)
def apkVersionCode(code: Int) = Keys.versionCode := Option(code)
def apkVersionName(name: Def.Initialize[Task[String]]) = Keys.versionName <<= name map Option.apply
def apkVersionCode(code: Def.Initialize[Task[Int]]) = Keys.versionCode <<= code map Option.apply
private[android] def checkVersion(tag: String, version: String): Unit = {
Try(version.toInt) match {
case Success(_) =>
case Failure(_) => if (version.length > 1)
PluginFail(tag + " must be an integer value or a single letter")
}
}
def dexMainClassList(classes: String*) = Keys.dexMainClassesConfig := {
val layout = Keys.projectLayout.value
implicit val out = Keys.outputLayout.value
sbt.IO.writeLines(layout.maindexlistTxt, classes)
layout.maindexlistTxt
}
}
package dsl {
private[android] object Macros {
import scala.reflect.macros.Context
def listImplN[A](c: Context)(body: c.Expr[Seq[A]])(implicit ev: c.WeakTypeTag[A]): c.Expr[List[A]] = {
import c.universe._
val xs = body.tree.children
if (xs.isEmpty)
c.Expr[List[A]](Apply(Select(body.tree, newTermName("toList")), Nil))
else
commonImpl(c)(body)
}
def listImpl1[A](c: Context)
(body: c.Expr[A])
(implicit ev: c.WeakTypeTag[A]): c.Expr[List[A]] = {
import c.universe._
val xs = body.tree.children
if (xs.isEmpty)
c.Expr[List[A]](Apply(Ident(newTermName("List")), body.tree :: Nil))
else
commonImpl(c)(body)
}
def commonImpl[A](c: Context)(body: c.Expr[_])(implicit ev: c.WeakTypeTag[A]): c.Expr[List[A]] = {
import c.universe._
val seqA = c.weakTypeOf[Seq[A]]
c.Expr[List[A]](body.tree.children.reduce { (a,ch) =>
val acc = if (a.tpe != null && a.tpe <:< ev.tpe) {
Apply(Ident(newTermName("List")), a :: Nil)
} else a
if (ch.tpe <:< seqA)
Apply(Select(acc, newTermName("$plus$plus")), List(ch))
else if (ch.tpe <:< ev.tpe)
Apply(Select(acc, newTermName("$colon$plus")), List(ch))
else c.abort(ch.pos, s"Unexpected type: ${ch.tpe}, needed ${ev.tpe}")
})
}
}
}
| dant3/android-sdk-plugin | src/dsl.scala | Scala | bsd-3-clause | 6,054 |
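// Editor's note: hedged build.sbt sketch for the DSL above, assuming the
// sbt-android plugin is on the build classpath; all values are placeholders.
// import android.dsl._
// buildType("staging")(apkVersionName("1.0-staging"), manifestPlaceholder("appName", "My App (staging)"))
// flavor("free")(buildConfig("boolean", "IS_PRO", "false"), resValue("string", "app_name", "My App Free"))
// signingConfig(file("release.keystore"), alias = "release") // single prompt for the store password
// apkExclude("META-INF/LICENSE.txt")
// dexMainClassList("com/example/Main.class")
// list(...) flattens a block mixing Setting[_] and Seq[Setting[_]] into a List at compile time.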